diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md index 522a956..9946c1c 100644 --- a/docs/CONFIGURATION.md +++ b/docs/CONFIGURATION.md @@ -26,7 +26,8 @@ Comma-separated list of enabled handlers. Disabled verbs return `404`. |---|---|---| | `RECEIPT_SIGNER_ID` | `runtime` (or `ENS_NAME` when set) | Receipt proof signer identifier. | | `RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64` | empty | Required for signing receipts. Base64 of PEM private key. | -| `RECEIPT_SIGNING_PUBLIC_KEY` | empty | Optional local verifier pubkey text in `ed25519:` format for `/verify`. | +| `RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64` | empty | Public key for `/verify` (base64-encoded PEM). | +| `RECEIPT_SIGNING_PUBLIC_KEY_PEM` | empty | Public key for `/verify` (plain PEM text). Either this or the B64 variant is sufficient. | | `ENS_NAME` | empty | Optional identity alias fallback. | ## ENS-based verification @@ -47,6 +48,7 @@ Comma-separated list of enabled handlers. Disabled verbs return `404`. | `SCHEMA_FETCH_TIMEOUT_MS` | `15000` | Timeout per schema document fetch. | | `SCHEMA_VALIDATE_BUDGET_MS` | `15000` | Budget for async schema compilation. | | `VERIFY_SCHEMA_CACHED_ONLY` | `1` | If `1`, `/verify?schema=1` only uses warm validators and returns `202` on cold cache. | +| `REQUEST_SCHEMA_VALIDATION` | `0` | If `1`, validate verb request payloads against published request schemas. Returns `503` if schemas are unavailable. | ## Cache controls @@ -73,6 +75,35 @@ Comma-separated list of enabled handlers. Disabled verbs return `404`. | `ENABLE_SSRF_GUARD` | `1` | Enables DNS/IP/local-network SSRF checks. | | `ALLOW_FETCH_HOSTS` | empty | Optional CSV domain allowlist (`example.com,api.example.com`). | +## CORS + +| Variable | Default | Purpose | +|---|---|---| +| `CORS_ALLOW_ORIGINS` | empty | Comma-separated list of allowed origins. Empty = deny browser-origin requests. Use `*` to allow all (not recommended in production). 
| +| `CORS_ALLOW_HEADERS` | `Content-Type, Authorization` | Allowed request headers. | +| `CORS_ALLOW_METHODS` | `GET,POST,OPTIONS` | Allowed HTTP methods. | + +## Debug routes + +| Variable | Default | Purpose | +|---|---|---| +| `DEBUG_ROUTES_ENABLED` | `0` | If `1`, enables `/debug/*` endpoints. Disabled by default in production. | +| `DEBUG_BEARER_TOKEN` | empty | If set, requires `Authorization: Bearer ` on all debug routes. | + +## Request logging + +| Variable | Default | Purpose | +|---|---|---| +| `LOG_REQUESTS` | `1` | If `1`, emits structured JSON log lines to stdout for every request. | + +## Rate limiting + +| Variable | Default | Purpose | +|---|---|---| +| `RATE_LIMIT_ENABLED` | `0` | If `1`, enables per-IP rate limiting. | +| `RATE_LIMIT_MAX` | `120` | Max requests per window per IP. | +| `RATE_LIMIT_WINDOW_MS` | `60000` | Sliding window duration in milliseconds. | + ## Schema prewarm behavior | Variable | Default | Purpose | @@ -83,8 +114,11 @@ Comma-separated list of enabled handlers. Disabled verbs return `404`. ## Recommended production baseline -- Set explicit signing keys and verify `signer_ok=true` on `/health`. +- Set explicit signing keys and verify `signer_ok=true` and `verifier_ok=true` on `/health`. - Keep `VERIFY_SCHEMA_CACHED_ONLY=1` for edge stability. +- Set `CORS_ALLOW_ORIGINS` to specific origins (never `*` in production). +- Set `DEBUG_ROUTES_ENABLED=0` (default) or protect with `DEBUG_BEARER_TOKEN`. +- Set `RATE_LIMIT_ENABLED=1` with appropriate limits for your traffic profile. - Restrict egress using both network policy and `ALLOW_FETCH_HOSTS` where possible. - Tune `FETCH_MAX_BYTES` and timeout budgets based on expected payload sizes. - Poll `/debug/validators` after deploy and prewarm critical verbs. 
diff --git a/server.mjs b/server.mjs index 9de1063..02f8452 100644 --- a/server.mjs +++ b/server.mjs @@ -1,1298 +1,1298 @@ -// server.mjs -import express from "express"; -import crypto from "crypto"; -import Ajv from "ajv"; -import addFormats from "ajv-formats"; -import { ethers } from "ethers"; -import net from "net"; - -import { - signReceiptEd25519Sha256, - verifyReceiptEd25519Sha256, - CANONICAL_ID_SORTED_KEYS_V1, -} from "@commandlayer/runtime-core"; - -// ---- instance identity + uptime (add near top) -const INSTANCE_ID = crypto.randomUUID(); -const BOOTED_AT = Date.now(); -const uptimeMs = () => Date.now() - BOOTED_AT; -const instancePayload = () => ({ instance: { id: INSTANCE_ID, uptime_ms: uptimeMs() } }); - -// 2) Add global crash logging so you see the reason in Railway logs -process.on("unhandledRejection", (reason) => { - console.error("[fatal] unhandledRejection", reason); -}); -process.on("uncaughtException", (err) => { - console.error("[fatal] uncaughtException", err); -}); - -const app = express(); -app.use(express.json({ limit: "2mb" })); - -// ---- basic CORS (no dependency) -app.use((req, res, next) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader( - "Access-Control-Allow-Headers", - "Content-Type, Authorization, X-Debug-Token" - ); - res.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS"); - if (req.method === "OPTIONS") return res.status(204).end(); - next(); -}); - -const PORT = Number(process.env.PORT || 8080); - -// ---- runtime config -const ENABLED_VERBS = ( - process.env.ENABLED_VERBS || - "fetch,describe,format,clean,parse,summarize,convert,explain,analyze,classify" -) - .split(",") - .map((s) => s.trim()) - .filter(Boolean); - -const SIGNER_ID = - process.env.RECEIPT_SIGNER_ID || - process.env.ENS_NAME || - process.env.CL_SIGNER_ID || - process.env.SIGNER_ID || - "runtime.commandlayer.eth"; - -const SIGNER_KID = process.env.SIGNER_KID || "v1"; - -// ---- key material (support both raw PEM and 
base64-PEM) -const PRIV_PEM_RAW = process.env.RECEIPT_SIGNING_PRIVATE_KEY_PEM || ""; -const PUB_PEM_RAW = process.env.RECEIPT_SIGNING_PUBLIC_KEY_PEM || ""; - -const PRIV_PEM_B64 = process.env.RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64 || ""; -const PUB_PEM_B64 = process.env.RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64 || ""; - -// ---- service identity / discovery -const SERVICE_NAME = process.env.SERVICE_NAME || "commandlayer-runtime"; -const SERVICE_VERSION = process.env.SERVICE_VERSION || "1.0.0"; -const CANONICAL_BASE = ( - process.env.CANONICAL_BASE_URL || "https://runtime.commandlayer.org" -).replace(/\/+$/, ""); -const API_VERSION = process.env.API_VERSION || "1.0.0"; - -// ENS verifier config -const ETH_RPC_URL = process.env.ETH_RPC_URL || ""; -const VERIFIER_ENS_NAME = - process.env.VERIFIER_ENS_NAME || process.env.ENS_NAME || SIGNER_ID || ""; -const ENS_PUBKEY_TEXT_KEY = - process.env.ENS_PUBKEY_TEXT_KEY || "cl.receipt.pubkey.pem"; - -// IMPORTANT: AJV should fetch schemas from www, but schemas' $id/refs may be commandlayer.org. -// We normalize fetch URLs to https://www.commandlayer.org to avoid redirect/host mismatches. 
-const SCHEMA_HOST = (process.env.SCHEMA_HOST || "https://www.commandlayer.org").replace( - /\/+$/, - "" -); -const SCHEMA_FETCH_TIMEOUT_MS = Number(process.env.SCHEMA_FETCH_TIMEOUT_MS || 15000); -const SCHEMA_VALIDATE_BUDGET_MS = Number( - process.env.SCHEMA_VALIDATE_BUDGET_MS || 15000 -); - -// ---- scaling + safety knobs (server-side caps) -const MAX_JSON_CACHE_ENTRIES = Number(process.env.MAX_JSON_CACHE_ENTRIES || 256); -const JSON_CACHE_TTL_MS = Number(process.env.JSON_CACHE_TTL_MS || 10 * 60 * 1000); -const MAX_VALIDATOR_CACHE_ENTRIES = Number( - process.env.MAX_VALIDATOR_CACHE_ENTRIES || 128 -); -const VALIDATOR_CACHE_TTL_MS = Number( - process.env.VALIDATOR_CACHE_TTL_MS || 30 * 60 * 1000 -); -const SERVER_MAX_HANDLER_MS = Number(process.env.SERVER_MAX_HANDLER_MS || 12000); - -// fetch hardening -const FETCH_TIMEOUT_MS = Number(process.env.FETCH_TIMEOUT_MS || 8000); -const FETCH_MAX_BYTES = Number(process.env.FETCH_MAX_BYTES || 256 * 1024); -const ENABLE_SSRF_GUARD = String(process.env.ENABLE_SSRF_GUARD || "1") === "1"; -const ALLOW_FETCH_HOSTS = (process.env.ALLOW_FETCH_HOSTS || "") - .split(",") - .map((s) => s.trim().toLowerCase()) - .filter(Boolean); - -// verify hardening -const VERIFY_MAX_MS = Number(process.env.VERIFY_MAX_MS || 30000); - -// Edge-safe schema verify -const VERIFY_SCHEMA_CACHED_ONLY = - String(process.env.VERIFY_SCHEMA_CACHED_ONLY || "1") === "1"; - -// Prewarm knobs -const PREWARM_MAX_VERBS = Number(process.env.PREWARM_MAX_VERBS || 25); -const PREWARM_TOTAL_BUDGET_MS = Number(process.env.PREWARM_TOTAL_BUDGET_MS || 12000); -const PREWARM_PER_VERB_BUDGET_MS = Number( - process.env.PREWARM_PER_VERB_BUDGET_MS || 5000 -); - -// ---- debug gating -const ENABLE_DEBUG = String(process.env.ENABLE_DEBUG || "0") === "1"; -const DEBUG_TOKEN = process.env.DEBUG_TOKEN || ""; - -function requireDebug(req, res, next) { - // If debug disabled, pretend it doesn't exist. 
- if (!ENABLE_DEBUG) return res.status(404).end(); - - // Require token - if (!DEBUG_TOKEN) return res.status(404).end(); - - const auth = String(req.headers.authorization || ""); - const xTok = String(req.headers["x-debug-token"] || ""); - const bearer = auth.toLowerCase().startsWith("bearer ") - ? auth.slice(7).trim() - : ""; - - const ok = (bearer && bearer === DEBUG_TOKEN) || (xTok && xTok === DEBUG_TOKEN); - - if (!ok) return res.status(404).end(); // 404 to avoid oracle/probing - next(); -} - -function nowIso() { - return new Date().toISOString(); -} - -function randId(prefix = "trace_") { - return prefix + crypto.randomBytes(6).toString("hex"); -} - -function normalizePemLoose(input) { - if (!input) return null; - let s = String(input).replace(/\\n/g, "\n").trim(); - - const begin = s.match(/-----BEGIN [^-]+-----/); - const end = s.match(/-----END [^-]+-----/); - if (!begin || !end) return null; - - const type = begin[0].replace("-----BEGIN ", "").replace("-----", "").trim(); - const body = s - .replace(/-----BEGIN [^-]+-----/g, "") - .replace(/-----END [^-]+-----/g, "") - .replace(/[\r\n\s]/g, "") - .trim(); - - if (!body || !/^[A-Za-z0-9+/=]+$/.test(body)) return null; - - const wrapped = body.match(/.{1,64}/g)?.join("\n") || body; - return `-----BEGIN ${type}-----\n${wrapped}\n-----END ${type}-----`; -} - -function pemFromB64(b64) { - if (!b64) return null; - try { - const text = Buffer.from(b64, "base64").toString("utf8"); - return normalizePemLoose(text); - } catch { - return null; - } -} - -function getPrivatePem() { - return normalizePemLoose(PRIV_PEM_RAW) || pemFromB64(PRIV_PEM_B64); -} - -function getPublicPemFromEnv() { - return normalizePemLoose(PUB_PEM_RAW) || pemFromB64(PUB_PEM_B64); -} - -function enabled(verb) { - return ENABLED_VERBS.includes(verb); -} - -function makeError(code, message, extra = {}) { - return { status: "error", code, message, ...extra }; -} - -function requireBody(req, res) { - if (!req.body || typeof req.body !== "object") 
{ - res.status(400).json({ ...makeError(400, "Invalid JSON body"), ...instancePayload() }); - return false; - } - return true; -} - -// ----------------------- -// SSRF guard for fetch() -// ----------------------- -function isPrivateIp(ip) { - if (!net.isIP(ip)) return false; - if (net.isIP(ip) === 6) return true; // block ipv6 by default - const parts = ip.split(".").map((n) => Number(n)); - const [a, b] = parts; - if (a === 10) return true; - if (a === 127) return true; - if (a === 169 && b === 254) return true; - if (a === 172 && b >= 16 && b <= 31) return true; - if (a === 192 && b === 168) return true; - if (a === 0) return true; - if (a === 100 && b >= 64 && b <= 127) return true; // CGNAT - return false; -} - -async function resolveARecords(hostname) { - const dns = await import("dns/promises"); - try { - const addrs = await dns.resolve4(hostname); - return Array.isArray(addrs) ? addrs : []; - } catch { - return []; - } -} - -async function ssrfGuardOrThrow(urlStr) { - if (!ENABLE_SSRF_GUARD) return; - let u; - try { - u = new URL(urlStr); - } catch { - throw new Error("fetch requires a valid absolute URL"); - } - if (!/^https?:$/.test(u.protocol)) throw new Error("fetch only allows http(s)"); - const host = (u.hostname || "").toLowerCase(); - - if (ALLOW_FETCH_HOSTS.length) { - const ok = ALLOW_FETCH_HOSTS.some((h) => host === h || host.endsWith("." 
+ h)); - if (!ok) throw new Error("fetch host not allowed"); - } - - if (host === "localhost" || host.endsWith(".localhost")) throw new Error("fetch host blocked"); - if (host === "169.254.169.254") throw new Error("fetch host blocked"); - - if (net.isIP(host) && isPrivateIp(host)) throw new Error("fetch to private IP blocked"); - - const addrs = await resolveARecords(host); - if (addrs.some(isPrivateIp)) throw new Error("fetch DNS resolves to private IP (blocked)"); -} - -// ----------------------- -// ENS TXT pubkey fetch (ethers v6) -// ----------------------- -let ensCache = { - fetched_at: 0, - ttl_ms: 10 * 60 * 1000, - pem: null, - error: null, - source: null, -}; - -function hasRpc() { - return !!ETH_RPC_URL; -} - -async function withTimeout(promise, ms, label = "timeout") { - if (!ms || ms <= 0) return await promise; - return await Promise.race([ - promise, - new Promise((_, rej) => setTimeout(() => rej(new Error(label)), ms)), - ]); -} - -async function fetchEnsPubkeyPem({ refresh = false } = {}) { - const now = Date.now(); - if (!refresh && ensCache.pem && now - ensCache.fetched_at < ensCache.ttl_ms) { - return { ok: true, pem: ensCache.pem, source: ensCache.source, cache: { ...ensCache } }; - } - if (!VERIFIER_ENS_NAME) { - ensCache = { ...ensCache, fetched_at: now, pem: null, error: "Missing VERIFIER_ENS_NAME", source: null }; - return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; - } - if (!ETH_RPC_URL) { - ensCache = { ...ensCache, fetched_at: now, pem: null, error: "Missing ETH_RPC_URL", source: null }; - return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; - } - try { - const provider = new ethers.JsonRpcProvider(ETH_RPC_URL); - const resolver = await withTimeout( - provider.getResolver(VERIFIER_ENS_NAME), - 6000, - "ens_resolver_timeout" - ); - if (!resolver) throw new Error("No resolver for ENS name"); - - const txt = await 
withTimeout(resolver.getText(ENS_PUBKEY_TEXT_KEY), 6000, "ens_text_timeout"); - const pem = normalizePemLoose(txt); - if (!pem) throw new Error(`ENS text ${ENS_PUBKEY_TEXT_KEY} missing/invalid PEM`); - - ensCache = { ...ensCache, fetched_at: now, pem, error: null, source: "ens" }; - return { ok: true, pem, source: "ens", cache: { ...ensCache } }; - } catch (e) { - ensCache = { - ...ensCache, - fetched_at: now, - pem: null, - error: e?.message || "ens fetch failed", - source: null, - }; - return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; - } -} - -// ----------------------- -// AJV schema validation -// ----------------------- -const schemaJsonCache = new Map(); // url -> { fetchedAt, schema } -const validatorCache = new Map(); // verb -> { compiledAt, validate } -const inflightValidator = new Map(); // verb -> Promise - -function cachePrune(map, { ttlMs, maxEntries, tsField = "fetchedAt" } = {}) { - const now = Date.now(); - if (ttlMs && ttlMs > 0) { - for (const [k, v] of map.entries()) { - const t = v?.[tsField] || 0; - if (now - t > ttlMs) map.delete(k); - } - } - if (maxEntries && maxEntries > 0 && map.size > maxEntries) { - const entries = Array.from(map.entries()).sort( - (a, b) => (a[1]?.[tsField] || 0) - (b[1]?.[tsField] || 0) - ); - const toDelete = entries.slice(0, map.size - maxEntries); - for (const [k] of toDelete) map.delete(k); - } -} - -function normalizeSchemaFetchUrl(url) { - if (!url) return url; - let u = String(url); - u = u.replace(/^http:\/\//i, "https://"); - u = u.replace(/^https:\/\/commandlayer\.org/i, "https://www.commandlayer.org"); - u = u.replace(/^https:\/\/www\.commandlayer\.org\/+/, "https://www.commandlayer.org/"); - if (SCHEMA_HOST.startsWith("https://www.commandlayer.org")) { - u = u.replace(/^https:\/\/commandlayer\.org/i, "https://www.commandlayer.org"); - } - return u; -} - -async function fetchJsonWithTimeout(url, timeoutMs) { - const u = normalizeSchemaFetchUrl(url); - 
cachePrune(schemaJsonCache, { - ttlMs: JSON_CACHE_TTL_MS, - maxEntries: MAX_JSON_CACHE_ENTRIES, - tsField: "fetchedAt", - }); - const cached = schemaJsonCache.get(u); - if (cached) return cached.schema; - - const ac = new AbortController(); - const t = setTimeout(() => ac.abort(), timeoutMs); - try { - const resp = await fetch(u, { - method: "GET", - headers: { accept: "application/json" }, - signal: ac.signal, - redirect: "follow", - }); - if (!resp.ok) throw new Error(`schema fetch failed: ${resp.status} ${resp.statusText}`); - const schema = await resp.json(); - schemaJsonCache.set(u, { fetchedAt: Date.now(), schema }); - return schema; - } finally { - clearTimeout(t); - } -} - -function makeAjv() { - const ajv = new Ajv({ - allErrors: true, - strict: false, - validateSchema: false, - loadSchema: async (uri) => await fetchJsonWithTimeout(uri, SCHEMA_FETCH_TIMEOUT_MS), - }); - addFormats(ajv); - return ajv; -} - -function receiptSchemaUrlForVerb(verb) { - return `${SCHEMA_HOST}/schemas/v1.0.0/commons/${verb}/receipts/${verb}.receipt.schema.json`; -} - -async function getValidatorForVerb(verb) { - cachePrune(validatorCache, { - ttlMs: VALIDATOR_CACHE_TTL_MS, - maxEntries: MAX_VALIDATOR_CACHE_ENTRIES, - tsField: "compiledAt", - }); - - const hit = validatorCache.get(verb); - if (hit?.validate) return hit.validate; - if (inflightValidator.has(verb)) return await inflightValidator.get(verb); - - const build = (async () => { - const ajv = makeAjv(); - const url = receiptSchemaUrlForVerb(verb); - - // Preload shared refs (best effort) - try { - const shared = [ - `${SCHEMA_HOST}/schemas/v1.0.0/_shared/receipt.base.schema.json`, - `${SCHEMA_HOST}/schemas/v1.0.0/_shared/x402.schema.json`, - `${SCHEMA_HOST}/schemas/v1.0.0/_shared/identity.schema.json`, - ]; - await Promise.all(shared.map((u) => fetchJsonWithTimeout(u, SCHEMA_FETCH_TIMEOUT_MS).catch(() => null))); - } catch { - // ignore - } - - const schema = await fetchJsonWithTimeout(url, SCHEMA_FETCH_TIMEOUT_MS); - 
const validate = await withTimeout( - ajv.compileAsync(schema), - SCHEMA_VALIDATE_BUDGET_MS, - "ajv_compile_budget_exceeded" - ); - - validatorCache.set(verb, { compiledAt: Date.now(), validate }); - return validate; - })().finally(() => inflightValidator.delete(verb)); - - inflightValidator.set(verb, build); - return await build; -} - -function ajvErrorsToSimple(errors) { - if (!errors || !Array.isArray(errors)) return null; - return errors.slice(0, 25).map((e) => ({ - instancePath: e.instancePath, - schemaPath: e.schemaPath, - keyword: e.keyword, - message: e.message, - })); -} - -// ----------------------- -// Warm queue (edge-safe) -// ----------------------- -const warmQueue = new Set(); -let warmRunning = false; - -function hasValidatorCached(verb) { - return !!validatorCache.get(verb)?.validate; -} - -function startWarmWorker() { - if (warmRunning) return; - warmRunning = true; - - setTimeout(() => { - (async () => { - const started = Date.now(); - try { - while (warmQueue.size > 0) { - if (Date.now() - started > PREWARM_TOTAL_BUDGET_MS) break; - - const verb = warmQueue.values().next().value; - warmQueue.delete(verb); - - if (!verb) continue; - if (hasValidatorCached(verb)) continue; - - try { - await withTimeout(getValidatorForVerb(verb), PREWARM_PER_VERB_BUDGET_MS, "prewarm_per_verb_timeout"); - } catch (e) { - console.warn("[prewarm] verb failed", verb, e?.message || e); - } - } - } catch (e) { - console.error("[prewarm] worker crashed", e?.message || e); - } finally { - warmRunning = false; - if (warmQueue.size > 0) startWarmWorker(); - } - })().catch((e) => { - console.error("[prewarm] detached task rejected", e?.message || e); - warmRunning = false; - }); - }, 0); -} - -// ----------------------- -// receipts (runtime-core: single source of truth) -// ----------------------- -function makeReceipt({ - x402, - trace, - result, - status = "success", - error = null, - delegation_result = null, - actor = null, -}) { - let receipt = { - status, - x402, - 
trace, - ...(delegation_result ? { delegation_result } : {}), - ...(error ? { error } : {}), - ...(status === "success" ? { result } : {}), - metadata: { - proof: { - alg: "ed25519-sha256", - canonical: CANONICAL_ID_SORTED_KEYS_V1, - signer_id: SIGNER_ID, - kid: SIGNER_KID, - hash_sha256: null, - signature_b64: null, - }, - receipt_id: "", - }, - }; - - if (actor) receipt.metadata.actor = actor; - - const privPem = getPrivatePem(); - if (!privPem) throw new Error("Missing RECEIPT_SIGNING_PRIVATE_KEY_PEM or RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64"); - - receipt = signReceiptEd25519Sha256(receipt, { - signer_id: SIGNER_ID, - kid: SIGNER_KID, - canonical: CANONICAL_ID_SORTED_KEYS_V1, - privateKeyPem: privPem, - }); - - return receipt; -} - -// ----------------------- -// deterministic verb implementations -// ----------------------- -async function doFetch(body) { - const url = body?.source || body?.input?.source || body?.input?.url; - if (!url || typeof url !== "string") throw new Error("fetch requires source (url)"); - - await ssrfGuardOrThrow(url); - - const ac = new AbortController(); - const t = setTimeout(() => ac.abort(), FETCH_TIMEOUT_MS); - let resp; - try { - resp = await fetch(url, { method: "GET", signal: ac.signal }); - } finally { - clearTimeout(t); - } - - const reader = resp.body?.getReader?.(); - let received = 0; - const chunks = []; - - if (reader) { - while (true) { - const { value, done } = await reader.read(); - if (done) break; - received += value.byteLength; - if (received > FETCH_MAX_BYTES) break; - chunks.push(value); - } - } - - const buf = chunks.length - ? Buffer.concat(chunks.map((u) => Buffer.from(u))) - : Buffer.from(await resp.text()); - const text = buf.toString("utf8"); - const preview = text.slice(0, 2000); - - return { - items: [ - { - source: url, - query: body?.query ?? null, - include_metadata: body?.include_metadata ?? 
null, - ok: resp.ok, - http_status: resp.status, - headers: Object.fromEntries(resp.headers.entries()), - body_preview: preview, - bytes_read: Math.min(received || buf.length, FETCH_MAX_BYTES), - truncated: (received || buf.length) > FETCH_MAX_BYTES, - }, - ], - }; -} - -function doDescribe(body) { - const input = body?.input || {}; - const subject = String(input.subject || "").trim(); - if (!subject) throw new Error("describe.input.subject required"); - const detail = input.detail_level || "short"; - - const bullets = [ - "Schemas define meaning (requests + receipts).", - "Runtimes can be swapped without breaking interoperability.", - "Receipts can be independently verified (hash + signature).", - ]; - - const description = - detail === "short" - ? `**${subject}** is a standard “API meaning” contract agents can call using published schemas and receipts.` - : `**${subject}** is a semantic contract for agents. It standardizes verbs, strict JSON Schemas (requests + receipts), and verifiable receipts so different runtimes can execute the same intent without semantic drift.`; - - return { description, bullets, properties: { verb: "describe", version: "1.0.0", detail_level: detail } }; -} - -function doFormat(body) { - const input = body?.input || {}; - const content = String(input.content ?? 
""); - const target = input.target_style || "text"; - if (!content.trim()) throw new Error("format.input.content required"); - - let formatted = content; - let style = target; - - if (target === "table") { - const lines = content - .split(/\r?\n/) - .map((s) => s.trim()) - .filter(Boolean); - const rows = []; - for (const ln of lines) { - const m = ln.match(/^([^:]+):\s*(.*)$/); - if (m) rows.push([m[1].trim(), m[2].trim()]); - } - formatted = `| key | value |\n|---|---|\n` + rows.map(([k, v]) => `| ${k} | ${v} |`).join("\n"); - style = "table"; - } - - return { - formatted_content: formatted, - style, - original_length: content.length, - formatted_length: formatted.length, - notes: "Deterministic reference formatter (non-LLM).", - }; -} - -function doClean(body) { - const input = body?.input || {}; - let content = String(input.content ?? ""); - if (!content) throw new Error("clean.input.content required"); - - const ops = Array.isArray(input.operations) ? input.operations : []; - const issues = []; - - const apply = (op) => { - if (op === "normalize_newlines") content = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n"); - if (op === "collapse_whitespace") content = content.replace(/[ \t]+/g, " "); - if (op === "trim") content = content.trim(); - if (op === "remove_empty_lines") content = content.split("\n").filter((l) => l.trim() !== "").join("\n"); - if (op === "redact_emails") { - const before = content; - content = content.replace(/\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/gi, "[redacted-email]"); - if (content !== before) issues.push("emails_redacted"); - } - }; - - for (const op of ops) apply(op); - - return { - cleaned_content: content, - original_length: String(input.content ?? 
"").length, - cleaned_length: content.length, - operations_applied: ops, - issues_detected: issues, - }; -} - -function parseYamlBestEffort(text) { - const out = {}; - const lines = text.split(/\r?\n/); - for (const ln of lines) { - const m = ln.match(/^\s*([^:#]+)\s*:\s*(.*?)\s*$/); - if (m) out[m[1].trim()] = m[2].trim(); - } - return out; -} - -function doParse(body) { - const input = body?.input || {}; - const content = String(input.content ?? ""); - if (!content.trim()) throw new Error("parse.input.content required"); - - const contentType = (input.content_type || "").toLowerCase(); - const mode = input.mode || "best_effort"; - - let parsed = null; - let confidence = 0.75; - const warnings = []; - - if (contentType === "json") { - try { - parsed = JSON.parse(content); - confidence = 0.98; - } catch { - if (mode === "strict") throw new Error("invalid json"); - warnings.push("Invalid JSON; returned empty object in best_effort."); - parsed = {}; - confidence = 0.2; - } - } else if (contentType === "yaml") { - parsed = parseYamlBestEffort(content); - confidence = 0.75; - } else { - try { - parsed = JSON.parse(content); - confidence = 0.9; - } catch { - parsed = parseYamlBestEffort(content); - confidence = Object.keys(parsed).length ? 0.6 : 0.3; - if (!Object.keys(parsed).length) warnings.push("Could not confidently parse content."); - } - } - - const result = { parsed, confidence }; - if (warnings.length) result.warnings = warnings; - if (input.target_schema) result.target_schema = String(input.target_schema); - return result; -} - -function sha256HexUtf8(str) { - return crypto.createHash("sha256").update(String(str), "utf8").digest("hex"); -} - -function doSummarize(body) { - const input = body?.input || {}; - const content = String(input.content ?? 
""); - if (!content.trim()) throw new Error("summarize.input.content required"); - - const style = input.summary_style || "text"; - const format = (input.format_hint || "text").toLowerCase(); - const sentences = content.split(/(?<=[.!?])\s+/).filter(Boolean); - - let summary = ""; - if (style === "bullet_points") { - const picks = sentences.slice(0, 3).map((s) => s.replace(/\s+/g, " ").trim()); - summary = picks.join(" "); - } else { - summary = sentences.slice(0, 2).join(" ").trim(); - } - if (!summary) summary = content.slice(0, 400).trim(); - - const srcHash = sha256HexUtf8(content); - const cr = summary.length ? Number((content.length / summary.length).toFixed(3)) : 0; - - return { - summary, - format: format === "markdown" ? "markdown" : "text", - compression_ratio: cr, - source_hash: srcHash, - }; -} - -function doConvert(body) { - const input = body?.input || {}; - const content = String(input.content ?? ""); - const src = String(input.source_format ?? "").toLowerCase(); - const tgt = String(input.target_format ?? 
"").toLowerCase(); - if (!content.trim()) throw new Error("convert.input.content required"); - if (!src) throw new Error("convert.input.source_format required"); - if (!tgt) throw new Error("convert.input.target_format required"); - - let converted = content; - const warnings = []; - let lossy = false; - - if (src === "json" && tgt === "csv") { - let obj; - try { - obj = JSON.parse(content); - } catch { - throw new Error("convert json->csv requires valid JSON"); - } - if (obj && typeof obj === "object" && !Array.isArray(obj)) { - const keys = Object.keys(obj); - const vals = keys.map((k) => String(obj[k])); - converted = `${keys.join(",")}\n${vals.join(",")}`; - lossy = true; - warnings.push("JSON->CSV is lossy (types/nesting may be flattened)."); - } else { - throw new Error("convert json->csv supports only flat JSON objects"); - } - } else { - warnings.push(`No deterministic converter for ${src}->${tgt}; echoing content.`); - } - - return { converted_content: converted, source_format: src, target_format: tgt, lossy, warnings }; -} - -function doExplain(body) { - const input = body?.input || {}; - const subject = String(input.subject || "").trim(); - if (!subject) throw new Error("explain.input.subject required"); - - const detail = input.detail_level || "short"; - const core = [ - `A “receipt” is verifiable evidence that an execution happened under a specific verb + schema version.`, - `It includes the structured output plus a cryptographic hash and signature.`, - `Because the schema is public, anyone can independently validate the receipt later.`, - ]; - - const steps = [ - "1) Validate the request against the published request schema.", - "2) Execute the verb and produce structured output.", - "3) Build the receipt (base fields + result).", - "4) Canonicalize + hash the unsigned receipt.", - "5) Sign the hash with the runtime signer key.", - "6) Anyone can verify schema validity + hash match + signature (optionally resolving pubkey from ENS).", - ]; - - const 
explanation = - `**${subject}** are cryptographically verifiable execution artifacts that bind intent (verb+version), semantics (schema), and output into a signed proof.\n\n` + - core.map((s) => `- ${s}`).join("\n"); - - const result = { explanation, summary: "Receipts are evidence: validate schema + hash + signature." }; - if (detail !== "short") result.steps = steps; - return result; -} - -function doAnalyze(body) { - const input = String(body?.input ?? ""); - if (!input.trim()) throw new Error("analyze.input required (string)"); - - const goal = String(body?.goal ?? "").trim(); - const hints = Array.isArray(body?.hints) ? body.hints.map(String) : []; - const lines = input.split(/\r?\n/).filter((l) => l.trim() !== ""); - const words = input.trim().split(/\s+/).filter(Boolean); - - const containsUrls = /\bhttps?:\/\/[^\s]+/i.test(input); - const containsEmails = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i.test(input); - const containsJsonMarkers = /[{[\]}]/.test(input); - const containsNumbers = /\b\d+(\.\d+)?\b/.test(input); - - const labels = []; - if (containsJsonMarkers) labels.push("structured"); - if (containsUrls) labels.push("contains_urls"); - if (containsEmails) labels.push("contains_emails"); - - let score = 0; - if (containsEmails) score += 0.25; - if (containsUrls) score += 0.2; - if (containsJsonMarkers) score += 0.1; - if (containsNumbers) score += 0.05; - score = Math.min(1, Number(score.toFixed(3))); - - const summary = `Deterministic analysis: ${labels.join(",") || "plain_text"}. Goal="${goal || "n/a"}". Score=${score}.`; - const insights = [ - `Input length: ${input.length} chars; ~${words.length} words; ${lines.length} non-empty lines.`, - goal ? `Goal: ${goal}` : "Goal: (none)", - `Hints provided: ${hints.length}.`, - ]; - - return { summary, insights, labels, score }; -} - -function doClassify(body) { - const actor = String(body?.actor ?? 
"").trim(); - if (!actor) throw new Error("classify.actor required"); - - const input = body?.input || {}; - const content = String(input.content ?? ""); - if (!content.trim()) throw new Error("classify.input.content required"); - - const maxLabels = Number(body?.limits?.max_labels || 5); - - const labels = []; - const scores = []; - - const hasUrl = /\bhttps?:\/\/[^\s]+/i.test(content); - const hasEmail = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i.test(content); - const hasCode = /\b(error|exception|stack|trace|cannot get|http\/1\.1|curl)\b/i.test(content.toLowerCase()); - const hasFinance = /\b(invoice|payment|usd|\$|bank|wire|crypto)\b/i.test(content.toLowerCase()); - - const push = (lbl, sc) => { - labels.push(lbl); - scores.push(Number(sc.toFixed(6))); - }; - - if (hasUrl) push("contains_urls", 0.733333); - if (hasEmail) push("contains_emails", 0.5); - if (hasCode) push("code_or_logs", 0.4375); - if (hasFinance) push("finance", 0.25); - if (!labels.length) push("general", 0.25); - - const trimmedLabels = labels.slice(0, Math.min(128, maxLabels)); - const trimmedScores = scores.slice(0, trimmedLabels.length); - - return { labels: trimmedLabels, scores: trimmedScores, taxonomy: ["root", trimmedLabels[0] || "general"] }; -} - -// Router: dispatch by verb -const handlers = { - fetch: doFetch, - describe: async (b) => doDescribe(b), - format: async (b) => doFormat(b), - clean: async (b) => doClean(b), - parse: async (b) => doParse(b), - summarize: async (b) => doSummarize(b), - convert: async (b) => doConvert(b), - explain: async (b) => doExplain(b), - analyze: async (b) => doAnalyze(b), - classify: async (b) => doClassify(b), -}; - -async function handleVerb(verb, req, res) { - if (!enabled(verb)) return res.status(404).json({ ...makeError(404, `Verb not enabled: ${verb}`), ...instancePayload() }); - if (!requireBody(req, res)) return; - - const started = Date.now(); - - const rawParent = req.body?.trace?.parent_trace_id ?? 
req.body?.x402?.extras?.parent_trace_id ?? null; - const parentTraceId = typeof rawParent === "string" && rawParent.trim().length ? rawParent.trim() : null; - - const trace = { - trace_id: randId("trace_"), - ...(parentTraceId ? { parent_trace_id: parentTraceId } : {}), - started_at: nowIso(), - completed_at: null, - duration_ms: null, - provider: process.env.RAILWAY_SERVICE_NAME || "runtime", - }; - - try { - const x402 = req.body?.x402 || { verb, version: "1.0.0", entry: `x402://${verb}agent.eth/${verb}/v1.0.0` }; - - const callerTimeout = Number(req.body?.limits?.timeout_ms || req.body?.limits?.max_latency_ms || 0); - const timeoutMs = Math.min( - SERVER_MAX_HANDLER_MS, - callerTimeout && callerTimeout > 0 ? callerTimeout : SERVER_MAX_HANDLER_MS - ); - - const work = Promise.resolve(handlers[verb](req.body)); - const result = timeoutMs - ? await Promise.race([work, new Promise((_, rej) => setTimeout(() => rej(new Error("timeout")), timeoutMs))]) - : await work; - - trace.completed_at = nowIso(); - trace.duration_ms = Date.now() - started; - - const actor = req.body?.actor - ? { id: String(req.body.actor), role: "user" } - : req.body?.x402?.tenant - ? { id: String(req.body.x402.tenant), role: "tenant" } - : null; - - const receipt = makeReceipt({ x402, trace, result, status: "success", actor }); - return res.json(receipt); - } catch (e) { - trace.completed_at = nowIso(); - trace.duration_ms = Date.now() - started; - - const x402 = req.body?.x402 || { verb, version: "1.0.0", entry: `x402://${verb}agent.eth/${verb}/v1.0.0` }; - - const actor = req.body?.actor - ? { id: String(req.body.actor), role: "user" } - : req.body?.x402?.tenant - ? 
{ id: String(req.body.x402.tenant), role: "tenant" } - : null; - - const err = { - code: String(e?.code || "INTERNAL_ERROR"), - message: String(e?.message || "unknown error").slice(0, 2048), - retryable: String(e?.message || "").includes("timeout"), - details: { verb }, - }; - - const receipt = makeReceipt({ x402, trace, status: "error", error: err, actor }); - return res.status(500).json(receipt); - } -} - -// ----------------------- -// health/index -// ----------------------- -app.get("/", (req, res) => { - res.setHeader("Content-Type", "application/json; charset=utf-8"); - const verbs = (ENABLED_VERBS || []).map((v) => `/${v}/v${API_VERSION}`); - return res.status(200).end( - JSON.stringify({ - ok: true, - service: SERVICE_NAME, - version: SERVICE_VERSION, - api_version: API_VERSION, - base: CANONICAL_BASE, - health: "/health", - verify: "/verify", - verbs, - time: nowIso(), - }) - ); -}); - -app.get("/health", (req, res) => { - res.setHeader("Content-Type", "application/json; charset=utf-8"); - return res.status(200).end( - JSON.stringify({ - ok: true, - service: SERVICE_NAME, - version: SERVICE_VERSION, - api_version: API_VERSION, - base: CANONICAL_BASE, - node: process.version, - port: PORT, - enabled_verbs: ENABLED_VERBS, - signer_id: SIGNER_ID, - signer_ok: !!getPrivatePem(), - time: nowIso(), - ...instancePayload(), - }) - ); -}); - -// ----------------------- -// debug (gated) -// ----------------------- -app.get("/debug/env", requireDebug, (req, res) => { - const privPem = getPrivatePem(); - const pubPem = getPublicPemFromEnv(); - - res.json({ - ok: true, - node: process.version, - port: PORT, - service: process.env.RAILWAY_SERVICE_NAME || "runtime", - enabled_verbs: ENABLED_VERBS, - signer_id: SIGNER_ID, - signer_kid: SIGNER_KID, - signer_ok: !!privPem, - has_priv_b64: !!PRIV_PEM_B64, - has_priv_pem: !!normalizePemLoose(PRIV_PEM_RAW), - derived_priv_pem: !!privPem, - has_pub_b64: !!PUB_PEM_B64, - has_pub_pem: !!normalizePemLoose(PUB_PEM_RAW), - 
derived_pub_pem: !!pubPem, - verifier_ens_name: VERIFIER_ENS_NAME || null, - ens_pubkey_text_key: ENS_PUBKEY_TEXT_KEY, - has_rpc: hasRpc(), - schema_host: SCHEMA_HOST, - schema_fetch_timeout_ms: SCHEMA_FETCH_TIMEOUT_MS, - schema_validate_budget_ms: SCHEMA_VALIDATE_BUDGET_MS, - verify_schema_cached_only: VERIFY_SCHEMA_CACHED_ONLY, - enable_ssrf_guard: ENABLE_SSRF_GUARD, - fetch_timeout_ms: FETCH_TIMEOUT_MS, - fetch_max_bytes: FETCH_MAX_BYTES, - verify_max_ms: VERIFY_MAX_MS, - cache: { - max_json_cache_entries: MAX_JSON_CACHE_ENTRIES, - json_cache_ttl_ms: JSON_CACHE_TTL_MS, - max_validator_cache_entries: MAX_VALIDATOR_CACHE_ENTRIES, - validator_cache_ttl_ms: VALIDATOR_CACHE_TTL_MS, - }, - server_max_handler_ms: SERVER_MAX_HANDLER_MS, - prewarm: { - max_verbs: PREWARM_MAX_VERBS, - total_budget_ms: PREWARM_TOTAL_BUDGET_MS, - per_verb_budget_ms: PREWARM_PER_VERB_BUDGET_MS, - }, - service_name: SERVICE_NAME, - service_version: SERVICE_VERSION, - api_version: API_VERSION, - canonical_base_url: CANONICAL_BASE, - canonical_id: CANONICAL_ID_SORTED_KEYS_V1, - debug: { enable_debug: ENABLE_DEBUG, has_debug_token: !!DEBUG_TOKEN }, - }); -}); - -app.get("/debug/enskey", requireDebug, async (req, res) => { - const refresh = String(req.query.refresh || "0") === "1"; - const out = await fetchEnsPubkeyPem({ refresh }); - res.json({ - ok: !!out.ok, - pubkey_source: out.source || null, - ens_name: VERIFIER_ENS_NAME || null, - txt_key: ENS_PUBKEY_TEXT_KEY, - cache: out.cache - ? { fetched_at: new Date(out.cache.fetched_at).toISOString(), ttl_ms: out.cache.ttl_ms } - : null, - preview: out.pem ? out.pem.slice(0, 90) + "..." 
: null, - error: out.error || null, - }); -}); - -app.get("/debug/validators", requireDebug, (req, res) => { - res.json({ - ok: true, - cached: Array.from(validatorCache.keys()), - cache_sizes: { schemaJsonCache: schemaJsonCache.size, validatorCache: validatorCache.size }, - inflight: Array.from(inflightValidator.keys()), - warm_queue_size: warmQueue.size, - warm_running: warmRunning, - ...instancePayload(), - }); -}); - -app.post("/debug/prewarm", requireDebug, (req, res) => { - const verbs = Array.isArray(req.body?.verbs) ? req.body.verbs : []; - const cleaned = verbs - .map((v) => String(v || "").trim()) - .filter(Boolean) - .slice(0, PREWARM_MAX_VERBS); - - const supported = cleaned.filter((v) => handlers[v]); - for (const v of supported) warmQueue.add(v); - - res.json({ - ok: true, - queued: supported, - already_cached: supported.filter(hasValidatorCached), - queue_size: warmQueue.size, - }); - - startWarmWorker(); -}); - -// ----------------------- -// verb routes: //v1.0.0 -// ----------------------- -for (const v of Object.keys(handlers)) { - app.post(`/${v}/v1.0.0`, (req, res) => handleVerb(v, req, res)); -} - -// ----------------------- -// verify endpoint (schema validation + ENS pubkey) -// ----------------------- -app.post("/verify", async (req, res) => { - const work = (async () => { - const receipt = req.body; - - const wantEns = String(req.query.ens || "0") === "1"; - const refresh = String(req.query.refresh || "0") === "1"; - const wantSchema = String(req.query.schema || "0") === "1"; - - const proof = receipt?.metadata?.proof; - if (!proof?.signature_b64 || !proof?.hash_sha256) { - return res.status(400).json({ - ok: false, - checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, - error: "missing metadata.proof.signature_b64 or hash_sha256", - ...instancePayload(), - }); - } - - // 1) pick pubkey (env first, then ENS if requested and parseable) - let pubPem = getPublicPemFromEnv(); - let pubSrc = pubPem ? 
(normalizePemLoose(PUB_PEM_RAW) ? "env-pem" : "env-b64") : null; - - if (wantEns) { - const ensOut = await fetchEnsPubkeyPem({ refresh }); - if (ensOut.ok && ensOut.pem) { - pubPem = ensOut.pem; - pubSrc = "ens"; - } - } - - if (!pubPem) { - return res.status(400).json({ - ok: false, - checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, - error: - "no public key available (set RECEIPT_SIGNING_PUBLIC_KEY_PEM/_B64 or use ens=1 with valid ENS TXT)", - ...instancePayload(), - }); - } - - // 2) verify signature/hash via runtime-core - let v; - try { - v = verifyReceiptEd25519Sha256(receipt, { - publicKeyPemOrDer: pubPem, - allowedCanonicals: [CANONICAL_ID_SORTED_KEYS_V1], - }); - } catch (e) { - return res.status(400).json({ - ok: false, - checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, - values: { - verb: receipt?.x402?.verb ?? null, - signer_id: proof?.signer_id ?? null, - pubkey_source: pubSrc, - }, - error: e?.message || "public key decode/verify failed", - ...instancePayload(), - }); - } - - const sigOk = !!v.ok; - const sigErr = sigOk ? null : (v.reason || "verify failed"); - - const hashMatches = sigOk ? true : (v.reason === "hash_mismatch" ? false : null); - - // 3) schema validation (optional + edge-safe) - let schemaOk = null; - let schemaErrors = null; - - if (wantSchema) { - schemaOk = false; - const verb = String(receipt?.x402?.verb || "").trim(); - - if (!verb) { - schemaErrors = [{ message: "missing receipt.x402.verb" }]; - } else if (VERIFY_SCHEMA_CACHED_ONLY && !hasValidatorCached(verb)) { - warmQueue.add(verb); - startWarmWorker(); - schemaErrors = [{ message: "validator_not_warmed_yet" }]; - - return res.status(202).json({ - ok: false, - checks: { schema_valid: false, hash_matches: hashMatches, signature_valid: sigOk }, - values: { - verb: receipt?.x402?.verb ?? null, - signer_id: proof?.signer_id ?? 
null, - pubkey_source: pubSrc, - claimed_hash: proof?.hash_sha256 ?? null, - }, - errors: { schema_errors: schemaErrors, signature_error: sigErr }, - retry_after_ms: 1000, - ...instancePayload(), - }); - } else { - try { - const validate = VERIFY_SCHEMA_CACHED_ONLY - ? validatorCache.get(verb)?.validate - : await getValidatorForVerb(verb); - if (!validate) { - schemaOk = false; - schemaErrors = [{ message: "validator_missing" }]; - } else { - const ok = validate(receipt); - schemaOk = !!ok; - if (!ok) schemaErrors = ajvErrorsToSimple(validate.errors) || [{ message: "schema validation failed" }]; - } - } catch (e) { - schemaOk = false; - schemaErrors = [{ message: e?.message || "schema validation error" }]; - } - } - } - - return res.json({ - ok: hashMatches === true && sigOk === true && (wantSchema ? schemaOk === true : true), - checks: { schema_valid: schemaOk, hash_matches: hashMatches, signature_valid: sigOk }, - values: { - verb: receipt?.x402?.verb ?? null, - signer_id: proof?.signer_id ?? null, - alg: proof?.alg ?? null, - canonical: proof?.canonical ?? null, - claimed_hash: proof?.hash_sha256 ?? 
null, - pubkey_source: pubSrc, - }, - errors: { schema_errors: schemaErrors, signature_error: sigErr }, - ...instancePayload(), - }); - })(); - - try { - await Promise.race([ - work, - new Promise((_, rej) => setTimeout(() => rej(new Error("verify_timeout")), VERIFY_MAX_MS)), - ]); - } catch (e) { - return res.status(500).json({ - ok: false, - error: e?.message || "verify failed", - checks: { schema_valid: null, hash_matches: null, signature_valid: false }, - ...instancePayload(), - }); - } -}); - -app.listen(PORT, () => { - console.log(`runtime listening on :${PORT}`); -}); +// server.mjs +import express from "express"; +import crypto from "crypto"; +import Ajv from "ajv"; +import addFormats from "ajv-formats"; +import { ethers } from "ethers"; +import net from "net"; + +import { + signReceiptEd25519Sha256, + verifyReceiptEd25519Sha256, + CANONICAL_ID_SORTED_KEYS_V1, +} from "@commandlayer/runtime-core"; + +// ---- instance identity + uptime (add near top) +const INSTANCE_ID = crypto.randomUUID(); +const BOOTED_AT = Date.now(); +const uptimeMs = () => Date.now() - BOOTED_AT; +const instancePayload = () => ({ instance: { id: INSTANCE_ID, uptime_ms: uptimeMs() } }); + +// 2) Add global crash logging so you see the reason in Railway logs +process.on("unhandledRejection", (reason) => { + console.error("[fatal] unhandledRejection", reason); +}); +process.on("uncaughtException", (err) => { + console.error("[fatal] uncaughtException", err); +}); + +const app = express(); +app.use(express.json({ limit: "2mb" })); + +// ---- basic CORS (no dependency) +app.use((req, res, next) => { + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader( + "Access-Control-Allow-Headers", + "Content-Type, Authorization, X-Debug-Token" + ); + res.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS"); + if (req.method === "OPTIONS") return res.status(204).end(); + next(); +}); + +const PORT = Number(process.env.PORT || 8080); + +// ---- runtime config +const ENABLED_VERBS 
= ( + process.env.ENABLED_VERBS || + "fetch,describe,format,clean,parse,summarize,convert,explain,analyze,classify" +) + .split(",") + .map((s) => s.trim()) + .filter(Boolean); + +const SIGNER_ID = + process.env.RECEIPT_SIGNER_ID || + process.env.ENS_NAME || + process.env.CL_SIGNER_ID || + process.env.SIGNER_ID || + "runtime.commandlayer.eth"; + +const SIGNER_KID = process.env.SIGNER_KID || "v1"; + +// ---- key material (support both raw PEM and base64-PEM) +const PRIV_PEM_RAW = process.env.RECEIPT_SIGNING_PRIVATE_KEY_PEM || ""; +const PUB_PEM_RAW = process.env.RECEIPT_SIGNING_PUBLIC_KEY_PEM || ""; + +const PRIV_PEM_B64 = process.env.RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64 || ""; +const PUB_PEM_B64 = process.env.RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64 || ""; + +// ---- service identity / discovery +const SERVICE_NAME = process.env.SERVICE_NAME || "commandlayer-runtime"; +const SERVICE_VERSION = process.env.SERVICE_VERSION || "1.0.0"; +const CANONICAL_BASE = ( + process.env.CANONICAL_BASE_URL || "https://runtime.commandlayer.org" +).replace(/\/+$/, ""); +const API_VERSION = process.env.API_VERSION || "1.0.0"; + +// ENS verifier config +const ETH_RPC_URL = process.env.ETH_RPC_URL || ""; +const VERIFIER_ENS_NAME = + process.env.VERIFIER_ENS_NAME || process.env.ENS_NAME || SIGNER_ID || ""; +const ENS_PUBKEY_TEXT_KEY = + process.env.ENS_PUBKEY_TEXT_KEY || "cl.receipt.pubkey.pem"; + +// IMPORTANT: AJV should fetch schemas from www, but schemas' $id/refs may be commandlayer.org. +// We normalize fetch URLs to https://www.commandlayer.org to avoid redirect/host mismatches. 
+const SCHEMA_HOST = (process.env.SCHEMA_HOST || "https://www.commandlayer.org").replace(
+  /\/+$/,
+  ""
+);
+const SCHEMA_FETCH_TIMEOUT_MS = Number(process.env.SCHEMA_FETCH_TIMEOUT_MS || 15000);
+const SCHEMA_VALIDATE_BUDGET_MS = Number(
+  process.env.SCHEMA_VALIDATE_BUDGET_MS || 15000
+);
+
+// ---- scaling + safety knobs (server-side caps)
+const MAX_JSON_CACHE_ENTRIES = Number(process.env.MAX_JSON_CACHE_ENTRIES || 256);
+const JSON_CACHE_TTL_MS = Number(process.env.JSON_CACHE_TTL_MS || 10 * 60 * 1000);
+const MAX_VALIDATOR_CACHE_ENTRIES = Number(
+  process.env.MAX_VALIDATOR_CACHE_ENTRIES || 128
+);
+const VALIDATOR_CACHE_TTL_MS = Number(
+  process.env.VALIDATOR_CACHE_TTL_MS || 30 * 60 * 1000
+);
+const SERVER_MAX_HANDLER_MS = Number(process.env.SERVER_MAX_HANDLER_MS || 12000);
+
+// fetch hardening
+const FETCH_TIMEOUT_MS = Number(process.env.FETCH_TIMEOUT_MS || 8000);
+const FETCH_MAX_BYTES = Number(process.env.FETCH_MAX_BYTES || 256 * 1024);
+const ENABLE_SSRF_GUARD = String(process.env.ENABLE_SSRF_GUARD || "1") === "1";
+const ALLOW_FETCH_HOSTS = (process.env.ALLOW_FETCH_HOSTS || "")
+  .split(",")
+  .map((s) => s.trim().toLowerCase())
+  .filter(Boolean);
+
+// verify hardening
+const VERIFY_MAX_MS = Number(process.env.VERIFY_MAX_MS || 30000);
+
+// Edge-safe schema verify
+const VERIFY_SCHEMA_CACHED_ONLY =
+  String(process.env.VERIFY_SCHEMA_CACHED_ONLY || "1") === "1";
+
+// Prewarm knobs
+const PREWARM_MAX_VERBS = Number(process.env.PREWARM_MAX_VERBS || 25);
+const PREWARM_TOTAL_BUDGET_MS = Number(process.env.PREWARM_TOTAL_BUDGET_MS || 12000);
+const PREWARM_PER_VERB_BUDGET_MS = Number(
+  process.env.PREWARM_PER_VERB_BUDGET_MS || 5000
+);
+
+// ---- debug gating
+const ENABLE_DEBUG = String(process.env.ENABLE_DEBUG || "0") === "1";
+const DEBUG_TOKEN = process.env.DEBUG_TOKEN || "";
+
+function requireDebug(req, res, next) {
+  // If debug disabled, pretend it doesn't exist.
+  if (!ENABLE_DEBUG) return res.status(404).end();
+
+  // Require token
+  if (!DEBUG_TOKEN) return res.status(404).end();
+
+  const auth = String(req.headers.authorization || "");
+  const xTok = String(req.headers["x-debug-token"] || "");
+  const bearer = auth.toLowerCase().startsWith("bearer ")
+    ? auth.slice(7).trim()
+    : "";
+
+  const ok = (bearer && bearer === DEBUG_TOKEN) || (xTok && xTok === DEBUG_TOKEN);
+
+  if (!ok) return res.status(404).end(); // 404 to avoid oracle/probing
+  next();
+}
+
+function nowIso() {
+  return new Date().toISOString();
+}
+
+function randId(prefix = "trace_") {
+  return prefix + crypto.randomBytes(6).toString("hex");
+}
+
+function normalizePemLoose(input) {
+  if (!input) return null;
+  let s = String(input).replace(/\\n/g, "\n").trim();
+
+  const begin = s.match(/-----BEGIN [^-]+-----/);
+  const end = s.match(/-----END [^-]+-----/);
+  if (!begin || !end) return null;
+
+  const type = begin[0].replace("-----BEGIN ", "").replace("-----", "").trim();
+  const body = s
+    .replace(/-----BEGIN [^-]+-----/g, "")
+    .replace(/-----END [^-]+-----/g, "")
+    .replace(/[\r\n\s]/g, "")
+    .trim();
+
+  if (!body || !/^[A-Za-z0-9+/=]+$/.test(body)) return null;
+
+  const wrapped = body.match(/.{1,64}/g)?.join("\n") || body;
+  return `-----BEGIN ${type}-----\n${wrapped}\n-----END ${type}-----`;
+}
+
+function pemFromB64(b64) {
+  if (!b64) return null;
+  try {
+    const text = Buffer.from(b64, "base64").toString("utf8");
+    return normalizePemLoose(text);
+  } catch {
+    return null;
+  }
+}
+
+function getPrivatePem() {
+  return normalizePemLoose(PRIV_PEM_RAW) || pemFromB64(PRIV_PEM_B64);
+}
+
+function getPublicPemFromEnv() {
+  return normalizePemLoose(PUB_PEM_RAW) || pemFromB64(PUB_PEM_B64);
+}
+
+function enabled(verb) {
+  return ENABLED_VERBS.includes(verb);
+}
+
+function makeError(code, message, extra = {}) {
+  return { status: "error", code, message, ...extra };
+}
+
+function requireBody(req, res) {
+  if (!req.body || typeof req.body !== "object") {
+    res.status(400).json({ ...makeError(400, "Invalid JSON body"), ...instancePayload() });
+    return false;
+  }
+  return true;
+}
+
+// -----------------------
+// SSRF guard for fetch()
+// -----------------------
+function isPrivateIp(ip) {
+  if (!net.isIP(ip)) return false;
+  if (net.isIP(ip) === 6) return true; // block ipv6 by default
+  const parts = ip.split(".").map((n) => Number(n));
+  const [a, b] = parts;
+  if (a === 10) return true;
+  if (a === 127) return true;
+  if (a === 169 && b === 254) return true;
+  if (a === 172 && b >= 16 && b <= 31) return true;
+  if (a === 192 && b === 168) return true;
+  if (a === 0) return true;
+  if (a === 100 && b >= 64 && b <= 127) return true; // CGNAT
+  return false;
+}
+
+async function resolveARecords(hostname) {
+  const dns = await import("dns/promises");
+  try {
+    const addrs = await dns.resolve4(hostname);
+    return Array.isArray(addrs) ? addrs : [];
+  } catch {
+    return [];
+  }
+}
+
+async function ssrfGuardOrThrow(urlStr) {
+  if (!ENABLE_SSRF_GUARD) return;
+  let u;
+  try {
+    u = new URL(urlStr);
+  } catch {
+    throw new Error("fetch requires a valid absolute URL");
+  }
+  if (!/^https?:$/.test(u.protocol)) throw new Error("fetch only allows http(s)");
+  const host = (u.hostname || "").toLowerCase();
+
+  if (ALLOW_FETCH_HOSTS.length) {
+    const ok = ALLOW_FETCH_HOSTS.some((h) => host === h || host.endsWith("."
+ h)); + if (!ok) throw new Error("fetch host not allowed"); + } + + if (host === "localhost" || host.endsWith(".localhost")) throw new Error("fetch host blocked"); + if (host === "169.254.169.254") throw new Error("fetch host blocked"); + + if (net.isIP(host) && isPrivateIp(host)) throw new Error("fetch to private IP blocked"); + + const addrs = await resolveARecords(host); + if (addrs.some(isPrivateIp)) throw new Error("fetch DNS resolves to private IP (blocked)"); +} + +// ----------------------- +// ENS TXT pubkey fetch (ethers v6) +// ----------------------- +let ensCache = { + fetched_at: 0, + ttl_ms: 10 * 60 * 1000, + pem: null, + error: null, + source: null, +}; + +function hasRpc() { + return !!ETH_RPC_URL; +} + +async function withTimeout(promise, ms, label = "timeout") { + if (!ms || ms <= 0) return await promise; + return await Promise.race([ + promise, + new Promise((_, rej) => setTimeout(() => rej(new Error(label)), ms)), + ]); +} + +async function fetchEnsPubkeyPem({ refresh = false } = {}) { + const now = Date.now(); + if (!refresh && ensCache.pem && now - ensCache.fetched_at < ensCache.ttl_ms) { + return { ok: true, pem: ensCache.pem, source: ensCache.source, cache: { ...ensCache } }; + } + if (!VERIFIER_ENS_NAME) { + ensCache = { ...ensCache, fetched_at: now, pem: null, error: "Missing VERIFIER_ENS_NAME", source: null }; + return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; + } + if (!ETH_RPC_URL) { + ensCache = { ...ensCache, fetched_at: now, pem: null, error: "Missing ETH_RPC_URL", source: null }; + return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; + } + try { + const provider = new ethers.JsonRpcProvider(ETH_RPC_URL); + const resolver = await withTimeout( + provider.getResolver(VERIFIER_ENS_NAME), + 6000, + "ens_resolver_timeout" + ); + if (!resolver) throw new Error("No resolver for ENS name"); + + const txt = await 
withTimeout(resolver.getText(ENS_PUBKEY_TEXT_KEY), 6000, "ens_text_timeout"); + const pem = normalizePemLoose(txt); + if (!pem) throw new Error(`ENS text ${ENS_PUBKEY_TEXT_KEY} missing/invalid PEM`); + + ensCache = { ...ensCache, fetched_at: now, pem, error: null, source: "ens" }; + return { ok: true, pem, source: "ens", cache: { ...ensCache } }; + } catch (e) { + ensCache = { + ...ensCache, + fetched_at: now, + pem: null, + error: e?.message || "ens fetch failed", + source: null, + }; + return { ok: false, pem: null, source: null, error: ensCache.error, cache: { ...ensCache } }; + } +} + +// ----------------------- +// AJV schema validation +// ----------------------- +const schemaJsonCache = new Map(); // url -> { fetchedAt, schema } +const validatorCache = new Map(); // verb -> { compiledAt, validate } +const inflightValidator = new Map(); // verb -> Promise + +function cachePrune(map, { ttlMs, maxEntries, tsField = "fetchedAt" } = {}) { + const now = Date.now(); + if (ttlMs && ttlMs > 0) { + for (const [k, v] of map.entries()) { + const t = v?.[tsField] || 0; + if (now - t > ttlMs) map.delete(k); + } + } + if (maxEntries && maxEntries > 0 && map.size > maxEntries) { + const entries = Array.from(map.entries()).sort( + (a, b) => (a[1]?.[tsField] || 0) - (b[1]?.[tsField] || 0) + ); + const toDelete = entries.slice(0, map.size - maxEntries); + for (const [k] of toDelete) map.delete(k); + } +} + +function normalizeSchemaFetchUrl(url) { + if (!url) return url; + let u = String(url); + u = u.replace(/^http:\/\//i, "https://"); + u = u.replace(/^https:\/\/commandlayer\.org/i, "https://www.commandlayer.org"); + u = u.replace(/^https:\/\/www\.commandlayer\.org\/+/, "https://www.commandlayer.org/"); + if (SCHEMA_HOST.startsWith("https://www.commandlayer.org")) { + u = u.replace(/^https:\/\/commandlayer\.org/i, "https://www.commandlayer.org"); + } + return u; +} + +async function fetchJsonWithTimeout(url, timeoutMs) { + const u = normalizeSchemaFetchUrl(url); + 
cachePrune(schemaJsonCache, { + ttlMs: JSON_CACHE_TTL_MS, + maxEntries: MAX_JSON_CACHE_ENTRIES, + tsField: "fetchedAt", + }); + const cached = schemaJsonCache.get(u); + if (cached) return cached.schema; + + const ac = new AbortController(); + const t = setTimeout(() => ac.abort(), timeoutMs); + try { + const resp = await fetch(u, { + method: "GET", + headers: { accept: "application/json" }, + signal: ac.signal, + redirect: "follow", + }); + if (!resp.ok) throw new Error(`schema fetch failed: ${resp.status} ${resp.statusText}`); + const schema = await resp.json(); + schemaJsonCache.set(u, { fetchedAt: Date.now(), schema }); + return schema; + } finally { + clearTimeout(t); + } +} + +function makeAjv() { + const ajv = new Ajv({ + allErrors: true, + strict: false, + validateSchema: false, + loadSchema: async (uri) => await fetchJsonWithTimeout(uri, SCHEMA_FETCH_TIMEOUT_MS), + }); + addFormats(ajv); + return ajv; +} + +function receiptSchemaUrlForVerb(verb) { + return `${SCHEMA_HOST}/schemas/v1.0.0/commons/${verb}/receipts/${verb}.receipt.schema.json`; +} + +async function getValidatorForVerb(verb) { + cachePrune(validatorCache, { + ttlMs: VALIDATOR_CACHE_TTL_MS, + maxEntries: MAX_VALIDATOR_CACHE_ENTRIES, + tsField: "compiledAt", + }); + + const hit = validatorCache.get(verb); + if (hit?.validate) return hit.validate; + if (inflightValidator.has(verb)) return await inflightValidator.get(verb); + + const build = (async () => { + const ajv = makeAjv(); + const url = receiptSchemaUrlForVerb(verb); + + // Preload shared refs (best effort) + try { + const shared = [ + `${SCHEMA_HOST}/schemas/v1.0.0/_shared/receipt.base.schema.json`, + `${SCHEMA_HOST}/schemas/v1.0.0/_shared/x402.schema.json`, + `${SCHEMA_HOST}/schemas/v1.0.0/_shared/identity.schema.json`, + ]; + await Promise.all(shared.map((u) => fetchJsonWithTimeout(u, SCHEMA_FETCH_TIMEOUT_MS).catch(() => null))); + } catch { + // ignore + } + + const schema = await fetchJsonWithTimeout(url, SCHEMA_FETCH_TIMEOUT_MS); + 
const validate = await withTimeout( + ajv.compileAsync(schema), + SCHEMA_VALIDATE_BUDGET_MS, + "ajv_compile_budget_exceeded" + ); + + validatorCache.set(verb, { compiledAt: Date.now(), validate }); + return validate; + })().finally(() => inflightValidator.delete(verb)); + + inflightValidator.set(verb, build); + return await build; +} + +function ajvErrorsToSimple(errors) { + if (!errors || !Array.isArray(errors)) return null; + return errors.slice(0, 25).map((e) => ({ + instancePath: e.instancePath, + schemaPath: e.schemaPath, + keyword: e.keyword, + message: e.message, + })); +} + +// ----------------------- +// Warm queue (edge-safe) +// ----------------------- +const warmQueue = new Set(); +let warmRunning = false; + +function hasValidatorCached(verb) { + return !!validatorCache.get(verb)?.validate; +} + +function startWarmWorker() { + if (warmRunning) return; + warmRunning = true; + + setTimeout(() => { + (async () => { + const started = Date.now(); + try { + while (warmQueue.size > 0) { + if (Date.now() - started > PREWARM_TOTAL_BUDGET_MS) break; + + const verb = warmQueue.values().next().value; + warmQueue.delete(verb); + + if (!verb) continue; + if (hasValidatorCached(verb)) continue; + + try { + await withTimeout(getValidatorForVerb(verb), PREWARM_PER_VERB_BUDGET_MS, "prewarm_per_verb_timeout"); + } catch (e) { + console.warn("[prewarm] verb failed", verb, e?.message || e); + } + } + } catch (e) { + console.error("[prewarm] worker crashed", e?.message || e); + } finally { + warmRunning = false; + if (warmQueue.size > 0) startWarmWorker(); + } + })().catch((e) => { + console.error("[prewarm] detached task rejected", e?.message || e); + warmRunning = false; + }); + }, 0); +} + +// ----------------------- +// receipts (runtime-core: single source of truth) +// ----------------------- +function makeReceipt({ + x402, + trace, + result, + status = "success", + error = null, + delegation_result = null, + actor = null, +}) { + let receipt = { + status, + x402, + 
trace, + ...(delegation_result ? { delegation_result } : {}), + ...(error ? { error } : {}), + ...(status === "success" ? { result } : {}), + metadata: { + proof: { + alg: "ed25519-sha256", + canonical: CANONICAL_ID_SORTED_KEYS_V1, + signer_id: SIGNER_ID, + kid: SIGNER_KID, + hash_sha256: null, + signature_b64: null, + }, + receipt_id: "", + }, + }; + + if (actor) receipt.metadata.actor = actor; + + const privPem = getPrivatePem(); + if (!privPem) throw new Error("Missing RECEIPT_SIGNING_PRIVATE_KEY_PEM or RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64"); + + receipt = signReceiptEd25519Sha256(receipt, { + signer_id: SIGNER_ID, + kid: SIGNER_KID, + canonical: CANONICAL_ID_SORTED_KEYS_V1, + privateKeyPem: privPem, + }); + + return receipt; +} + +// ----------------------- +// deterministic verb implementations +// ----------------------- +async function doFetch(body) { + const url = body?.source || body?.input?.source || body?.input?.url; + if (!url || typeof url !== "string") throw new Error("fetch requires source (url)"); + + await ssrfGuardOrThrow(url); + + const ac = new AbortController(); + const t = setTimeout(() => ac.abort(), FETCH_TIMEOUT_MS); + let resp; + try { + resp = await fetch(url, { method: "GET", signal: ac.signal }); + } finally { + clearTimeout(t); + } + + const reader = resp.body?.getReader?.(); + let received = 0; + const chunks = []; + + if (reader) { + while (true) { + const { value, done } = await reader.read(); + if (done) break; + received += value.byteLength; + if (received > FETCH_MAX_BYTES) break; + chunks.push(value); + } + } + + const buf = chunks.length + ? Buffer.concat(chunks.map((u) => Buffer.from(u))) + : Buffer.from(await resp.text()); + const text = buf.toString("utf8"); + const preview = text.slice(0, 2000); + + return { + items: [ + { + source: url, + query: body?.query ?? null, + include_metadata: body?.include_metadata ?? 
null, + ok: resp.ok, + http_status: resp.status, + headers: Object.fromEntries(resp.headers.entries()), + body_preview: preview, + bytes_read: Math.min(received || buf.length, FETCH_MAX_BYTES), + truncated: (received || buf.length) > FETCH_MAX_BYTES, + }, + ], + }; +} + +function doDescribe(body) { + const input = body?.input || {}; + const subject = String(input.subject || "").trim(); + if (!subject) throw new Error("describe.input.subject required"); + const detail = input.detail_level || "short"; + + const bullets = [ + "Schemas define meaning (requests + receipts).", + "Runtimes can be swapped without breaking interoperability.", + "Receipts can be independently verified (hash + signature).", + ]; + + const description = + detail === "short" + ? `**${subject}** is a standard “API meaning” contract agents can call using published schemas and receipts.` + : `**${subject}** is a semantic contract for agents. It standardizes verbs, strict JSON Schemas (requests + receipts), and verifiable receipts so different runtimes can execute the same intent without semantic drift.`; + + return { description, bullets, properties: { verb: "describe", version: "1.0.0", detail_level: detail } }; +} + +function doFormat(body) { + const input = body?.input || {}; + const content = String(input.content ?? 
""); + const target = input.target_style || "text"; + if (!content.trim()) throw new Error("format.input.content required"); + + let formatted = content; + let style = target; + + if (target === "table") { + const lines = content + .split(/\r?\n/) + .map((s) => s.trim()) + .filter(Boolean); + const rows = []; + for (const ln of lines) { + const m = ln.match(/^([^:]+):\s*(.*)$/); + if (m) rows.push([m[1].trim(), m[2].trim()]); + } + formatted = `| key | value |\n|---|---|\n` + rows.map(([k, v]) => `| ${k} | ${v} |`).join("\n"); + style = "table"; + } + + return { + formatted_content: formatted, + style, + original_length: content.length, + formatted_length: formatted.length, + notes: "Deterministic reference formatter (non-LLM).", + }; +} + +function doClean(body) { + const input = body?.input || {}; + let content = String(input.content ?? ""); + if (!content) throw new Error("clean.input.content required"); + + const ops = Array.isArray(input.operations) ? input.operations : []; + const issues = []; + + const apply = (op) => { + if (op === "normalize_newlines") content = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n"); + if (op === "collapse_whitespace") content = content.replace(/[ \t]+/g, " "); + if (op === "trim") content = content.trim(); + if (op === "remove_empty_lines") content = content.split("\n").filter((l) => l.trim() !== "").join("\n"); + if (op === "redact_emails") { + const before = content; + content = content.replace(/\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/gi, "[redacted-email]"); + if (content !== before) issues.push("emails_redacted"); + } + }; + + for (const op of ops) apply(op); + + return { + cleaned_content: content, + original_length: String(input.content ?? 
"").length, + cleaned_length: content.length, + operations_applied: ops, + issues_detected: issues, + }; +} + +function parseYamlBestEffort(text) { + const out = {}; + const lines = text.split(/\r?\n/); + for (const ln of lines) { + const m = ln.match(/^\s*([^:#]+)\s*:\s*(.*?)\s*$/); + if (m) out[m[1].trim()] = m[2].trim(); + } + return out; +} + +function doParse(body) { + const input = body?.input || {}; + const content = String(input.content ?? ""); + if (!content.trim()) throw new Error("parse.input.content required"); + + const contentType = (input.content_type || "").toLowerCase(); + const mode = input.mode || "best_effort"; + + let parsed = null; + let confidence = 0.75; + const warnings = []; + + if (contentType === "json") { + try { + parsed = JSON.parse(content); + confidence = 0.98; + } catch { + if (mode === "strict") throw new Error("invalid json"); + warnings.push("Invalid JSON; returned empty object in best_effort."); + parsed = {}; + confidence = 0.2; + } + } else if (contentType === "yaml") { + parsed = parseYamlBestEffort(content); + confidence = 0.75; + } else { + try { + parsed = JSON.parse(content); + confidence = 0.9; + } catch { + parsed = parseYamlBestEffort(content); + confidence = Object.keys(parsed).length ? 0.6 : 0.3; + if (!Object.keys(parsed).length) warnings.push("Could not confidently parse content."); + } + } + + const result = { parsed, confidence }; + if (warnings.length) result.warnings = warnings; + if (input.target_schema) result.target_schema = String(input.target_schema); + return result; +} + +function sha256HexUtf8(str) { + return crypto.createHash("sha256").update(String(str), "utf8").digest("hex"); +} + +function doSummarize(body) { + const input = body?.input || {}; + const content = String(input.content ?? 
""); + if (!content.trim()) throw new Error("summarize.input.content required"); + + const style = input.summary_style || "text"; + const format = (input.format_hint || "text").toLowerCase(); + const sentences = content.split(/(?<=[.!?])\s+/).filter(Boolean); + + let summary = ""; + if (style === "bullet_points") { + const picks = sentences.slice(0, 3).map((s) => s.replace(/\s+/g, " ").trim()); + summary = picks.join(" "); + } else { + summary = sentences.slice(0, 2).join(" ").trim(); + } + if (!summary) summary = content.slice(0, 400).trim(); + + const srcHash = sha256HexUtf8(content); + const cr = summary.length ? Number((content.length / summary.length).toFixed(3)) : 0; + + return { + summary, + format: format === "markdown" ? "markdown" : "text", + compression_ratio: cr, + source_hash: srcHash, + }; +} + +function doConvert(body) { + const input = body?.input || {}; + const content = String(input.content ?? ""); + const src = String(input.source_format ?? "").toLowerCase(); + const tgt = String(input.target_format ?? 
"").toLowerCase(); + if (!content.trim()) throw new Error("convert.input.content required"); + if (!src) throw new Error("convert.input.source_format required"); + if (!tgt) throw new Error("convert.input.target_format required"); + + let converted = content; + const warnings = []; + let lossy = false; + + if (src === "json" && tgt === "csv") { + let obj; + try { + obj = JSON.parse(content); + } catch { + throw new Error("convert json->csv requires valid JSON"); + } + if (obj && typeof obj === "object" && !Array.isArray(obj)) { + const keys = Object.keys(obj); + const vals = keys.map((k) => String(obj[k])); + converted = `${keys.join(",")}\n${vals.join(",")}`; + lossy = true; + warnings.push("JSON->CSV is lossy (types/nesting may be flattened)."); + } else { + throw new Error("convert json->csv supports only flat JSON objects"); + } + } else { + warnings.push(`No deterministic converter for ${src}->${tgt}; echoing content.`); + } + + return { converted_content: converted, source_format: src, target_format: tgt, lossy, warnings }; +} + +function doExplain(body) { + const input = body?.input || {}; + const subject = String(input.subject || "").trim(); + if (!subject) throw new Error("explain.input.subject required"); + + const detail = input.detail_level || "short"; + const core = [ + `A “receipt” is verifiable evidence that an execution happened under a specific verb + schema version.`, + `It includes the structured output plus a cryptographic hash and signature.`, + `Because the schema is public, anyone can independently validate the receipt later.`, + ]; + + const steps = [ + "1) Validate the request against the published request schema.", + "2) Execute the verb and produce structured output.", + "3) Build the receipt (base fields + result).", + "4) Canonicalize + hash the unsigned receipt.", + "5) Sign the hash with the runtime signer key.", + "6) Anyone can verify schema validity + hash match + signature (optionally resolving pubkey from ENS).", + ]; + + const 
explanation = + `**${subject}** are cryptographically verifiable execution artifacts that bind intent (verb+version), semantics (schema), and output into a signed proof.\n\n` + + core.map((s) => `- ${s}`).join("\n"); + + const result = { explanation, summary: "Receipts are evidence: validate schema + hash + signature." }; + if (detail !== "short") result.steps = steps; + return result; +} + +function doAnalyze(body) { + const input = String(body?.input ?? ""); + if (!input.trim()) throw new Error("analyze.input required (string)"); + + const goal = String(body?.goal ?? "").trim(); + const hints = Array.isArray(body?.hints) ? body.hints.map(String) : []; + const lines = input.split(/\r?\n/).filter((l) => l.trim() !== ""); + const words = input.trim().split(/\s+/).filter(Boolean); + + const containsUrls = /\bhttps?:\/\/[^\s]+/i.test(input); + const containsEmails = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i.test(input); + const containsJsonMarkers = /[{[\]}]/.test(input); + const containsNumbers = /\b\d+(\.\d+)?\b/.test(input); + + const labels = []; + if (containsJsonMarkers) labels.push("structured"); + if (containsUrls) labels.push("contains_urls"); + if (containsEmails) labels.push("contains_emails"); + + let score = 0; + if (containsEmails) score += 0.25; + if (containsUrls) score += 0.2; + if (containsJsonMarkers) score += 0.1; + if (containsNumbers) score += 0.05; + score = Math.min(1, Number(score.toFixed(3))); + + const summary = `Deterministic analysis: ${labels.join(",") || "plain_text"}. Goal="${goal || "n/a"}". Score=${score}.`; + const insights = [ + `Input length: ${input.length} chars; ~${words.length} words; ${lines.length} non-empty lines.`, + goal ? `Goal: ${goal}` : "Goal: (none)", + `Hints provided: ${hints.length}.`, + ]; + + return { summary, insights, labels, score }; +} + +function doClassify(body) { + const actor = String(body?.actor ?? 
"").trim(); + if (!actor) throw new Error("classify.actor required"); + + const input = body?.input || {}; + const content = String(input.content ?? ""); + if (!content.trim()) throw new Error("classify.input.content required"); + + const maxLabels = Number(body?.limits?.max_labels || 5); + + const labels = []; + const scores = []; + + const hasUrl = /\bhttps?:\/\/[^\s]+/i.test(content); + const hasEmail = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i.test(content); + const hasCode = /\b(error|exception|stack|trace|cannot get|http\/1\.1|curl)\b/i.test(content.toLowerCase()); + const hasFinance = /\b(invoice|payment|usd|\$|bank|wire|crypto)\b/i.test(content.toLowerCase()); + + const push = (lbl, sc) => { + labels.push(lbl); + scores.push(Number(sc.toFixed(6))); + }; + + if (hasUrl) push("contains_urls", 0.733333); + if (hasEmail) push("contains_emails", 0.5); + if (hasCode) push("code_or_logs", 0.4375); + if (hasFinance) push("finance", 0.25); + if (!labels.length) push("general", 0.25); + + const trimmedLabels = labels.slice(0, Math.min(128, maxLabels)); + const trimmedScores = scores.slice(0, trimmedLabels.length); + + return { labels: trimmedLabels, scores: trimmedScores, taxonomy: ["root", trimmedLabels[0] || "general"] }; +} + +// Router: dispatch by verb +const handlers = { + fetch: doFetch, + describe: async (b) => doDescribe(b), + format: async (b) => doFormat(b), + clean: async (b) => doClean(b), + parse: async (b) => doParse(b), + summarize: async (b) => doSummarize(b), + convert: async (b) => doConvert(b), + explain: async (b) => doExplain(b), + analyze: async (b) => doAnalyze(b), + classify: async (b) => doClassify(b), +}; + +async function handleVerb(verb, req, res) { + if (!enabled(verb)) return res.status(404).json({ ...makeError(404, `Verb not enabled: ${verb}`), ...instancePayload() }); + if (!requireBody(req, res)) return; + + const started = Date.now(); + + const rawParent = req.body?.trace?.parent_trace_id ?? 
req.body?.x402?.extras?.parent_trace_id ?? null; + const parentTraceId = typeof rawParent === "string" && rawParent.trim().length ? rawParent.trim() : null; + + const trace = { + trace_id: randId("trace_"), + ...(parentTraceId ? { parent_trace_id: parentTraceId } : {}), + started_at: nowIso(), + completed_at: null, + duration_ms: null, + provider: process.env.RAILWAY_SERVICE_NAME || "runtime", + }; + + try { + const x402 = req.body?.x402 || { verb, version: "1.0.0", entry: `x402://${verb}agent.eth/${verb}/v1.0.0` }; + + const callerTimeout = Number(req.body?.limits?.timeout_ms || req.body?.limits?.max_latency_ms || 0); + const timeoutMs = Math.min( + SERVER_MAX_HANDLER_MS, + callerTimeout && callerTimeout > 0 ? callerTimeout : SERVER_MAX_HANDLER_MS + ); + + const work = Promise.resolve(handlers[verb](req.body)); + const result = timeoutMs + ? await Promise.race([work, new Promise((_, rej) => setTimeout(() => rej(new Error("timeout")), timeoutMs))]) + : await work; + + trace.completed_at = nowIso(); + trace.duration_ms = Date.now() - started; + + const actor = req.body?.actor + ? { id: String(req.body.actor), role: "user" } + : req.body?.x402?.tenant + ? { id: String(req.body.x402.tenant), role: "tenant" } + : null; + + const receipt = makeReceipt({ x402, trace, result, status: "success", actor }); + return res.json(receipt); + } catch (e) { + trace.completed_at = nowIso(); + trace.duration_ms = Date.now() - started; + + const x402 = req.body?.x402 || { verb, version: "1.0.0", entry: `x402://${verb}agent.eth/${verb}/v1.0.0` }; + + const actor = req.body?.actor + ? { id: String(req.body.actor), role: "user" } + : req.body?.x402?.tenant + ? 
{ id: String(req.body.x402.tenant), role: "tenant" } + : null; + + const err = { + code: String(e?.code || "INTERNAL_ERROR"), + message: String(e?.message || "unknown error").slice(0, 2048), + retryable: String(e?.message || "").includes("timeout"), + details: { verb }, + }; + + const receipt = makeReceipt({ x402, trace, status: "error", error: err, actor }); + return res.status(500).json(receipt); + } +} + +// ----------------------- +// health/index +// ----------------------- +app.get("/", (req, res) => { + res.setHeader("Content-Type", "application/json; charset=utf-8"); + const verbs = (ENABLED_VERBS || []).map((v) => `/${v}/v${API_VERSION}`); + return res.status(200).end( + JSON.stringify({ + ok: true, + service: SERVICE_NAME, + version: SERVICE_VERSION, + api_version: API_VERSION, + base: CANONICAL_BASE, + health: "/health", + verify: "/verify", + verbs, + time: nowIso(), + }) + ); +}); + +app.get("/health", (req, res) => { + res.setHeader("Content-Type", "application/json; charset=utf-8"); + return res.status(200).end( + JSON.stringify({ + ok: true, + service: SERVICE_NAME, + version: SERVICE_VERSION, + api_version: API_VERSION, + base: CANONICAL_BASE, + node: process.version, + port: PORT, + enabled_verbs: ENABLED_VERBS, + signer_id: SIGNER_ID, + signer_ok: !!getPrivatePem(), + time: nowIso(), + ...instancePayload(), + }) + ); +}); + +// ----------------------- +// debug (gated) +// ----------------------- +app.get("/debug/env", requireDebug, (req, res) => { + const privPem = getPrivatePem(); + const pubPem = getPublicPemFromEnv(); + + res.json({ + ok: true, + node: process.version, + port: PORT, + service: process.env.RAILWAY_SERVICE_NAME || "runtime", + enabled_verbs: ENABLED_VERBS, + signer_id: SIGNER_ID, + signer_kid: SIGNER_KID, + signer_ok: !!privPem, + has_priv_b64: !!PRIV_PEM_B64, + has_priv_pem: !!normalizePemLoose(PRIV_PEM_RAW), + derived_priv_pem: !!privPem, + has_pub_b64: !!PUB_PEM_B64, + has_pub_pem: !!normalizePemLoose(PUB_PEM_RAW), + 
derived_pub_pem: !!pubPem, + verifier_ens_name: VERIFIER_ENS_NAME || null, + ens_pubkey_text_key: ENS_PUBKEY_TEXT_KEY, + has_rpc: hasRpc(), + schema_host: SCHEMA_HOST, + schema_fetch_timeout_ms: SCHEMA_FETCH_TIMEOUT_MS, + schema_validate_budget_ms: SCHEMA_VALIDATE_BUDGET_MS, + verify_schema_cached_only: VERIFY_SCHEMA_CACHED_ONLY, + enable_ssrf_guard: ENABLE_SSRF_GUARD, + fetch_timeout_ms: FETCH_TIMEOUT_MS, + fetch_max_bytes: FETCH_MAX_BYTES, + verify_max_ms: VERIFY_MAX_MS, + cache: { + max_json_cache_entries: MAX_JSON_CACHE_ENTRIES, + json_cache_ttl_ms: JSON_CACHE_TTL_MS, + max_validator_cache_entries: MAX_VALIDATOR_CACHE_ENTRIES, + validator_cache_ttl_ms: VALIDATOR_CACHE_TTL_MS, + }, + server_max_handler_ms: SERVER_MAX_HANDLER_MS, + prewarm: { + max_verbs: PREWARM_MAX_VERBS, + total_budget_ms: PREWARM_TOTAL_BUDGET_MS, + per_verb_budget_ms: PREWARM_PER_VERB_BUDGET_MS, + }, + service_name: SERVICE_NAME, + service_version: SERVICE_VERSION, + api_version: API_VERSION, + canonical_base_url: CANONICAL_BASE, + canonical_id: CANONICAL_ID_SORTED_KEYS_V1, + debug: { enable_debug: ENABLE_DEBUG, has_debug_token: !!DEBUG_TOKEN }, + }); +}); + +app.get("/debug/enskey", requireDebug, async (req, res) => { + const refresh = String(req.query.refresh || "0") === "1"; + const out = await fetchEnsPubkeyPem({ refresh }); + res.json({ + ok: !!out.ok, + pubkey_source: out.source || null, + ens_name: VERIFIER_ENS_NAME || null, + txt_key: ENS_PUBKEY_TEXT_KEY, + cache: out.cache + ? { fetched_at: new Date(out.cache.fetched_at).toISOString(), ttl_ms: out.cache.ttl_ms } + : null, + preview: out.pem ? out.pem.slice(0, 90) + "..." 
: null, + error: out.error || null, + }); +}); + +app.get("/debug/validators", requireDebug, (req, res) => { + res.json({ + ok: true, + cached: Array.from(validatorCache.keys()), + cache_sizes: { schemaJsonCache: schemaJsonCache.size, validatorCache: validatorCache.size }, + inflight: Array.from(inflightValidator.keys()), + warm_queue_size: warmQueue.size, + warm_running: warmRunning, + ...instancePayload(), + }); +}); + +app.post("/debug/prewarm", requireDebug, (req, res) => { + const verbs = Array.isArray(req.body?.verbs) ? req.body.verbs : []; + const cleaned = verbs + .map((v) => String(v || "").trim()) + .filter(Boolean) + .slice(0, PREWARM_MAX_VERBS); + + const supported = cleaned.filter((v) => handlers[v]); + for (const v of supported) warmQueue.add(v); + + res.json({ + ok: true, + queued: supported, + already_cached: supported.filter(hasValidatorCached), + queue_size: warmQueue.size, + }); + + startWarmWorker(); +}); + +// ----------------------- +// verb routes: //v1.0.0 +// ----------------------- +for (const v of Object.keys(handlers)) { + app.post(`/${v}/v1.0.0`, (req, res) => handleVerb(v, req, res)); +} + +// ----------------------- +// verify endpoint (schema validation + ENS pubkey) +// ----------------------- +app.post("/verify", async (req, res) => { + const work = (async () => { + const receipt = req.body; + + const wantEns = String(req.query.ens || "0") === "1"; + const refresh = String(req.query.refresh || "0") === "1"; + const wantSchema = String(req.query.schema || "0") === "1"; + + const proof = receipt?.metadata?.proof; + if (!proof?.signature_b64 || !proof?.hash_sha256) { + return res.status(400).json({ + ok: false, + checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, + error: "missing metadata.proof.signature_b64 or hash_sha256", + ...instancePayload(), + }); + } + + // 1) pick pubkey (env first, then ENS if requested and parseable) + let pubPem = getPublicPemFromEnv(); + let pubSrc = pubPem ? 
(normalizePemLoose(PUB_PEM_RAW) ? "env-pem" : "env-b64") : null; + + if (wantEns) { + const ensOut = await fetchEnsPubkeyPem({ refresh }); + if (ensOut.ok && ensOut.pem) { + pubPem = ensOut.pem; + pubSrc = "ens"; + } + } + + if (!pubPem) { + return res.status(400).json({ + ok: false, + checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, + error: + "no public key available (set RECEIPT_SIGNING_PUBLIC_KEY_PEM/_B64 or use ens=1 with valid ENS TXT)", + ...instancePayload(), + }); + } + + // 2) verify signature/hash via runtime-core + let v; + try { + v = verifyReceiptEd25519Sha256(receipt, { + publicKeyPemOrDer: pubPem, + allowedCanonicals: [CANONICAL_ID_SORTED_KEYS_V1], + }); + } catch (e) { + return res.status(400).json({ + ok: false, + checks: { schema_valid: wantSchema ? false : null, hash_matches: null, signature_valid: false }, + values: { + verb: receipt?.x402?.verb ?? null, + signer_id: proof?.signer_id ?? null, + pubkey_source: pubSrc, + }, + error: e?.message || "public key decode/verify failed", + ...instancePayload(), + }); + } + + const sigOk = !!v.ok; + const sigErr = sigOk ? null : (v.reason || "verify failed"); + + const hashMatches = sigOk ? true : (v.reason === "hash_mismatch" ? false : null); + + // 3) schema validation (optional + edge-safe) + let schemaOk = null; + let schemaErrors = null; + + if (wantSchema) { + schemaOk = false; + const verb = String(receipt?.x402?.verb || "").trim(); + + if (!verb) { + schemaErrors = [{ message: "missing receipt.x402.verb" }]; + } else if (VERIFY_SCHEMA_CACHED_ONLY && !hasValidatorCached(verb)) { + warmQueue.add(verb); + startWarmWorker(); + schemaErrors = [{ message: "validator_not_warmed_yet" }]; + + return res.status(202).json({ + ok: false, + checks: { schema_valid: false, hash_matches: hashMatches, signature_valid: sigOk }, + values: { + verb: receipt?.x402?.verb ?? null, + signer_id: proof?.signer_id ?? 
null, + pubkey_source: pubSrc, + claimed_hash: proof?.hash_sha256 ?? null, + }, + errors: { schema_errors: schemaErrors, signature_error: sigErr }, + retry_after_ms: 1000, + ...instancePayload(), + }); + } else { + try { + const validate = VERIFY_SCHEMA_CACHED_ONLY + ? validatorCache.get(verb)?.validate + : await getValidatorForVerb(verb); + if (!validate) { + schemaOk = false; + schemaErrors = [{ message: "validator_missing" }]; + } else { + const ok = validate(receipt); + schemaOk = !!ok; + if (!ok) schemaErrors = ajvErrorsToSimple(validate.errors) || [{ message: "schema validation failed" }]; + } + } catch (e) { + schemaOk = false; + schemaErrors = [{ message: e?.message || "schema validation error" }]; + } + } + } + + return res.json({ + ok: hashMatches === true && sigOk === true && (wantSchema ? schemaOk === true : true), + checks: { schema_valid: schemaOk, hash_matches: hashMatches, signature_valid: sigOk }, + values: { + verb: receipt?.x402?.verb ?? null, + signer_id: proof?.signer_id ?? null, + alg: proof?.alg ?? null, + canonical: proof?.canonical ?? null, + claimed_hash: proof?.hash_sha256 ?? 
null, + pubkey_source: pubSrc, + }, + errors: { schema_errors: schemaErrors, signature_error: sigErr }, + ...instancePayload(), + }); + })(); + + try { + await Promise.race([ + work, + new Promise((_, rej) => setTimeout(() => rej(new Error("verify_timeout")), VERIFY_MAX_MS)), + ]); + } catch (e) { + return res.status(500).json({ + ok: false, + error: e?.message || "verify failed", + checks: { schema_valid: null, hash_matches: null, signature_valid: false }, + ...instancePayload(), + }); + } +}); + +app.listen(PORT, () => { + console.log(`runtime listening on :${PORT}`); +}); diff --git a/tests/smoke.mjs b/tests/smoke.mjs index 1e0eddb..b977dcb 100644 --- a/tests/smoke.mjs +++ b/tests/smoke.mjs @@ -1,312 +1,312 @@ -import assert from 'node:assert/strict'; -import { generateKeyPairSync } from "node:crypto"; -import { spawn } from 'node:child_process'; -import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; -import { tmpdir } from 'node:os'; -import { join } from 'node:path'; -import { createPublicKey, randomBytes } from 'node:crypto'; -import { execFileSync } from 'node:child_process'; - -// --- smoke signing keys (ephemeral, per-run) -function __b64(s){ return Buffer.from(s, "utf8").toString("base64"); } - -// Create an ephemeral Ed25519 keypair so the runtime can mint receipts during smoke. -// This avoids depending on developer machine env vars. 
-const { privateKey: __smokePrivKey, publicKey: __smokePubKey } = generateKeyPairSync("ed25519"); -const __smokePrivPem = __smokePrivKey.export({ format: "pem", type: "pkcs8" }); -const __smokePubPem = __smokePubKey.export({ format: "pem", type: "spki" }); -const __smokePrivB64 = __b64(__smokePrivPem); -const __smokePubB64 = __b64(__smokePubPem); - - - -let __smokeStdout = ""; -let __smokeStderr = ""; - -function hookChildLogs(child){ - if (!child) return; - try { child.stdout?.on("data", (d) => { const t = String(d); __smokeStdout += t; process.stdout.write(t); }); } catch {} - try { child.stderr?.on("data", (d) => { const t = String(d); __smokeStderr += t; process.stderr.write(t); }); } catch {} - child.on("exit", (code, signal) => { - if (code !== 0) { - console.error(`\n[smoke] child exited code=${code} signal=${signal}`); - if (__smokeStdout.trim()) console.error("\n[smoke] child stdout:\n" + __smokeStdout); - if (__smokeStderr.trim()) console.error("\n[smoke] child stderr:\n" + __smokeStderr); - } - }); -} - - -const SMOKE_HOST = "127.0.0.1"; -const SMOKE_PORT = Number(process.env.SMOKE_PORT || 19080); -const SMOKE_BASE = `http://${SMOKE_HOST}:${SMOKE_PORT}`; -process.on("unhandledRejection", (e) => { console.error(e); process.exit(1); }); -process.on("uncaughtException", (e) => { console.error(e); process.exit(1); }); - - - -// --- smoke resources to cleanup -let __smokeChild = null; - -async function __smokeCleanup() { // kill spawned __smokeChild if still around - const c = __smokeChild; - if (c && c.pid && !c.killed) { - try { c.kill("SIGTERM"); } catch {} - // hard kill if it doesn't die quickly - await new Promise((r) => setTimeout(r, 500)); - try { if (!c.killed) c.kill("SIGKILL"); } catch {} - } - - // close child stdio pipes (prevents lingering WriteStream/Socket handles) - try { c?.stdout?.destroy?.(); } catch {} - try { c?.stderr?.destroy?.(); } catch {} -} - -// --- SMOKE watchdog: if this script hangs, kill it and dump active handles/requests 
-const SMOKE_WATCHDOG_MS = Number(process.env.SMOKE_WATCHDOG_MS || 20000); - -const _watchdog = setTimeout(() => { - console.error("\n[smoke] WATCHDOG TIMEOUT after", SMOKE_WATCHDOG_MS, "ms"); - try { - const handles = process._getActiveHandles?.() || []; - const requests = process._getActiveRequests?.() || []; - console.error("[smoke] active handles:", handles.map(h => h?.constructor?.name || typeof h)); - console.error("[smoke] active requests:", requests.map(r => r?.constructor?.name || typeof r)); - // print a bit more detail for common culprits - for (const h of handles) { - const name = h?.constructor?.name || ""; - if (name.includes("Socket") || name.includes("Server") || name.includes("ChildProcess") || name.includes("Timeout")) { - console.error("[smoke] handle detail:", name, h); - } - } - } catch (e) { - console.error("[smoke] watchdog dump failed:", e?.message || e); - } - process.exit(1); -}, SMOKE_WATCHDOG_MS); -_watchdog.unref(); - -async function verifyWithRetry(base, receipt, { tries = 6 } = {}) { - for (let i = 0; i < tries; i++) { - const resp = await fetch(base + "/verify?schema=1", { - method: "POST", - headers: {"connection":"close", "content-type": "application/json" }, - body: JSON.stringify(receipt), - }); - - // Edge-safe mode can return 202 while validator warms - if (resp.status === 202) { - try { - const body = await resp.json(); - const wait = Math.min(1500, Math.max(250, Number(body?.retry_after_ms || 500))); - await new Promise((r) => setTimeout(r, wait)); - continue; - } catch { - await new Promise((r) => setTimeout(r, 500)); - continue; - } - } - - const body = await resp.json(); - return { resp, body }; - } - throw new Error("verifyWithRetry: exceeded retries (still getting 202?)"); -} - -globalThis.__SMOKE_LAST__ = { label: null, status: null, body: null }; - -function smokeCapture(label, resp, body) { - globalThis.__SMOKE_LAST__ = { - label, - status: resp?.status ?? 
null, - body - }; -} - -function smokeDump() { - try { - const x = globalThis.__SMOKE_LAST__ || {}; - console.log("\n=== SMOKE LAST RESPONSE ==="); - console.log("label:", x.label); - console.log("status:", x.status); - console.log("body:", JSON.stringify(x.body, null, 2)); - console.log("=== /SMOKE LAST RESPONSE ===\n"); - } catch (e) { - console.log("smokeDump failed:", e?.message || e); - } -} - -function b64(s) { return Buffer.from(String(s), "utf8").toString("base64"); } - -// Deterministic: generate a fresh Ed25519 signer for the __smokeChild process each smoke run. -// This avoids relying on machine env/.env and guarantees /verify works. -const { publicKey, privateKey } = generateKeyPairSync("ed25519"); -const PRIV_PEM = privateKey.export({ format: "pem", type: "pkcs8" }); -const PUB_PEM = publicKey.export({ format: "pem", type: "spki" }); - -const SMOKE_ENV = { - ...process.env, - RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64: b64(PRIV_PEM), - RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64: b64(PUB_PEM), - RECEIPT_SIGNER_ID: process.env.RECEIPT_SIGNER_ID || "runtime.commandlayer.eth", - SIGNER_KID: process.env.SIGNER_KID || "v1", -}; - -const PORT = 19080; -const base = `http://127.0.0.1:${PORT}`; - -function b64File(path) { - return readFileSync(path).toString('base64'); -} - -function ed25519TxtFromPublicPem(path) { - const pem = readFileSync(path, 'utf8'); - const der = createPublicKey(pem).export({ format: 'der', type: 'spki' }); - const raw = Buffer.from(der).subarray(-32); - return `ed25519:${raw.toString('base64')}`; -} - -function sleep(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -async function waitForHealth(timeoutMs = 7000) { - const start = Date.now(); - while (Date.now() - start < timeoutMs) { - if (__smokeChild && __smokeChild.exitCode !== null) { - throw new Error(`__smokeChild exited early (code=${__smokeChild.exitCode}, signal=${__smokeChild.signalCode})`); - } - - try { - const r = await fetch(`${base}/health`, { headers: 
{"connection":"close"} }); - if (r.ok) return; - } catch {} - await sleep(120); - } - throw new Error('__smokeChild did not become healthy in time'); -} - -const tmp = mkdtempSync(join(tmpdir(), 'runtime-test-')); -const priv = join(tmp, 'private.pem'); -const pub = join(tmp, 'public.pem'); - -try { - execFileSync('openssl', ['genpkey', '-algorithm', 'Ed25519', '-out', priv], { stdio: 'ignore' }); - execFileSync('openssl', ['pkey', '-in', priv, '-pubout', '-out', pub], { stdio: 'ignore' }); - - const env = { - ...process.env, - PORT: String(PORT), - RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64: b64File(priv), - RECEIPT_SIGNING_PUBLIC_KEY: ed25519TxtFromPublicPem(pub), - RECEIPT_SIGNER_ID: 'runtime.test', - DEBUG_ROUTES_ENABLED: '1', - DEBUG_BEARER_TOKEN: 'secret-token', - REQUEST_SCHEMA_VALIDATION: '0', - CORS_ALLOW_ORIGINS: 'http://allowed.local', - }; - - __smokeChild = spawn("node", ["server.mjs"], { - stdio: ["ignore", "pipe", "pipe"], - env: { - ...process.env, - HOST: "127.0.0.1", - PORT: String(process.env.SMOKE_PORT || 19080), - ETH_RPC_URL: process.env.ETH_RPC_URL || "https://cloudflare-eth.com", CL_RECEIPT_SIGNER: process.env.CL_RECEIPT_SIGNER || "describeagent.eth", - CL_SIGNER_ID: process.env.CL_SIGNER_ID || "describeagent.eth", -}, - RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64: __smokePrivB64, - RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64: __smokePubB64, -}); - -hookChildLogs(__smokeChild); - -let logs = ''; - __smokeChild.stdout.on('data', (d) => (logs += d.toString())); - __smokeChild.stderr.on('data', (d) => (logs += d.toString())); - - try { - await waitForHealth(); - - // signer readiness - const healthResp = await fetch(`${base}/health`, { headers: {"connection":"close"} }); - assert.equal(healthResp.ok, true); - const health = await healthResp.json(); - console.log("[smoke] /health:", JSON.stringify(health, null, 2)); -assert.equal(health.ok, true); - if (process.env.ETH_RPC_URL) { - if (process.env.SMOKE_EXPECT_SIGNER_OK === "1") { - if 
(process.env.SMOKE_EXPECT_SIGNER_OK === "1") { - assert.equal(health.signer_ok, true); - } - - } - - } - - // verb execution - const verbResp = await fetch(`${base}/describe/v1.0.0`, { - method: 'POST', - headers: {"connection":"close", 'content-type': 'application/json' }, - body: JSON.stringify({ - x402: { entry: 'x402://describeagent.eth/describe/v1.0.0', verb: 'describe', version: '1.0.0' }, - input: { subject: 'CommandLayer', detail_level: 'short' }, - }), - }); - assert.equal(verbResp.ok, true); - const receipt = await verbResp.json(); - assert.equal(receipt.status, 'success'); - assert.ok(receipt.metadata?.proof?.signature_b64); - - // verify pass path - const verifyResp = await fetch(`${base}/verify`, { - method: 'POST', - headers: {"connection":"close", 'content-type': 'application/json' }, - body: JSON.stringify(receipt), - }); - assert.equal(verifyResp.ok, true); - const { resp: verifyResp2, body: verify } = await verifyWithRetry(base, receipt); - // keep verifyResp symbol for existing asserts - verifyResp = verifyResp2; - assert.equal(verify.ok, true); - assert.equal(verify.checks.signature_valid, true); - assert.equal(verify.checks.hash_matches, true); - - // verify fail path (tamper hash) - const tampered = structuredClone(receipt); - tampered.metadata.proof.hash_sha256 = randomBytes(32).toString('hex'); - const badVerifyResp = await fetch(`${base}/verify`, { - method: 'POST', - headers: {"connection":"close", 'content-type': 'application/json' }, - body: JSON.stringify(tampered), - }); - assert.equal(badVerifyResp.ok, true); - const badVerify = await badVerifyResp.json(); - assert.equal(badVerify.ok, false); - assert.equal(badVerify.checks.hash_matches, false); - - // debug route auth - const debugNoToken = await fetch(`${base}/debug/env`, { headers: {"connection":"close"} }); - assert.equal(debugNoToken.status, 401); - - const debugWithToken = await fetch(`${base}/debug/env`, { - headers: {"connection":"close", authorization: 'Bearer secret-token' 
}, - }); - assert.equal(debugWithToken.ok, true); - const debug = await debugWithToken.json(); - assert.equal(debug.debug_routes_enabled, true); - assert.equal(debug.cors.allow_origins.includes('http://allowed.local'), true); - } finally { - __smokeChild.kill('SIGTERM'); - await sleep(150); - if (!__smokeChild.killed) __smokeChild.kill('SIGKILL'); - } -} catch (err) { - writeFileSync('/tmp/runtime-smoke-failure.log', String(err?.stack || err)); - throw err; -} finally { - rmSync(tmp, { recursive: true, force: true }); -} - -// Ensure smoke doesn't hang due to lingering handles -setTimeout(() => process.exit(0), 0).unref(); +import assert from 'node:assert/strict'; +import { generateKeyPairSync } from "node:crypto"; +import { spawn } from 'node:child_process'; +import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; +import { createPublicKey, randomBytes } from 'node:crypto'; +import { execFileSync } from 'node:child_process'; + +// --- smoke signing keys (ephemeral, per-run) +function __b64(s){ return Buffer.from(s, "utf8").toString("base64"); } + +// Create an ephemeral Ed25519 keypair so the runtime can mint receipts during smoke. +// This avoids depending on developer machine env vars. 
+const { privateKey: __smokePrivKey, publicKey: __smokePubKey } = generateKeyPairSync("ed25519"); +const __smokePrivPem = __smokePrivKey.export({ format: "pem", type: "pkcs8" }); +const __smokePubPem = __smokePubKey.export({ format: "pem", type: "spki" }); +const __smokePrivB64 = __b64(__smokePrivPem); +const __smokePubB64 = __b64(__smokePubPem); + + + +let __smokeStdout = ""; +let __smokeStderr = ""; + +function hookChildLogs(child){ + if (!child) return; + try { child.stdout?.on("data", (d) => { const t = String(d); __smokeStdout += t; process.stdout.write(t); }); } catch {} + try { child.stderr?.on("data", (d) => { const t = String(d); __smokeStderr += t; process.stderr.write(t); }); } catch {} + child.on("exit", (code, signal) => { + if (code !== 0) { + console.error(`\n[smoke] child exited code=${code} signal=${signal}`); + if (__smokeStdout.trim()) console.error("\n[smoke] child stdout:\n" + __smokeStdout); + if (__smokeStderr.trim()) console.error("\n[smoke] child stderr:\n" + __smokeStderr); + } + }); +} + + +const SMOKE_HOST = "127.0.0.1"; +const SMOKE_PORT = Number(process.env.SMOKE_PORT || 19080); +const SMOKE_BASE = `http://${SMOKE_HOST}:${SMOKE_PORT}`; +process.on("unhandledRejection", (e) => { console.error(e); process.exit(1); }); +process.on("uncaughtException", (e) => { console.error(e); process.exit(1); }); + + + +// --- smoke resources to cleanup +let __smokeChild = null; + +async function __smokeCleanup() { // kill spawned __smokeChild if still around + const c = __smokeChild; + if (c && c.pid && !c.killed) { + try { c.kill("SIGTERM"); } catch {} + // hard kill if it doesn't die quickly + await new Promise((r) => setTimeout(r, 500)); + try { if (!c.killed) c.kill("SIGKILL"); } catch {} + } + + // close child stdio pipes (prevents lingering WriteStream/Socket handles) + try { c?.stdout?.destroy?.(); } catch {} + try { c?.stderr?.destroy?.(); } catch {} +} + +// --- SMOKE watchdog: if this script hangs, kill it and dump active handles/requests 
// ---------------------------------------------------------------------------
// Watchdog: if the smoke script wedges, dump active handles/requests so the
// culprit is visible in CI logs, then abort with a non-zero exit.
// ---------------------------------------------------------------------------
const SMOKE_WATCHDOG_MS = Number(process.env.SMOKE_WATCHDOG_MS || 20000);

const _watchdog = setTimeout(() => {
  console.error("\n[smoke] WATCHDOG TIMEOUT after", SMOKE_WATCHDOG_MS, "ms");
  try {
    const handles = process._getActiveHandles?.() || [];
    const requests = process._getActiveRequests?.() || [];
    console.error("[smoke] active handles:", handles.map((h) => h?.constructor?.name || typeof h));
    console.error("[smoke] active requests:", requests.map((r) => r?.constructor?.name || typeof r));
    // print a bit more detail for common culprits
    for (const h of handles) {
      const name = h?.constructor?.name || "";
      if (name.includes("Socket") || name.includes("Server") || name.includes("ChildProcess") || name.includes("Timeout")) {
        console.error("[smoke] handle detail:", name, h);
      }
    }
  } catch (e) {
    console.error("[smoke] watchdog dump failed:", e?.message || e);
  }
  process.exit(1);
}, SMOKE_WATCHDOG_MS);
_watchdog.unref(); // never keep the process alive just for the watchdog

/**
 * POST a receipt to /verify?schema=1, tolerating 202 responses while the
 * runtime's schema validator is still warming (edge-safe mode).
 *
 * @param {string} base - server origin, e.g. http://127.0.0.1:19080
 * @param {object} receipt - receipt JSON to verify
 * @param {{tries?: number}} [opts] - max attempts before giving up
 * @returns {Promise<{resp: Response, body: object}>} final response + parsed body
 * @throws {Error} when every attempt still returned 202
 */
async function verifyWithRetry(base, receipt, { tries = 6 } = {}) {
  for (let i = 0; i < tries; i++) {
    const resp = await fetch(base + "/verify?schema=1", {
      method: "POST",
      headers: { "connection": "close", "content-type": "application/json" },
      body: JSON.stringify(receipt),
    });

    // Edge-safe mode can return 202 while validator warms
    if (resp.status === 202) {
      try {
        const body = await resp.json();
        const wait = Math.min(1500, Math.max(250, Number(body?.retry_after_ms || 500)));
        await new Promise((r) => setTimeout(r, wait));
        continue;
      } catch {
        await new Promise((r) => setTimeout(r, 500));
        continue;
      }
    }

    const body = await resp.json();
    return { resp, body };
  }
  throw new Error("verifyWithRetry: exceeded retries (still getting 202?)");
}

// Last captured response; dumped on failure for post-mortem debugging.
globalThis.__SMOKE_LAST__ = { label: null, status: null, body: null };

/** Remember the most recent labelled response for smokeDump(). */
function smokeCapture(label, resp, body) {
  globalThis.__SMOKE_LAST__ = {
    label,
    status: resp?.status ?? null,
    body,
  };
}

/** Print the last captured response (best effort; never throws). */
function smokeDump() {
  try {
    const x = globalThis.__SMOKE_LAST__ || {};
    console.log("\n=== SMOKE LAST RESPONSE ===");
    console.log("label:", x.label);
    console.log("status:", x.status);
    console.log("body:", JSON.stringify(x.body, null, 2));
    console.log("=== /SMOKE LAST RESPONSE ===\n");
  } catch (e) {
    console.log("smokeDump failed:", e?.message || e);
  }
}

const PORT = 19080;
const base = `http://127.0.0.1:${PORT}`;

/** Read a file and return its contents base64-encoded. */
function b64File(path) {
  return readFileSync(path).toString('base64');
}

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/**
 * Poll /health until the child answers OK, failing early if the child died.
 * @param {number} [timeoutMs] - overall budget before giving up
 * @throws {Error} on early child exit or timeout
 */
async function waitForHealth(timeoutMs = 7000) {
  const start = Date.now();
  while (Date.now() - start < timeoutMs) {
    if (__smokeChild && __smokeChild.exitCode !== null) {
      throw new Error(`__smokeChild exited early (code=${__smokeChild.exitCode}, signal=${__smokeChild.signalCode})`);
    }

    try {
      const r = await fetch(`${base}/health`, { headers: { "connection": "close" } });
      if (r.ok) return;
    } catch {}
    await sleep(120);
  }
  throw new Error('__smokeChild did not become healthy in time');
}

const tmp = mkdtempSync(join(tmpdir(), 'runtime-test-'));
const priv = join(tmp, 'private.pem');
const pub = join(tmp, 'public.pem');

try {
  // Generate the runtime's signing keypair via openssl so the child exercises
  // the same PEM/base64 key-loading path that production uses.
  execFileSync('openssl', ['genpkey', '-algorithm', 'Ed25519', '-out', priv], { stdio: 'ignore' });
  execFileSync('openssl', ['pkey', '-in', priv, '-pubout', '-out', pub], { stdio: 'ignore' });

  // BUG FIX: this env block was previously built but never handed to spawn();
  // the child received a different inline env, and the signing keys were
  // misplaced as spawn() *options* instead of environment variables, so the
  // debug-route (401 / Bearer) and CORS assertions below could never pass.
  const env = {
    ...process.env,
    HOST: '127.0.0.1',
    PORT: String(PORT),
    // preserved from the conflicting merge half; ...process.env lets callers override
    ETH_RPC_URL: process.env.ETH_RPC_URL || 'https://cloudflare-eth.com',
    RECEIPT_SIGNING_PRIVATE_KEY_PEM_B64: b64File(priv),
    // Renamed from RECEIPT_SIGNING_PUBLIC_KEY (ed25519: text) to the
    // base64-PEM variant documented in docs/CONFIGURATION.md.
    RECEIPT_SIGNING_PUBLIC_KEY_PEM_B64: b64File(pub),
    RECEIPT_SIGNER_ID: 'runtime.test',
    DEBUG_ROUTES_ENABLED: '1',
    DEBUG_BEARER_TOKEN: 'secret-token',
    REQUEST_SCHEMA_VALIDATION: '0',
    CORS_ALLOW_ORIGINS: 'http://allowed.local',
  };

  __smokeChild = spawn('node', ['server.mjs'], {
    stdio: ['ignore', 'pipe', 'pipe'],
    env,
  });

  hookChildLogs(__smokeChild);

  let logs = '';
  __smokeChild.stdout.on('data', (d) => (logs += d.toString()));
  __smokeChild.stderr.on('data', (d) => (logs += d.toString()));

  try {
    await waitForHealth();

    // --- signer readiness
    const healthResp = await fetch(`${base}/health`, { headers: { "connection": "close" } });
    assert.equal(healthResp.ok, true);
    const health = await healthResp.json();
    smokeCapture('health', healthResp, health);
    console.log("[smoke] /health:", JSON.stringify(health, null, 2));
    assert.equal(health.ok, true);
    // BUG FIX: the SMOKE_EXPECT_SIGNER_OK guard was duplicated (nested twice);
    // one combined condition is equivalent.
    if (process.env.ETH_RPC_URL && process.env.SMOKE_EXPECT_SIGNER_OK === "1") {
      assert.equal(health.signer_ok, true);
    }

    // --- verb execution
    const verbResp = await fetch(`${base}/describe/v1.0.0`, {
      method: 'POST',
      headers: { "connection": "close", 'content-type': 'application/json' },
      body: JSON.stringify({
        x402: { entry: 'x402://describeagent.eth/describe/v1.0.0', verb: 'describe', version: '1.0.0' },
        input: { subject: 'CommandLayer', detail_level: 'short' },
      }),
    });
    assert.equal(verbResp.ok, true);
    const receipt = await verbResp.json();
    smokeCapture('describe', verbResp, receipt);
    assert.equal(receipt.status, 'success');
    assert.ok(receipt.metadata?.proof?.signature_b64);

    // --- verify pass path (202-tolerant while the validator warms)
    // BUG FIX: previously a `const verifyResp` was fetched and then reassigned
    // (`verifyResp = verifyResp2`) — a TypeError at runtime; the plain fetch
    // also duplicated verifyWithRetry. One retried call covers both.
    const { resp: verifyResp, body: verify } = await verifyWithRetry(base, receipt);
    smokeCapture('verify', verifyResp, verify);
    assert.equal(verifyResp.ok, true);
    assert.equal(verify.ok, true);
    assert.equal(verify.checks.signature_valid, true);
    assert.equal(verify.checks.hash_matches, true);

    // --- verify fail path (tamper hash)
    const tampered = structuredClone(receipt);
    tampered.metadata.proof.hash_sha256 = randomBytes(32).toString('hex');
    const badVerifyResp = await fetch(`${base}/verify`, {
      method: 'POST',
      headers: { "connection": "close", 'content-type': 'application/json' },
      body: JSON.stringify(tampered),
    });
    assert.equal(badVerifyResp.ok, true);
    const badVerify = await badVerifyResp.json();
    smokeCapture('verify-tampered', badVerifyResp, badVerify);
    assert.equal(badVerify.ok, false);
    assert.equal(badVerify.checks.hash_matches, false);

    // --- debug route auth
    const debugNoToken = await fetch(`${base}/debug/env`, { headers: { "connection": "close" } });
    assert.equal(debugNoToken.status, 401);

    const debugWithToken = await fetch(`${base}/debug/env`, {
      headers: { "connection": "close", authorization: 'Bearer secret-token' },
    });
    assert.equal(debugWithToken.ok, true);
    const debug = await debugWithToken.json();
    smokeCapture('debug-env', debugWithToken, debug);
    assert.equal(debug.debug_routes_enabled, true);
    assert.equal(debug.cors.allow_origins.includes('http://allowed.local'), true);
  } finally {
    // Always tear the child down, even when an assertion failed above.
    __smokeChild.kill('SIGTERM');
    await sleep(150);
    if (!__smokeChild.killed) __smokeChild.kill('SIGKILL');
  }
} catch (err) {
  smokeDump(); // show the last captured response alongside the failure
  writeFileSync('/tmp/runtime-smoke-failure.log', String(err?.stack || err));
  throw err;
} finally {
  rmSync(tmp, { recursive: true, force: true });
}

// Ensure smoke doesn't hang due to lingering handles
setTimeout(() => process.exit(0), 0).unref();