From d85320e5d93698fb6a14337c4ff847ea71735a91 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 23 Feb 2026 10:52:40 +0100 Subject: [PATCH 01/37] Add API Reference section to docs navigation Add OpenAPI-based API Reference anchor to docs.json alongside the existing SDK Reference, pointing to openapi-public.yml spec. --- docs.json | 10 +- openapi-public.yml | 4105 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 4114 insertions(+), 1 deletion(-) create mode 100644 openapi-public.yml diff --git a/docs.json b/docs.json index cac6de66..972455b9 100644 --- a/docs.json +++ b/docs.json @@ -219,9 +219,17 @@ ] }, { - "anchor": "SDK reference", + "anchor": "SDK Reference", "icon": "brackets-curly", "href": "https://e2b.dev/docs/sdk-reference" + }, + { + "anchor": "API Reference", + "icon": "code", + "openapi": { + "source": "openapi-public.yml", + "directory": "docs/api-reference" + } } ], "global": {} diff --git a/openapi-public.yml b/openapi-public.yml new file mode 100644 index 00000000..f1500898 --- /dev/null +++ b/openapi-public.yml @@ -0,0 +1,4105 @@ +openapi: 3.1.0 +info: + title: E2B API + version: 0.1.0 + description: Complete E2B developer API. Platform endpoints are served on api.e2b.app. + Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app. 
+servers: +- &id006 + url: https://api.e2b.app + description: E2B Platform API +paths: + /health: + get: + summary: Check the health of the service + responses: + '204': + description: The service is healthy + content: &id003 + application/json: + schema: + type: object + description: Empty response + '502': &id001 + description: Sandbox not found + content: + application/json: + schema: + type: object + required: + - sandboxId + - message + - code + properties: + sandboxId: + type: string + description: Identifier of the sandbox + example: i1234abcd5678efgh90jk + message: + type: string + description: Error message + example: The sandbox was not found + code: + type: integer + description: Error code + example: 502 + security: + - &id004 + SandboxAccessTokenAuth: [] + - &id005 {} + operationId: getHealth + servers: + - &id002 + url: https://{port}-{sandboxID}.e2b.app + description: Sandbox API (envd) — runs inside each sandbox + variables: + port: + default: '49983' + description: Port number + sandboxID: + default: '{sandbox-id}' + description: Sandbox identifier + /metrics: + get: + summary: Get the stats of the service + security: + - SandboxAccessTokenAuth: [] + - {} + responses: + '200': + description: The resource usage metrics of the service + content: + application/json: + schema: + $ref: '#/components/schemas/Metrics' + '502': *id001 + operationId: getMetrics + servers: + - *id002 + /envs: + get: + summary: Get the environment variables + security: + - SandboxAccessTokenAuth: [] + - {} + responses: + '200': + description: Environment variables + content: + application/json: + schema: + $ref: '#/components/schemas/EnvVars' + '502': *id001 + operationId: getEnvVars + servers: + - *id002 + /files: + get: + summary: Download a file + tags: + - files + security: + - SandboxAccessTokenAuth: [] + - {} + parameters: + - $ref: '#/components/parameters/FilePath' + - $ref: '#/components/parameters/User' + - $ref: '#/components/parameters/Signature' + - $ref: 
'#/components/parameters/SignatureExpiration' + responses: + '200': + $ref: '#/components/responses/DownloadSuccess' + content: *id003 + '400': + $ref: '#/components/responses/InvalidPath' + '401': + $ref: '#/components/responses/InvalidUser' + '404': + $ref: '#/components/responses/FileNotFound' + '406': + $ref: '#/components/responses/NotAcceptable' + '500': + $ref: '#/components/responses/InternalServerError' + '502': *id001 + operationId: downloadFile + post: + summary: Upload a file and ensure the parent directories exist. If the file + exists, it will be overwritten. + tags: + - files + security: + - SandboxAccessTokenAuth: [] + - {} + parameters: + - $ref: '#/components/parameters/FilePath' + - $ref: '#/components/parameters/User' + - $ref: '#/components/parameters/Signature' + - $ref: '#/components/parameters/SignatureExpiration' + requestBody: + $ref: '#/components/requestBodies/File' + responses: + '200': + $ref: '#/components/responses/UploadSuccess' + content: *id003 + '400': + $ref: '#/components/responses/InvalidPath' + '401': + $ref: '#/components/responses/InvalidUser' + '500': + $ref: '#/components/responses/InternalServerError' + '507': + $ref: '#/components/responses/NotEnoughDiskSpace' + '502': *id001 + operationId: uploadFile + servers: + - *id002 + /filesystem.Filesystem/CreateWatcher: + post: + tags: + - filesystem.Filesystem + summary: CreateWatcher + description: Non-streaming versions of WatchDir + operationId: filesystem.Filesystem.CreateWatcher + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.CreateWatcherRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: 
'#/components/schemas/filesystem.CreateWatcherResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/GetWatcherEvents: + post: + tags: + - filesystem.Filesystem + summary: GetWatcherEvents + operationId: filesystem.Filesystem.GetWatcherEvents + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.GetWatcherEventsRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.GetWatcherEventsResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/ListDir: + post: + tags: + - filesystem.Filesystem + summary: ListDir + operationId: filesystem.Filesystem.ListDir + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.ListDirRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.ListDirResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/MakeDir: + post: + tags: + - filesystem.Filesystem + summary: MakeDir + operationId: filesystem.Filesystem.MakeDir + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: 
'#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MakeDirRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MakeDirResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/Move: + post: + tags: + - filesystem.Filesystem + summary: Move + operationId: filesystem.Filesystem.Move + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MoveRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MoveResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/Remove: + post: + tags: + - filesystem.Filesystem + summary: Remove + operationId: filesystem.Filesystem.Remove + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.RemoveRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.RemoveResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/RemoveWatcher: + post: + tags: + - filesystem.Filesystem + summary: RemoveWatcher + operationId: 
filesystem.Filesystem.RemoveWatcher + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.RemoveWatcherRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.RemoveWatcherResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/Stat: + post: + tags: + - filesystem.Filesystem + summary: Stat + operationId: filesystem.Filesystem.Stat + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.StatRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.StatResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /filesystem.Filesystem/WatchDir: + post: + tags: + - filesystem.Filesystem + summary: WatchDir + description: Server-streaming RPC. Use the Connect protocol with streaming support. 
+ operationId: filesystem.Filesystem.WatchDir + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.WatchDirRequest' + required: true + responses: + '200': + description: Stream of WatchDirResponse events + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.WatchDirResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/CloseStdin: + post: + tags: + - process.Process + summary: CloseStdin + description: "Close stdin to signal EOF to the process.\n Only works for non-PTY\ + \ processes. For PTY, send Ctrl+D (0x04) instead." + operationId: process.Process.CloseStdin + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.CloseStdinRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/process.CloseStdinResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/Connect: + post: + tags: + - process.Process + summary: Connect + description: Server-streaming RPC. Use the Connect protocol with streaming support. 
+ operationId: process.Process.Connect + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.ConnectRequest' + required: true + responses: + '200': + description: Stream of ConnectResponse events + content: + application/json: + schema: + $ref: '#/components/schemas/process.ConnectResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/List: + post: + tags: + - process.Process + summary: List + operationId: process.Process.List + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.ListRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/process.ListResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/SendInput: + post: + tags: + - process.Process + summary: SendInput + operationId: process.Process.SendInput + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.SendInputRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/process.SendInputResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/SendSignal: + post: + tags: + - process.Process + summary: SendSignal + operationId: process.Process.SendSignal + parameters: + - name: 
Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.SendSignalRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/process.SendSignalResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/Start: + post: + tags: + - process.Process + summary: Start + description: Server-streaming RPC. Use the Connect protocol with streaming support. + operationId: process.Process.Start + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.StartRequest' + required: true + responses: + '200': + description: Stream of StartResponse events + content: + application/json: + schema: + $ref: '#/components/schemas/process.StartResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/StreamInput: + post: + tags: + - process.Process + summary: StreamInput + description: Client-streaming RPC. Client input stream ensures ordering of messages. + Use the Connect protocol with streaming support. 
+ operationId: process.Process.StreamInput + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.StreamInputRequest' + required: true + responses: + '200': + description: Stream of StreamInputResponse events + content: + application/json: + schema: + $ref: '#/components/schemas/process.StreamInputResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /process.Process/Update: + post: + tags: + - process.Process + summary: Update + operationId: process.Process.Update + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.UpdateRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/process.UpdateResponse' + '502': *id001 + security: + - *id004 + - *id005 + servers: + - *id002 + /teams: + get: + description: List all teams + tags: + - auth + security: + - SandboxAccessTokenAuth: [] + responses: + '200': + description: Successfully returned all teams + content: + application/json: + schema: + type: array + items: + allOf: + - $ref: '#/components/schemas/Team' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /teams/{teamID}/metrics: + get: + description: Get metrics for the team + tags: + - auth + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/teamID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in seconds, for + which the metrics + - in: query + name: end + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp 
for the end of the interval, in seconds, for + which the metrics + responses: + '200': + description: Successfully returned the team metrics + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/TeamMetric' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '403': + $ref: '#/components/responses/403' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /teams/{teamID}/metrics/max: + get: + description: Get the maximum metrics for the team in the given interval + tags: + - auth + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/teamID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in seconds, for + which the metrics + - in: query + name: end + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the end of the interval, in seconds, for + which the metrics + - in: query + name: metric + required: true + schema: + type: string + enum: + - concurrent_sandboxes + - sandbox_start_rate + description: Metric to retrieve the maximum value for + responses: + '200': + description: Successfully returned the team metrics + content: + application/json: + schema: + $ref: '#/components/schemas/MaxTeamMetric' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '403': + $ref: '#/components/responses/403' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes: + get: + description: List all running sandboxes + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - name: metadata + in: query + description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). + Each key and values must be URL encoded. 
+ required: false + schema: + type: string + responses: + '200': + description: Successfully returned all running sandboxes + content: + application/json: + schema: + type: array + items: + allOf: + - $ref: '#/components/schemas/ListedSandbox' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + post: + description: Create a sandbox from the template + tags: + - sandboxes + security: + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NewSandbox' + responses: + '201': + description: The sandbox was created successfully + content: + application/json: + schema: + $ref: '#/components/schemas/Sandbox' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /v2/sandboxes: + get: + description: List all sandboxes + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - name: metadata + in: query + description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). + Each key and values must be URL encoded. 
+ required: false + schema: + type: string + - name: state + in: query + description: Filter sandboxes by one or more states + required: false + schema: + type: array + items: + $ref: '#/components/schemas/SandboxState' + style: form + explode: false + - $ref: '#/components/parameters/paginationNextToken' + - $ref: '#/components/parameters/paginationLimit' + responses: + '200': + description: Successfully returned all running sandboxes + content: + application/json: + schema: + type: array + items: + allOf: + - $ref: '#/components/schemas/ListedSandbox' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/metrics: + get: + description: List metrics for given sandboxes + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - name: sandbox_ids + in: query + required: true + description: Comma-separated list of sandbox IDs to get metrics for + explode: false + schema: + type: array + items: + type: string + maxItems: 100 + uniqueItems: true + responses: + '200': + description: Successfully returned all running sandboxes with metrics + content: + application/json: + schema: + $ref: '#/components/schemas/SandboxesWithMetrics' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/logs: + get: + description: Get sandbox logs. Use /v2/sandboxes/{sandboxID}/logs instead. 
+ deprecated: true + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Starting timestamp of the logs that should be returned in milliseconds + - in: query + name: limit + schema: + default: 1000 + format: int32 + minimum: 0 + type: integer + description: Maximum number of logs that should be returned + responses: + '200': + description: Successfully returned the sandbox logs + content: + application/json: + schema: + $ref: '#/components/schemas/SandboxLogs' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}: + get: + description: Get a sandbox by id + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + responses: + '200': + description: Successfully returned the sandbox + content: + application/json: + schema: + $ref: '#/components/schemas/SandboxDetail' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + delete: + description: Kill a sandbox + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: The sandbox was killed successfully + content: *id003 + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/metrics: + get: + description: Get sandbox metrics + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in 
seconds, for + which the metrics + - in: query + name: end + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the end of the interval, in seconds, for + which the metrics + responses: + '200': + description: Successfully returned the sandbox metrics + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/SandboxMetric' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/pause: + post: + description: Pause the sandbox + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: The sandbox was paused successfully and can be resumed + content: *id003 + '409': + $ref: '#/components/responses/409' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/resume: + post: + deprecated: true + description: Resume the sandbox + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ResumedSandbox' + responses: + '201': + description: The sandbox was resumed successfully + content: + application/json: + schema: + $ref: '#/components/schemas/Sandbox' + '409': + $ref: '#/components/responses/409' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/connect: + post: + description: Returns sandbox details. If the sandbox is paused, it will be resumed. + TTL is only extended. 
+ tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectSandbox' + responses: + '200': + description: The sandbox was already running + content: + application/json: + schema: + $ref: '#/components/schemas/Sandbox' + '201': + description: The sandbox was resumed successfully + content: + application/json: + schema: + $ref: '#/components/schemas/Sandbox' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/timeout: + post: + description: Set the timeout for the sandbox. The sandbox will expire x seconds + from the time of the request. Calling this method multiple times overwrites + the TTL, each time using the current timestamp as the starting point to measure + the timeout duration. 
+ security: + - ApiKeyAuth: [] + tags: + - sandboxes + requestBody: + content: + application/json: + schema: + type: object + required: + - timeout + properties: + timeout: + description: Timeout in seconds from the current time after which + the sandbox should expire + type: integer + format: int32 + minimum: 0 + parameters: + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: Successfully set the sandbox timeout + content: *id003 + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /sandboxes/{sandboxID}/refreshes: + post: + description: Refresh the sandbox extending its time to live + security: + - ApiKeyAuth: [] + tags: + - sandboxes + requestBody: + content: + application/json: + schema: + type: object + properties: + duration: + description: Duration for which the sandbox should be kept alive + in seconds + type: integer + maximum: 3600 + minimum: 0 + parameters: + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: Successfully refreshed the sandbox + content: *id003 + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + servers: + - *id006 + /sandboxes/{sandboxID}/snapshots: + post: + description: Create a persistent snapshot from the sandbox's current state. + Snapshots can be used to create new sandboxes and persist beyond the original + sandbox's lifetime. + tags: + - sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + type: string + description: Optional name for the snapshot template. If a snapshot + template with this name already exists, a new build will be assigned + to the existing template instead of creating a new one. 
+ responses: + '201': + description: Snapshot created successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SnapshotInfo' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /snapshots: + get: + description: List all snapshots for the team + tags: + - snapshots + security: + - ApiKeyAuth: [] + parameters: + - name: sandboxID + in: query + required: false + schema: + type: string + description: Filter snapshots by source sandbox ID + - $ref: '#/components/parameters/paginationLimit' + - $ref: '#/components/parameters/paginationNextToken' + responses: + '200': + description: Successfully returned snapshots + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/SnapshotInfo' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /v3/templates: + post: + description: Create a new template + tags: + - templates + security: + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequestV3' + responses: + '202': + description: The build was requested successfully + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateRequestResponseV3' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /v2/templates: + post: + description: Create a new template + deprecated: true + tags: + - templates + security: + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequestV2' + responses: + '202': + description: The build was requested successfully + content: + application/json: + schema: + $ref: 
'#/components/schemas/TemplateLegacy' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}/files/{hash}: + get: + description: Get an upload link for a tar file containing build layer files + tags: + - templates + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - in: path + name: hash + required: true + schema: + type: string + description: Hash of the files + responses: + '201': + description: The upload link where to upload the tar file + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildFileUpload' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates: + get: + description: List all templates + tags: + - templates + security: + - ApiKeyAuth: [] + - SandboxAccessTokenAuth: [] + parameters: + - in: query + required: false + name: teamID + schema: + type: string + description: Identifier of the team + responses: + '200': + description: Successfully returned all templates + content: + application/json: + schema: + type: array + items: + allOf: + - $ref: '#/components/schemas/Template' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + post: + description: Create a new template + deprecated: true + tags: + - templates + security: + - SandboxAccessTokenAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequest' + responses: + '202': + description: The build was accepted + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateLegacy' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + 
$ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}: + get: + description: List all builds for a template + tags: + - templates + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/paginationNextToken' + - $ref: '#/components/parameters/paginationLimit' + responses: + '200': + description: Successfully returned the template with its builds + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateWithBuilds' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + post: + description: Rebuild a template + deprecated: true + tags: + - templates + security: + - SandboxAccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequest' + responses: + '202': + description: The build was accepted + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateLegacy' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + delete: + description: Delete a template + tags: + - templates + security: + - ApiKeyAuth: [] + - SandboxAccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + responses: + '204': + description: The template was deleted successfully + content: *id003 + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + patch: + description: Update template + deprecated: true + tags: + - templates + security: + - ApiKeyAuth: [] + - SandboxAccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateUpdateRequest' + responses: + '200': + description: The template was updated successfully + content: *id003 + '400': + $ref: 
'#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}/builds/{buildID}: + post: + description: Start the build + deprecated: true + tags: + - templates + security: + - SandboxAccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' + responses: + '202': + description: The build has started + content: *id003 + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /v2/templates/{templateID}/builds/{buildID}: + post: + description: Start the build + tags: + - templates + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildStartV2' + responses: + '202': + description: The build has started + content: *id003 + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /v2/templates/{templateID}: + patch: + description: Update template + tags: + - templates + security: + - ApiKeyAuth: [] + - SandboxAccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateUpdateRequest' + responses: + '200': + description: The template was updated successfully + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateUpdateResponse' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}/builds/{buildID}/status: + get: + description: Get template build info + tags: + - templates + security: + - SandboxAccessTokenAuth: [] 
+ - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' + - in: query + name: logsOffset + schema: + default: 0 + type: integer + format: int32 + minimum: 0 + description: Index of the starting build log that should be returned with + the template + - in: query + name: limit + schema: + default: 100 + type: integer + format: int32 + minimum: 0 + maximum: 100 + description: Maximum number of logs that should be returned + - in: query + name: level + schema: + $ref: '#/components/schemas/LogLevel' + responses: + '200': + description: Successfully returned the template + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildInfo' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}/builds/{buildID}/logs: + get: + description: Get template build logs + tags: + - templates + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' + - in: query + name: cursor + schema: + type: integer + format: int64 + minimum: 0 + description: Starting timestamp of the logs that should be returned in milliseconds + - in: query + name: limit + schema: + default: 100 + type: integer + format: int32 + minimum: 0 + maximum: 100 + description: Maximum number of logs that should be returned + - in: query + name: direction + schema: + $ref: '#/components/schemas/LogsDirection' + - in: query + name: level + schema: + $ref: '#/components/schemas/LogLevel' + - in: query + name: source + schema: + $ref: '#/components/schemas/LogsSource' + description: Source of the logs that should be returned from + responses: + '200': + description: Successfully returned the template build logs + content: + application/json: + schema: + $ref: 
'#/components/schemas/TemplateBuildLogsResponse' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/tags: + post: + description: Assign tag(s) to a template build + tags: + - tags + security: + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/AssignTemplateTagsRequest' + responses: + '201': + description: Tag assigned successfully + content: + application/json: + schema: + $ref: '#/components/schemas/AssignedTemplateTags' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + delete: + description: Delete multiple tags from templates + tags: + - tags + security: + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DeleteTemplateTagsRequest' + responses: + '204': + description: Tags deleted successfully + content: *id003 + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/{templateID}/tags: + get: + description: List all tags for a template + tags: + - tags + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + responses: + '200': + description: Successfully returned the template tags + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/TemplateTag' + '401': + $ref: '#/components/responses/401' + '403': + $ref: '#/components/responses/403' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /templates/aliases/{alias}: + get: + description: Check if template with given alias 
exists + tags: + - templates + security: + - ApiKeyAuth: [] + parameters: + - name: alias + in: path + required: true + schema: + type: string + description: Template alias + responses: + '200': + description: Successfully queried template by alias + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateAliasResponse' + '400': + $ref: '#/components/responses/400' + '403': + $ref: '#/components/responses/403' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /volumes: + get: + description: List all team volumes + tags: + - volumes + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + responses: + '200': + description: Successfully listed all team volumes + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Volume' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + post: + description: Create a new team volume + tags: + - volumes + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/NewVolume' + responses: + '201': + description: Successfully created a new team volume + content: + application/json: + schema: + $ref: '#/components/schemas/Volume' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 + /volumes/{volumeID}: + get: + description: Get team volume info + tags: + - volumes + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/volumeID' + responses: + '200': + description: Successfully retrieved a team volume + content: + application/json: + schema: + $ref: '#/components/schemas/Volume' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: 
'#/components/responses/500' + delete: + description: Delete a team volume + tags: + - volumes + security: + - SandboxAccessTokenAuth: [] + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/volumeID' + responses: + '204': + description: Successfully deleted a team volume + content: *id003 + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + servers: + - *id006 +components: + securitySchemes: + ApiKeyAuth: + type: apiKey + in: header + name: X-API-Key + SandboxAccessTokenAuth: + type: http + scheme: bearer + bearerFormat: access_token + parameters: + FilePath: + name: path + in: query + required: false + description: Path to the file, URL encoded. Can be relative to the user's home + directory (e.g. "file.txt" resolves to ~/file.txt). + schema: + type: string + User: + name: username + in: query + required: false + description: User for setting file ownership and resolving relative paths. Defaults + to the sandbox's default user. + schema: + type: string + Signature: + name: signature + in: query + required: false + description: HMAC signature for access verification. Required when no X-Access-Token + header is provided. Format is "v1_". + schema: + type: string + SignatureExpiration: + name: signature_expiration + in: query + required: false + description: Unix timestamp (seconds) after which the signature expires. Only + used with the signature parameter. 
+ schema: + type: integer + templateID: + name: templateID + in: path + required: true + schema: + type: string + buildID: + name: buildID + in: path + required: true + schema: + type: string + sandboxID: + name: sandboxID + in: path + required: true + schema: + type: string + teamID: + name: teamID + in: path + required: true + schema: + type: string + nodeID: + name: nodeID + in: path + required: true + schema: + type: string + apiKeyID: + name: apiKeyID + in: path + required: true + schema: + type: string + accessTokenID: + name: accessTokenID + in: path + required: true + schema: + type: string + snapshotID: + name: snapshotID + in: path + required: true + schema: + type: string + description: Identifier of the snapshot (template ID) + tag: + name: tag + in: path + required: true + schema: + type: string + description: Tag name + paginationLimit: + name: limit + in: query + description: Maximum number of items to return per page + required: false + schema: + type: integer + format: int32 + minimum: 1 + default: 100 + maximum: 100 + paginationNextToken: + name: nextToken + in: query + description: Cursor to start the list from + required: false + schema: + type: string + volumeID: + name: volumeID + in: path + required: true + schema: + type: string + requestBodies: + File: + required: true + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + responses: + UploadSuccess: + description: The file was uploaded successfully. + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/EntryInfo' + example: + - path: /home/user/hello.txt + name: hello.txt + type: file + DownloadSuccess: + description: File content. Content-Type is detected from the file extension + (defaults to application/octet-stream). Content-Disposition header contains + the filename. 
+ content: + application/octet-stream: + schema: + type: string + format: binary + description: The raw file content + NotAcceptable: + description: Requested encoding is not supported + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: 'no acceptable encoding found, supported: [identity, gzip]' + code: 406 + InvalidPath: + description: Invalid path + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: path '/home/user/docs' is a directory + code: 400 + InternalServerError: + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: 'error opening file ''/home/user/file.txt'': permission denied' + code: 500 + FileNotFound: + description: File not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: path '/home/user/missing.txt' does not exist + code: 404 + InvalidUser: + description: Invalid user + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: 'error looking up user ''nonexistent'': user: unknown user nonexistent' + code: 401 + NotEnoughDiskSpace: + description: Not enough disk space + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + message: not enough disk space available + code: 507 + '400': + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '401': + description: Authentication error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '404': + description: Not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '409': + description: Conflict + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + '500': 
+ description: Server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + schemas: + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + description: Error code + message: + type: string + description: Error + EntryInfo: + required: + - path + - name + - type + properties: + path: + type: string + description: Path to the file + name: + type: string + description: Name of the file + type: + type: string + description: Type of the file + enum: + - file + EnvVars: + additionalProperties: + type: string + description: Environment variables for the sandbox + Metrics: + type: object + description: Resource usage metrics + properties: + ts: + type: integer + format: int64 + description: Unix timestamp in UTC for current sandbox time + cpu_count: + type: integer + description: Number of CPU cores + cpu_used_pct: + type: number + format: float + description: CPU usage percentage + mem_total: + type: integer + description: Total virtual memory in bytes + mem_used: + type: integer + description: Used virtual memory in bytes + mem_total_mib: + type: integer + description: Total virtual memory in MiB + mem_used_mib: + type: integer + description: Used virtual memory in MiB + disk_used: + type: integer + description: Used disk space in bytes + disk_total: + type: integer + description: Total disk space in bytes + connect-protocol-version: + type: number + title: Connect-Protocol-Version + enum: + - 1 + description: Define the version of the Connect protocol + const: 1 + connect-timeout-header: + type: number + title: Connect-Timeout-Ms + description: Define the timeout, in ms + filesystem.CreateWatcherRequest: + type: object + properties: + path: + type: string + title: path + recursive: + type: boolean + title: recursive + title: CreateWatcherRequest + additionalProperties: false + filesystem.CreateWatcherResponse: + type: object + properties: + watcherId: + type: string + title: watcher_id + title: 
CreateWatcherResponse + additionalProperties: false + filesystem.EntryInfo: + type: object + properties: + name: + type: string + title: name + type: + title: type + $ref: '#/components/schemas/filesystem.FileType' + path: + type: string + title: path + size: + type: + - integer + - string + title: size + format: int64 + mode: + type: integer + title: mode + permissions: + type: string + title: permissions + owner: + type: string + title: owner + group: + type: string + title: group + modifiedTime: + title: modified_time + $ref: '#/components/schemas/google.protobuf.Timestamp' + symlinkTarget: + type: + - string + - 'null' + title: symlink_target + description: If the entry is a symlink, this field contains the target of + the symlink. + title: EntryInfo + additionalProperties: false + filesystem.EventType: + type: string + title: EventType + enum: + - EVENT_TYPE_UNSPECIFIED + - EVENT_TYPE_CREATE + - EVENT_TYPE_WRITE + - EVENT_TYPE_REMOVE + - EVENT_TYPE_RENAME + - EVENT_TYPE_CHMOD + filesystem.FileType: + type: string + title: FileType + enum: + - FILE_TYPE_UNSPECIFIED + - FILE_TYPE_FILE + - FILE_TYPE_DIRECTORY + filesystem.FilesystemEvent: + type: object + properties: + name: + type: string + title: name + type: + title: type + $ref: '#/components/schemas/filesystem.EventType' + title: FilesystemEvent + additionalProperties: false + filesystem.GetWatcherEventsRequest: + type: object + properties: + watcherId: + type: string + title: watcher_id + title: GetWatcherEventsRequest + additionalProperties: false + filesystem.GetWatcherEventsResponse: + type: object + properties: + events: + type: array + items: + $ref: '#/components/schemas/filesystem.FilesystemEvent' + title: events + title: GetWatcherEventsResponse + additionalProperties: false + filesystem.ListDirRequest: + type: object + properties: + path: + type: string + title: path + depth: + type: integer + title: depth + title: ListDirRequest + additionalProperties: false + filesystem.ListDirResponse: + type: 
object + properties: + entries: + type: array + items: + $ref: '#/components/schemas/filesystem.EntryInfo' + title: entries + title: ListDirResponse + additionalProperties: false + filesystem.MakeDirRequest: + type: object + properties: + path: + type: string + title: path + title: MakeDirRequest + additionalProperties: false + filesystem.MakeDirResponse: + type: object + properties: + entry: + title: entry + $ref: '#/components/schemas/filesystem.EntryInfo' + title: MakeDirResponse + additionalProperties: false + filesystem.MoveRequest: + type: object + properties: + source: + type: string + title: source + destination: + type: string + title: destination + title: MoveRequest + additionalProperties: false + filesystem.MoveResponse: + type: object + properties: + entry: + title: entry + $ref: '#/components/schemas/filesystem.EntryInfo' + title: MoveResponse + additionalProperties: false + filesystem.RemoveRequest: + type: object + properties: + path: + type: string + title: path + title: RemoveRequest + additionalProperties: false + filesystem.RemoveResponse: + type: object + title: RemoveResponse + additionalProperties: false + filesystem.RemoveWatcherRequest: + type: object + properties: + watcherId: + type: string + title: watcher_id + title: RemoveWatcherRequest + additionalProperties: false + filesystem.RemoveWatcherResponse: + type: object + title: RemoveWatcherResponse + additionalProperties: false + filesystem.StatRequest: + type: object + properties: + path: + type: string + title: path + title: StatRequest + additionalProperties: false + filesystem.StatResponse: + type: object + properties: + entry: + title: entry + $ref: '#/components/schemas/filesystem.EntryInfo' + title: StatResponse + additionalProperties: false + filesystem.WatchDirRequest: + type: object + properties: + path: + type: string + title: path + recursive: + type: boolean + title: recursive + title: WatchDirRequest + additionalProperties: false + filesystem.WatchDirResponse: + type: 
object + oneOf: + - properties: + filesystem: + title: filesystem + $ref: '#/components/schemas/filesystem.FilesystemEvent' + title: filesystem + required: + - filesystem + - properties: + keepalive: + title: keepalive + $ref: '#/components/schemas/filesystem.WatchDirResponse.KeepAlive' + title: keepalive + required: + - keepalive + - properties: + start: + title: start + $ref: '#/components/schemas/filesystem.WatchDirResponse.StartEvent' + title: start + required: + - start + title: WatchDirResponse + additionalProperties: false + filesystem.WatchDirResponse.KeepAlive: + type: object + title: KeepAlive + additionalProperties: false + filesystem.WatchDirResponse.StartEvent: + type: object + title: StartEvent + additionalProperties: false + google.protobuf.Timestamp: + type: string + examples: + - '2023-01-15T01:30:15.01Z' + - '2024-12-25T12:00:00Z' + format: date-time + description: "A Timestamp represents a point in time independent of any time\ + \ zone or local\n calendar, encoded as a count of seconds and fractions of\ + \ seconds at\n nanosecond resolution. The count is relative to an epoch at\ + \ UTC midnight on\n January 1, 1970, in the proleptic Gregorian calendar which\ + \ extends the\n Gregorian calendar backwards to year one.\n\n All minutes\ + \ are 60 seconds long. 
Leap seconds are \"smeared\" so that no leap\n second\ + \ table is needed for interpretation, using a [24-hour linear\n smear](https://developers.google.com/time/smear).\n\ + \n The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.\ + \ By\n restricting to that range, we ensure that we can convert to and from\ + \ [RFC\n 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.\n\n # Examples\n\ + \n Example 1: Compute Timestamp from POSIX `time()`.\n\n Timestamp timestamp;\n\ + \ timestamp.set_seconds(time(NULL));\n timestamp.set_nanos(0);\n\n\ + \ Example 2: Compute Timestamp from POSIX `gettimeofday()`.\n\n struct\ + \ timeval tv;\n gettimeofday(&tv, NULL);\n\n Timestamp timestamp;\n\ + \ timestamp.set_seconds(tv.tv_sec);\n timestamp.set_nanos(tv.tv_usec\ + \ * 1000);\n\n Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.\n\ + \n FILETIME ft;\n GetSystemTimeAsFileTime(&ft);\n UINT64 ticks\ + \ = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;\n\n // A Windows\ + \ tick is 100 nanoseconds. 
Windows epoch 1601-01-01T00:00:00Z\n // is\ + \ 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.\n Timestamp\ + \ timestamp;\n timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));\n\ + \ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));\n\n Example\ + \ 4: Compute Timestamp from Java `System.currentTimeMillis()`.\n\n long\ + \ millis = System.currentTimeMillis();\n\n Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis\ + \ / 1000)\n .setNanos((int) ((millis % 1000) * 1000000)).build();\n\ + \n Example 5: Compute Timestamp from Java `Instant.now()`.\n\n Instant\ + \ now = Instant.now();\n\n Timestamp timestamp =\n Timestamp.newBuilder().setSeconds(now.getEpochSecond())\n\ + \ .setNanos(now.getNano()).build();\n\n Example 6: Compute Timestamp\ + \ from current time in Python.\n\n timestamp = Timestamp()\n timestamp.GetCurrentTime()\n\ + \n # JSON Mapping\n\n In JSON format, the Timestamp type is encoded as a string\ + \ in the\n [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is,\ + \ the\n format is \"{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z\"\ + \n where {year} is always expressed using four digits while {month}, {day},\n\ + \ {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional\n\ + \ seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),\n\ + \ are optional. The \"Z\" suffix indicates the timezone (\"UTC\"); the timezone\n\ + \ is required. 
A proto3 JSON serializer should always use UTC (as indicated\ + \ by\n \"Z\") when printing the Timestamp type and a proto3 JSON parser should\ + \ be\n able to accept both UTC and other timezones (as indicated by an offset).\n\ + \n For example, \"2017-01-15T01:30:15.01Z\" encodes 15.01 seconds past\n 01:30\ + \ UTC on January 15, 2017.\n\n In JavaScript, one can convert a Date object\ + \ to this format using the\n standard\n [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)\n\ + \ method. In Python, a standard `datetime.datetime` object can be converted\n\ + \ to this format using\n [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)\ + \ with\n the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java,\ + \ one can use\n the Joda Time's [`ISODateTimeFormat.dateTime()`](\n http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()\n\ + \ ) to obtain a formatter capable of generating timestamps in this format." 
+ process.CloseStdinRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + title: CloseStdinRequest + additionalProperties: false + process.CloseStdinResponse: + type: object + title: CloseStdinResponse + additionalProperties: false + process.ConnectRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + title: ConnectRequest + additionalProperties: false + process.ConnectResponse: + type: object + properties: + event: + title: event + $ref: '#/components/schemas/process.ProcessEvent' + title: ConnectResponse + additionalProperties: false + process.ListRequest: + type: object + title: ListRequest + additionalProperties: false + process.ListResponse: + type: object + properties: + processes: + type: array + items: + $ref: '#/components/schemas/process.ProcessInfo' + title: processes + title: ListResponse + additionalProperties: false + process.PTY: + type: object + properties: + size: + title: size + $ref: '#/components/schemas/process.PTY.Size' + title: PTY + additionalProperties: false + process.PTY.Size: + type: object + properties: + cols: + type: integer + title: cols + rows: + type: integer + title: rows + title: Size + additionalProperties: false + process.ProcessConfig: + type: object + properties: + cmd: + type: string + title: cmd + args: + type: array + items: + type: string + title: args + envs: + type: object + title: envs + additionalProperties: + type: string + title: value + cwd: + type: + - string + - 'null' + title: cwd + title: ProcessConfig + additionalProperties: false + process.ProcessEvent: + type: object + oneOf: + - properties: + data: + title: data + $ref: '#/components/schemas/process.ProcessEvent.DataEvent' + title: data + required: + - data + - properties: + end: + title: end + $ref: '#/components/schemas/process.ProcessEvent.EndEvent' + title: end + required: + - end + - properties: + keepalive: + title: 
keepalive + $ref: '#/components/schemas/process.ProcessEvent.KeepAlive' + title: keepalive + required: + - keepalive + - properties: + start: + title: start + $ref: '#/components/schemas/process.ProcessEvent.StartEvent' + title: start + required: + - start + title: ProcessEvent + additionalProperties: false + process.ProcessEvent.DataEvent: + type: object + oneOf: + - properties: + pty: + type: string + title: pty + format: byte + title: pty + required: + - pty + - properties: + stderr: + type: string + title: stderr + format: byte + title: stderr + required: + - stderr + - properties: + stdout: + type: string + title: stdout + format: byte + title: stdout + required: + - stdout + title: DataEvent + additionalProperties: false + process.ProcessEvent.EndEvent: + type: object + properties: + exitCode: + type: integer + title: exit_code + format: int32 + exited: + type: boolean + title: exited + status: + type: string + title: status + error: + type: + - string + - 'null' + title: error + title: EndEvent + additionalProperties: false + process.ProcessEvent.KeepAlive: + type: object + title: KeepAlive + additionalProperties: false + process.ProcessEvent.StartEvent: + type: object + properties: + pid: + type: integer + title: pid + title: StartEvent + additionalProperties: false + process.ProcessInfo: + type: object + properties: + config: + title: config + $ref: '#/components/schemas/process.ProcessConfig' + pid: + type: integer + title: pid + tag: + type: + - string + - 'null' + title: tag + title: ProcessInfo + additionalProperties: false + process.ProcessInput: + type: object + oneOf: + - properties: + pty: + type: string + title: pty + format: byte + title: pty + required: + - pty + - properties: + stdin: + type: string + title: stdin + format: byte + title: stdin + required: + - stdin + title: ProcessInput + additionalProperties: false + process.ProcessSelector: + type: object + oneOf: + - properties: + pid: + type: integer + title: pid + title: pid + required: + 
- pid + - properties: + tag: + type: string + title: tag + title: tag + required: + - tag + title: ProcessSelector + additionalProperties: false + process.SendInputRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + input: + title: input + $ref: '#/components/schemas/process.ProcessInput' + title: SendInputRequest + additionalProperties: false + process.SendInputResponse: + type: object + title: SendInputResponse + additionalProperties: false + process.SendSignalRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + signal: + title: signal + $ref: '#/components/schemas/process.Signal' + title: SendSignalRequest + additionalProperties: false + process.SendSignalResponse: + type: object + title: SendSignalResponse + additionalProperties: false + process.Signal: + type: string + title: Signal + enum: + - SIGNAL_UNSPECIFIED + - SIGNAL_SIGTERM + - SIGNAL_SIGKILL + process.StartRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessConfig' + pty: + oneOf: + - $ref: '#/components/schemas/process.PTY' + - type: 'null' + title: pty + tag: + type: + - string + - 'null' + title: tag + stdin: + type: + - boolean + - 'null' + title: stdin + description: "This is optional for backwards compatibility.\n We default\ + \ to true. New SDK versions will set this to false by default." 
+ title: StartRequest + additionalProperties: false + process.StartResponse: + type: object + properties: + event: + title: event + $ref: '#/components/schemas/process.ProcessEvent' + title: StartResponse + additionalProperties: false + process.StreamInputRequest: + type: object + oneOf: + - properties: + data: + title: data + $ref: '#/components/schemas/process.StreamInputRequest.DataEvent' + title: data + required: + - data + - properties: + keepalive: + title: keepalive + $ref: '#/components/schemas/process.StreamInputRequest.KeepAlive' + title: keepalive + required: + - keepalive + - properties: + start: + title: start + $ref: '#/components/schemas/process.StreamInputRequest.StartEvent' + title: start + required: + - start + title: StreamInputRequest + additionalProperties: false + process.StreamInputRequest.DataEvent: + type: object + properties: + input: + title: input + $ref: '#/components/schemas/process.ProcessInput' + title: DataEvent + additionalProperties: false + process.StreamInputRequest.KeepAlive: + type: object + title: KeepAlive + additionalProperties: false + process.StreamInputRequest.StartEvent: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + title: StartEvent + additionalProperties: false + process.StreamInputResponse: + type: object + title: StreamInputResponse + additionalProperties: false + process.UpdateRequest: + type: object + properties: + process: + title: process + $ref: '#/components/schemas/process.ProcessSelector' + pty: + oneOf: + - $ref: '#/components/schemas/process.PTY' + - type: 'null' + title: pty + title: UpdateRequest + additionalProperties: false + process.UpdateResponse: + type: object + title: UpdateResponse + additionalProperties: false + Team: + required: + - teamID + - name + - apiKey + - isDefault + properties: + teamID: + type: string + description: Identifier of the team + name: + type: string + description: Name of the team + apiKey: + type: string 
+ description: API key for the team + isDefault: + type: boolean + description: Whether the team is the default team + TeamUser: + required: + - id + - email + properties: + id: + type: string + format: uuid + description: Identifier of the user + email: + type: string + description: Email of the user + TemplateUpdateRequest: + properties: + public: + type: boolean + description: Whether the template is public or only accessible by the team + TemplateUpdateResponse: + required: + - names + properties: + names: + type: array + description: Names of the template (namespace/alias format when namespaced) + items: + type: string + CPUCount: + type: integer + format: int32 + minimum: 1 + description: CPU cores for the sandbox + MemoryMB: + type: integer + format: int32 + minimum: 128 + description: Memory for the sandbox in MiB + DiskSizeMB: + type: integer + format: int32 + minimum: 0 + description: Disk size for the sandbox in MiB + EnvdVersion: + type: string + description: Version of the envd running in the sandbox + SandboxMetadata: + additionalProperties: + type: string + description: Metadata of the sandbox + SandboxState: + type: string + description: State of the sandbox + enum: + - running + - paused + SnapshotInfo: + type: object + required: + - snapshotID + - names + properties: + snapshotID: + type: string + description: Identifier of the snapshot template including the tag. Uses + namespace/alias when a name was provided (e.g. team-slug/my-snapshot:default), + otherwise falls back to the raw template ID (e.g. abc123:default). + names: + type: array + items: + type: string + description: Full names of the snapshot template including team namespace + and tag (e.g. 
team-slug/my-snapshot:v2)
+    Mcp:
+      type: object
+      description: MCP configuration for the sandbox
+      additionalProperties: {}
+      nullable: true
+    SandboxNetworkConfig:
+      type: object
+      properties:
+        allowPublicTraffic:
+          type: boolean
+          default: true
+          description: Whether the sandbox URLs are publicly accessible. When set
+            to false, the sandbox URLs are accessible only with authentication.
+        allowOut:
+          type: array
+          description: List of allowed CIDR blocks or IP addresses for egress traffic.
+            Allowed addresses always take precedence over blocked addresses.
+          items:
+            type: string
+        denyOut:
+          type: array
+          description: List of denied CIDR blocks or IP addresses for egress traffic
+          items:
+            type: string
+        maskRequestHost:
+          type: string
+          description: Specify host mask which will be used for all sandbox requests
+    SandboxAutoResumePolicy:
+      type: string
+      description: Auto-resume policy for paused sandboxes. Default is off.
+      default: 'off'
+      enum:
+      - any
+      - 'off'
+    SandboxAutoResumeConfig:
+      type: object
+      description: Auto-resume configuration for paused sandboxes. Default is off.
+ required: + - policy + properties: + policy: + $ref: '#/components/schemas/SandboxAutoResumePolicy' + SandboxLog: + description: Log entry with timestamp and line + required: + - timestamp + - line + properties: + timestamp: + type: string + format: date-time + description: Timestamp of the log entry + line: + type: string + description: Log line content + SandboxLogEntry: + required: + - timestamp + - level + - message + - fields + properties: + timestamp: + type: string + format: date-time + description: Timestamp of the log entry + message: + type: string + description: Log message content + level: + $ref: '#/components/schemas/LogLevel' + fields: + type: object + additionalProperties: + type: string + SandboxLogs: + required: + - logs + - logEntries + properties: + logs: + description: Logs of the sandbox + type: array + items: + $ref: '#/components/schemas/SandboxLog' + logEntries: + description: Structured logs of the sandbox + type: array + items: + $ref: '#/components/schemas/SandboxLogEntry' + SandboxMetric: + description: Metric entry with timestamp and line + required: + - timestamp + - timestampUnix + - cpuCount + - cpuUsedPct + - memUsed + - memTotal + - diskUsed + - diskTotal + properties: + timestamp: + type: string + format: date-time + deprecated: true + description: Timestamp of the metric entry + timestampUnix: + type: integer + format: int64 + description: Timestamp of the metric entry in Unix time (seconds since epoch) + cpuCount: + type: integer + format: int32 + description: Number of CPU cores + cpuUsedPct: + type: number + format: float + description: CPU usage percentage + memUsed: + type: integer + format: int64 + description: Memory used in bytes + memTotal: + type: integer + format: int64 + description: Total memory in bytes + diskUsed: + type: integer + format: int64 + description: Disk used in bytes + diskTotal: + type: integer + format: int64 + description: Total disk space in bytes + SandboxVolumeMount: + type: object + 
properties: + name: + type: string + description: Name of the volume + path: + type: string + description: Path of the volume + required: + - name + - path + Sandbox: + required: + - templateID + - sandboxID + - clientID + - envdVersion + properties: + templateID: + type: string + description: Identifier of the template from which is the sandbox created + sandboxID: + type: string + description: Identifier of the sandbox + alias: + type: string + description: Alias of the template + clientID: + type: string + deprecated: true + description: Identifier of the client + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + envdAccessToken: + type: string + description: Access token used for envd communication + trafficAccessToken: + type: string + nullable: true + description: Token required for accessing sandbox via proxy. + domain: + type: string + nullable: true + description: Base domain where the sandbox traffic is accessible + SandboxDetail: + required: + - templateID + - sandboxID + - clientID + - startedAt + - cpuCount + - memoryMB + - diskSizeMB + - endAt + - state + - envdVersion + - volumeMounts + properties: + templateID: + type: string + description: Identifier of the template from which is the sandbox created + alias: + type: string + description: Alias of the template + sandboxID: + type: string + description: Identifier of the sandbox + clientID: + type: string + deprecated: true + description: Identifier of the client + startedAt: + type: string + format: date-time + description: Time when the sandbox was started + endAt: + type: string + format: date-time + description: Time when the sandbox will expire + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + envdAccessToken: + type: string + description: Access token used for envd communication + domain: + type: string + nullable: true + description: Base domain where the sandbox traffic is accessible + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: 
'#/components/schemas/MemoryMB' + diskSizeMB: + $ref: '#/components/schemas/DiskSizeMB' + metadata: + $ref: '#/components/schemas/SandboxMetadata' + state: + $ref: '#/components/schemas/SandboxState' + volumeMounts: + type: array + items: + $ref: '#/components/schemas/SandboxVolumeMount' + ListedSandbox: + required: + - templateID + - sandboxID + - clientID + - startedAt + - cpuCount + - memoryMB + - diskSizeMB + - endAt + - state + - envdVersion + - volumeMounts + properties: + templateID: + type: string + description: Identifier of the template from which is the sandbox created + alias: + type: string + description: Alias of the template + sandboxID: + type: string + description: Identifier of the sandbox + clientID: + type: string + deprecated: true + description: Identifier of the client + startedAt: + type: string + format: date-time + description: Time when the sandbox was started + endAt: + type: string + format: date-time + description: Time when the sandbox will expire + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + diskSizeMB: + $ref: '#/components/schemas/DiskSizeMB' + metadata: + $ref: '#/components/schemas/SandboxMetadata' + state: + $ref: '#/components/schemas/SandboxState' + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + volumeMounts: + type: array + items: + $ref: '#/components/schemas/SandboxVolumeMount' + SandboxesWithMetrics: + required: + - sandboxes + properties: + sandboxes: + additionalProperties: + $ref: '#/components/schemas/SandboxMetric' + NewSandbox: + required: + - templateID + properties: + templateID: + type: string + description: Identifier of the required template + timeout: + type: integer + format: int32 + minimum: 0 + default: 15 + description: Time to live for the sandbox in seconds. 
+ autoPause: + type: boolean + default: false + description: Automatically pauses the sandbox after the timeout + autoResume: + $ref: '#/components/schemas/SandboxAutoResumeConfig' + secure: + type: boolean + description: Secure all system communication with sandbox + allow_internet_access: + type: boolean + description: Allow sandbox to access the internet. When set to false, it + behaves the same as specifying denyOut to 0.0.0.0/0 in the network config. + network: + $ref: '#/components/schemas/SandboxNetworkConfig' + metadata: + $ref: '#/components/schemas/SandboxMetadata' + envVars: + $ref: '#/components/schemas/EnvVars' + mcp: + $ref: '#/components/schemas/Mcp' + volumeMounts: + type: array + items: + $ref: '#/components/schemas/SandboxVolumeMount' + ResumedSandbox: + properties: + timeout: + type: integer + format: int32 + minimum: 0 + default: 15 + description: Time to live for the sandbox in seconds. + autoPause: + type: boolean + deprecated: true + description: Automatically pauses the sandbox after the timeout + ConnectSandbox: + type: object + required: + - timeout + properties: + timeout: + description: Timeout in seconds from the current time after which the sandbox + should expire + type: integer + format: int32 + minimum: 0 + TeamMetric: + description: Team metric with timestamp + required: + - timestamp + - timestampUnix + - concurrentSandboxes + - sandboxStartRate + properties: + timestamp: + type: string + format: date-time + deprecated: true + description: Timestamp of the metric entry + timestampUnix: + type: integer + format: int64 + description: Timestamp of the metric entry in Unix time (seconds since epoch) + concurrentSandboxes: + type: integer + format: int32 + description: The number of concurrent sandboxes for the team + sandboxStartRate: + type: number + format: float + description: Number of sandboxes started per second + MaxTeamMetric: + description: Team metric with timestamp + required: + - timestamp + - timestampUnix + - value + 
properties: + timestamp: + type: string + format: date-time + deprecated: true + description: Timestamp of the metric entry + timestampUnix: + type: integer + format: int64 + description: Timestamp of the metric entry in Unix time (seconds since epoch) + value: + type: number + description: The maximum value of the requested metric in the given interval + Template: + required: + - templateID + - buildID + - cpuCount + - memoryMB + - diskSizeMB + - public + - createdAt + - updatedAt + - createdBy + - lastSpawnedAt + - spawnCount + - buildCount + - envdVersion + - aliases + - names + - buildStatus + properties: + templateID: + type: string + description: Identifier of the template + buildID: + type: string + description: Identifier of the last successful build for given template + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + diskSizeMB: + $ref: '#/components/schemas/DiskSizeMB' + public: + type: boolean + description: Whether the template is public or only accessible by the team + aliases: + type: array + description: Aliases of the template + deprecated: true + items: + type: string + names: + type: array + description: Names of the template (namespace/alias format when namespaced) + items: + type: string + createdAt: + type: string + format: date-time + description: Time when the template was created + updatedAt: + type: string + format: date-time + description: Time when the template was last updated + createdBy: + allOf: + - $ref: '#/components/schemas/TeamUser' + nullable: true + lastSpawnedAt: + type: string + nullable: true + format: date-time + description: Time when the template was last used + spawnCount: + type: integer + format: int64 + description: Number of times the template was used + buildCount: + type: integer + format: int32 + description: Number of times the template was built + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + buildStatus: + $ref: 
'#/components/schemas/TemplateBuildStatus' + TemplateRequestResponseV3: + required: + - templateID + - buildID + - public + - aliases + - names + - tags + properties: + templateID: + type: string + description: Identifier of the template + buildID: + type: string + description: Identifier of the last successful build for given template + public: + type: boolean + description: Whether the template is public or only accessible by the team + names: + type: array + description: Names of the template + items: + type: string + tags: + type: array + description: Tags assigned to the template build + items: + type: string + aliases: + type: array + description: Aliases of the template + deprecated: true + items: + type: string + TemplateLegacy: + required: + - templateID + - buildID + - cpuCount + - memoryMB + - diskSizeMB + - public + - createdAt + - updatedAt + - createdBy + - lastSpawnedAt + - spawnCount + - buildCount + - envdVersion + - aliases + properties: + templateID: + type: string + description: Identifier of the template + buildID: + type: string + description: Identifier of the last successful build for given template + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + diskSizeMB: + $ref: '#/components/schemas/DiskSizeMB' + public: + type: boolean + description: Whether the template is public or only accessible by the team + aliases: + type: array + description: Aliases of the template + items: + type: string + createdAt: + type: string + format: date-time + description: Time when the template was created + updatedAt: + type: string + format: date-time + description: Time when the template was last updated + createdBy: + allOf: + - $ref: '#/components/schemas/TeamUser' + nullable: true + lastSpawnedAt: + type: string + nullable: true + format: date-time + description: Time when the template was last used + spawnCount: + type: integer + format: int64 + description: Number of times the template was used + 
buildCount: + type: integer + format: int32 + description: Number of times the template was built + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + TemplateBuild: + required: + - buildID + - status + - createdAt + - updatedAt + - cpuCount + - memoryMB + properties: + buildID: + type: string + format: uuid + description: Identifier of the build + status: + $ref: '#/components/schemas/TemplateBuildStatus' + createdAt: + type: string + format: date-time + description: Time when the build was created + updatedAt: + type: string + format: date-time + description: Time when the build was last updated + finishedAt: + type: string + format: date-time + description: Time when the build was finished + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + diskSizeMB: + $ref: '#/components/schemas/DiskSizeMB' + envdVersion: + $ref: '#/components/schemas/EnvdVersion' + TemplateWithBuilds: + required: + - templateID + - public + - aliases + - names + - createdAt + - updatedAt + - lastSpawnedAt + - spawnCount + - builds + properties: + templateID: + type: string + description: Identifier of the template + public: + type: boolean + description: Whether the template is public or only accessible by the team + aliases: + type: array + description: Aliases of the template + deprecated: true + items: + type: string + names: + type: array + description: Names of the template (namespace/alias format when namespaced) + items: + type: string + createdAt: + type: string + format: date-time + description: Time when the template was created + updatedAt: + type: string + format: date-time + description: Time when the template was last updated + lastSpawnedAt: + type: string + nullable: true + format: date-time + description: Time when the template was last used + spawnCount: + type: integer + format: int64 + description: Number of times the template was used + builds: + type: array + description: List of builds for the template + 
items: + $ref: '#/components/schemas/TemplateBuild' + TemplateAliasResponse: + required: + - templateID + - public + properties: + templateID: + type: string + description: Identifier of the template + public: + type: boolean + description: Whether the template is public or only accessible by the team + TemplateBuildRequest: + required: + - dockerfile + properties: + alias: + description: Alias of the template + type: string + dockerfile: + description: Dockerfile for the template + type: string + teamID: + type: string + description: Identifier of the team + startCmd: + description: Start command to execute in the template after the build + type: string + readyCmd: + description: Ready check command to execute in the template after the build + type: string + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + TemplateStep: + description: Step in the template build process + required: + - type + properties: + type: + type: string + description: Type of the step + args: + default: [] + type: array + description: Arguments for the step + items: + type: string + filesHash: + type: string + description: Hash of the files used in the step + force: + default: false + type: boolean + description: Whether the step should be forced to run regardless of the + cache + TemplateBuildRequestV3: + properties: + name: + description: Name of the template. Can include a tag with colon separator + (e.g. "my-template" or "my-template:v1"). If tag is included, it will + be treated as if the tag was provided in the tags array. + type: string + tags: + type: array + description: Tags to assign to the template build + items: + type: string + alias: + description: Alias of the template. Deprecated, use name instead. 
+ type: string + deprecated: true + teamID: + deprecated: true + type: string + description: Identifier of the team + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + TemplateBuildRequestV2: + required: + - alias + properties: + alias: + description: Alias of the template + type: string + teamID: + deprecated: true + type: string + description: Identifier of the team + cpuCount: + $ref: '#/components/schemas/CPUCount' + memoryMB: + $ref: '#/components/schemas/MemoryMB' + FromImageRegistry: + oneOf: + - $ref: '#/components/schemas/AWSRegistry' + - $ref: '#/components/schemas/GCPRegistry' + - $ref: '#/components/schemas/GeneralRegistry' + discriminator: + propertyName: type + mapping: + aws: '#/components/schemas/AWSRegistry' + gcp: '#/components/schemas/GCPRegistry' + registry: '#/components/schemas/GeneralRegistry' + AWSRegistry: + type: object + required: + - type + - awsAccessKeyId + - awsSecretAccessKey + - awsRegion + properties: + type: + type: string + enum: + - aws + description: Type of registry authentication + awsAccessKeyId: + type: string + description: AWS Access Key ID for ECR authentication + awsSecretAccessKey: + type: string + description: AWS Secret Access Key for ECR authentication + awsRegion: + type: string + description: AWS Region where the ECR registry is located + GCPRegistry: + type: object + required: + - type + - serviceAccountJson + properties: + type: + type: string + enum: + - gcp + description: Type of registry authentication + serviceAccountJson: + type: string + description: Service Account JSON for GCP authentication + GeneralRegistry: + type: object + required: + - type + - username + - password + properties: + type: + type: string + enum: + - registry + description: Type of registry authentication + username: + type: string + description: Username to use for the registry + password: + type: string + description: Password to use for the registry + TemplateBuildStartV2: + 
type: object
+      properties:
+        fromImage:
+          type: string
+          description: Image to use as a base for the template build
+        fromTemplate:
+          type: string
+          description: Template to use as a base for the template build
+        fromImageRegistry:
+          $ref: '#/components/schemas/FromImageRegistry'
+        force:
+          default: false
+          type: boolean
+          description: Whether the whole build should be forced to run regardless
+            of the cache
+        steps:
+          default: []
+          description: List of steps to execute in the template build
+          type: array
+          items:
+            $ref: '#/components/schemas/TemplateStep'
+        startCmd:
+          description: Start command to execute in the template after the build
+          type: string
+        readyCmd:
+          description: Ready check command to execute in the template after the build
+          type: string
+    TemplateBuildFileUpload:
+      required:
+      - present
+      properties:
+        present:
+          type: boolean
+          description: Whether the file is already present in the cache
+        url:
+          description: URL where the file should be uploaded
+          type: string
+    LogLevel:
+      type: string
+      description: Severity level of the log entry
+      enum:
+      - debug
+      - info
+      - warn
+      - error
+    BuildLogEntry:
+      required:
+      - timestamp
+      - message
+      - level
+      properties:
+        timestamp:
+          type: string
+          format: date-time
+          description: Timestamp of the log entry
+        message:
+          type: string
+          description: Log message content
+        level:
+          $ref: '#/components/schemas/LogLevel'
+        step:
+          type: string
+          description: Step in the build process related to the log entry
+    BuildStatusReason:
+      required:
+      - message
+      properties:
+        message:
+          type: string
+          description: Message with the status reason, currently reporting only for
+            error status
+        step:
+          type: string
+          description: Step that failed
+        logEntries:
+          default: []
+          description: Log entries related to the status reason
+          type: array
+          items:
+            $ref: '#/components/schemas/BuildLogEntry'
+    TemplateBuildStatus:
+      type: string
+      description: Status of the template build
+      enum:
+      - building
+      - waiting
+      - ready
+      - error
+    
TemplateBuildInfo: + required: + - templateID + - buildID + - status + - logs + - logEntries + properties: + logs: + default: [] + description: Build logs + type: array + items: + type: string + logEntries: + default: [] + description: Build logs structured + type: array + items: + $ref: '#/components/schemas/BuildLogEntry' + templateID: + type: string + description: Identifier of the template + buildID: + type: string + description: Identifier of the build + status: + $ref: '#/components/schemas/TemplateBuildStatus' + reason: + $ref: '#/components/schemas/BuildStatusReason' + TemplateBuildLogsResponse: + required: + - logs + properties: + logs: + default: [] + description: Build logs structured + type: array + items: + $ref: '#/components/schemas/BuildLogEntry' + LogsDirection: + type: string + description: Direction of the logs that should be returned + enum: + - forward + - backward + x-enum-varnames: + - LogsDirectionForward + - LogsDirectionBackward + LogsSource: + type: string + description: Source of the logs that should be returned + enum: + - temporary + - persistent + x-enum-varnames: + - LogsSourceTemporary + - LogsSourcePersistent + AssignedTemplateTags: + required: + - tags + - buildID + properties: + tags: + type: array + items: + type: string + description: Assigned tags of the template + buildID: + type: string + format: uuid + description: Identifier of the build associated with these tags + TemplateTag: + required: + - tag + - buildID + - createdAt + properties: + tag: + type: string + description: The tag name + buildID: + type: string + format: uuid + description: Identifier of the build associated with this tag + createdAt: + type: string + format: date-time + description: Time when the tag was assigned + AssignTemplateTagsRequest: + required: + - target + - tags + properties: + target: + type: string + description: Target template in "name:tag" format + tags: + description: Tags to assign to the template + type: array + items: + type: string + 
DeleteTemplateTagsRequest: + required: + - name + - tags + properties: + name: + type: string + description: Name of the template + tags: + description: Tags to delete + type: array + items: + type: string + Volume: + type: object + properties: + volumeID: + type: string + description: ID of the volume + name: + type: string + description: Name of the volume + required: + - volumeID + - name + NewVolume: + type: object + properties: + name: + type: string + description: Name of the volume + pattern: ^[a-zA-Z0-9_-]+$ + required: + - name +tags: +- name: files +- name: filesystem.Filesystem +- name: process.Process +- name: templates +- name: sandboxes +- name: auth +- name: access-tokens +- name: api-keys +- name: tags +- name: volumes +security: [] From b4058d4d5118ccb911b60ff002b96961beeeb411 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 23 Feb 2026 17:22:28 +0100 Subject: [PATCH 02/37] Add OpenAPI generation and validation scripts - scripts/envd.py: Merges proto-generated and hand-written specs into a single openapi-public.yml - scripts/validate_api_reference.py: Validates the spec against the live API across 12 phases --- scripts/envd.py | 758 +++++++++ scripts/validate_api_reference.py | 2376 +++++++++++++++++++++++++++++ 2 files changed, 3134 insertions(+) create mode 100755 scripts/envd.py create mode 100755 scripts/validate_api_reference.py diff --git a/scripts/envd.py b/scripts/envd.py new file mode 100755 index 00000000..f55c104d --- /dev/null +++ b/scripts/envd.py @@ -0,0 +1,758 @@ +#!/usr/bin/env python3 +"""Generate a merged OpenAPI spec for the full E2B developer-facing API. 
+
+Combines multiple sources into a single openapi-public.yml:
+
+  Sandbox API (served on <port>-<sandboxID>.e2b.app):
+    - Proto-generated OpenAPI for process/filesystem Connect RPC
+    - Hand-written REST spec (packages/envd/spec/envd.yaml)
+    - Auto-generated stubs for streaming RPCs (parsed from .proto files)
+
+  Platform API (served on api.e2b.app):
+    - Main E2B API spec (spec/openapi.yml)
+
+Usage:
+    python3 scripts/envd.py
+
+Outputs openapi-public.yml in the current working directory.
+Requires: Docker, PyYAML (pip install pyyaml).
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import subprocess
+import sys
+import tempfile
+from dataclasses import dataclass
+from glob import glob
+from typing import Any
+
+import yaml
+
+# ---------------------------------------------------------------------------
+# Configuration
+# ---------------------------------------------------------------------------
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+REPO_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, ".."))
+
+# Sandbox (envd) specs
+ENVD_SPEC_DIR = os.path.join(REPO_ROOT, "packages/envd/spec")
+ENVD_REST_SPEC = os.path.join(ENVD_SPEC_DIR, "envd.yaml")
+
+# Platform API specs
+API_SPEC = os.path.join(REPO_ROOT, "spec/openapi.yml")
+
+DOCKER_IMAGE = "protoc-gen-connect-openapi"
+
+DOCKERFILE = """\
+FROM golang:1.25-alpine
+RUN apk add --no-cache git
+RUN go install github.com/bufbuild/buf/cmd/buf@v1.50.0
+RUN go install github.com/sudorandom/protoc-gen-connect-openapi@v0.25.3
+ENV PATH="/go/bin:${PATH}"
+"""
+
+BUF_GEN_YAML = """\
+version: v1
+plugins:
+  - plugin: connect-openapi
+    out: /output
+    opt:
+      - format=yaml
+"""
+
+# Server definitions for the two API surfaces
+SANDBOX_SERVER = {
+    "url": "https://{port}-{sandboxID}.e2b.app",
+    "description": "Sandbox API (envd) — runs inside each sandbox",
+    "variables": {
+        "port": {"default": "49983", "description": "Port number"},
+        "sandboxID": {"default": "{sandbox-id}", "description": "Sandbox identifier"},
+    
}, +} + +PLATFORM_SERVER = { + "url": "https://api.e2b.app", + "description": "E2B Platform API", +} + +# Tag used to mark sandbox-specific paths so we can attach the right server +SANDBOX_TAG = "x-e2b-server" + +# Security scheme name for envd endpoints (must not collide with platform's AccessTokenAuth) +SANDBOX_AUTH_SCHEME = "SandboxAccessTokenAuth" + +# --------------------------------------------------------------------------- +# Proto parsing — auto-detect streaming RPCs +# --------------------------------------------------------------------------- + +@dataclass +class RpcMethod: + """An RPC method parsed from a .proto file.""" + + package: str + service: str + method: str + request_type: str + response_type: str + client_streaming: bool + server_streaming: bool + comment: str + + @property + def path(self) -> str: + return f"/{self.package}.{self.service}/{self.method}" + + @property + def tag(self) -> str: + return f"{self.package}.{self.service}" + + @property + def operation_id(self) -> str: + return f"{self.package}.{self.service}.{self.method}" + + @property + def request_schema_ref(self) -> str: + return f"#/components/schemas/{self.package}.{self.request_type}" + + @property + def response_schema_ref(self) -> str: + return f"#/components/schemas/{self.package}.{self.response_type}" + + @property + def is_streaming(self) -> bool: + return self.client_streaming or self.server_streaming + + @property + def streaming_label(self) -> str: + if self.client_streaming and self.server_streaming: + return "Bidirectional-streaming" + if self.client_streaming: + return "Client-streaming" + if self.server_streaming: + return "Server-streaming" + return "Unary" + + +_PACKAGE_RE = re.compile(r"^package\s+(\w+)\s*;", re.MULTILINE) +_SERVICE_RE = re.compile(r"service\s+(\w+)\s*\{", re.MULTILINE) +_RPC_RE = re.compile( + r"rpc\s+(\w+)\s*\(\s*(stream\s+)?(\w+)\s*\)\s*returns\s*\(\s*(stream\s+)?(\w+)\s*\)" +) + + +def parse_proto_file(path: str) -> list[RpcMethod]: + 
"""Parse a .proto file and return all RPC methods found.""" + with open(path) as f: + content = f.read() + + pkg_match = _PACKAGE_RE.search(content) + if not pkg_match: + return [] + package = pkg_match.group(1) + + methods: list[RpcMethod] = [] + + for svc_match in _SERVICE_RE.finditer(content): + service_name = svc_match.group(1) + brace_start = content.index("{", svc_match.start()) + depth, pos = 1, brace_start + 1 + while depth > 0 and pos < len(content): + if content[pos] == "{": + depth += 1 + elif content[pos] == "}": + depth -= 1 + pos += 1 + service_body = content[brace_start:pos] + + for rpc_match in _RPC_RE.finditer(service_body): + rpc_start = service_body.rfind("\n", 0, rpc_match.start()) + comment = _extract_comment(service_body, rpc_start) + + methods.append(RpcMethod( + package=package, + service=service_name, + method=rpc_match.group(1), + request_type=rpc_match.group(3), + response_type=rpc_match.group(5), + client_streaming=bool(rpc_match.group(2)), + server_streaming=bool(rpc_match.group(4)), + comment=comment, + )) + + return methods + + +def _extract_comment(text: str, before_pos: int) -> str: + """Extract // comment lines immediately above a position in text.""" + lines = text[:before_pos].rstrip().split("\n") + comment_lines: list[str] = [] + for line in reversed(lines): + stripped = line.strip() + if stripped.startswith("//"): + comment_lines.append(stripped.lstrip("/ ")) + elif stripped == "": + continue + else: + break + comment_lines.reverse() + return " ".join(comment_lines) + + +def find_streaming_rpcs(spec_dir: str) -> list[RpcMethod]: + """Scan all .proto files under spec_dir and return streaming RPCs.""" + streaming: list[RpcMethod] = [] + for proto_path in sorted(glob(os.path.join(spec_dir, "**/*.proto"), recursive=True)): + for rpc in parse_proto_file(proto_path): + if rpc.is_streaming: + streaming.append(rpc) + return streaming + + +def build_streaming_path(rpc: RpcMethod) -> dict[str, Any]: + """Build an OpenAPI path item for a 
streaming RPC."""
    # Connect streaming RPCs have no native OpenAPI representation, so we
    # synthesize a POST path item with the request/response message schemas.
    description = (
        f"{rpc.streaming_label} RPC. "
        f"{rpc.comment + '. ' if rpc.comment else ''}"
        f"Use the Connect protocol with streaming support."
    )
    return {
        "post": {
            "tags": [rpc.tag],
            "summary": rpc.method,
            "description": description,
            "operationId": rpc.operation_id,
            "requestBody": {
                "content": {
                    "application/json": {
                        "schema": {"$ref": rpc.request_schema_ref}
                    }
                },
                "required": True,
            },
            "responses": {
                "200": {
                    "description": f"Stream of {rpc.response_type} events",
                    "content": {
                        "application/json": {
                            "schema": {"$ref": rpc.response_schema_ref}
                        }
                    },
                },
            },
        }
    }


# ---------------------------------------------------------------------------
# Docker build & proto generation
# ---------------------------------------------------------------------------

def docker_build_image() -> None:
    """Build the Docker image with buf + protoc-gen-connect-openapi.

    Writes DOCKERFILE to a temp file (delete=False so `docker build` can read
    it after the context manager closes it), then removes it in `finally`.
    Raises CalledProcessError if the build fails (check=True).
    """
    print("==> Building Docker image")
    with tempfile.NamedTemporaryFile(mode="w", suffix=".Dockerfile", delete=False) as f:
        f.write(DOCKERFILE)
        dockerfile_path = f.name
    try:
        subprocess.run(
            ["docker", "build", "-t", DOCKER_IMAGE, "-f", dockerfile_path, "."],
            check=True,
            cwd=REPO_ROOT,
        )
    finally:
        # Always remove the temp Dockerfile, even if the build fails.
        os.unlink(dockerfile_path)


def docker_generate_specs() -> list[str]:
    """Run buf generate inside Docker, return list of generated YAML strings.

    Mounts the proto dir read-only, a generated buf.gen.yaml template, and a
    temp output dir; exits the process with status 1 if nothing is produced.
    """
    print("==> Generating OpenAPI specs from proto files")
    with tempfile.TemporaryDirectory() as tmpdir:
        buf_gen_path = os.path.join(tmpdir, "buf.gen.yaml")
        with open(buf_gen_path, "w") as f:
            f.write(BUF_GEN_YAML)

        output_dir = os.path.join(tmpdir, "output")
        os.makedirs(output_dir)

        subprocess.run(
            [
                "docker", "run", "--rm",
                "-v", f"{ENVD_SPEC_DIR}:/spec:ro",
                "-v", f"{buf_gen_path}:/config/buf.gen.yaml:ro",
                "-v", f"{output_dir}:/output",
                DOCKER_IMAGE,
                "sh", "-c",
                "cd /spec && buf generate --template /config/buf.gen.yaml",
            ],
            check=True,
        )

        # Collect every YAML file buf produced; sorted for deterministic order.
        generated: list[str] = []
        for root, _, files in os.walk(output_dir):
            for name in sorted(files):
                if name.endswith((".yaml", ".yml")):
                    path = os.path.join(root, name)
                    rel = os.path.relpath(path, output_dir)
                    print(f"  Generated: {rel}")
                    with open(path) as f:
                        generated.append(f.read())

    if not generated:
        print("ERROR: No files were generated", file=sys.stderr)
        sys.exit(1)

    return generated


# ---------------------------------------------------------------------------
# OpenAPI merging & post-processing
# ---------------------------------------------------------------------------

def load_yaml_file(path: str) -> str:
    """Load a YAML file and return its raw content (unparsed string)."""
    print(f"==> Loading spec: {os.path.relpath(path, REPO_ROOT)}")
    with open(path) as f:
        return f.read()


def merge_specs(raw_docs: list[str], protected_paths: set[str] | None = None) -> dict[str, Any]:
    """Merge multiple raw YAML OpenAPI docs into a single spec.

    Args:
        raw_docs: Raw YAML strings to merge (order matters — later docs
            overwrite earlier ones for paths and component entries).
        protected_paths: Paths that should not be overwritten once set.
            Used to prevent the platform API from overwriting
            envd paths that share the same name (e.g. /health).

    Returns:
        A new OpenAPI 3.1 spec dict with fixed info/servers and merged
        paths, components, tags, and security entries.
    """
    merged: dict[str, Any] = {
        "openapi": "3.1.0",
        "info": {
            "title": "E2B API",
            "version": "0.1.0",
            "description": (
                "Complete E2B developer API. "
                "Platform endpoints are served on api.e2b.app. "
                "Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app."
            ),
        },
        "servers": [PLATFORM_SERVER],
        "paths": {},
        "components": {},
    }

    for raw in raw_docs:
        doc = yaml.safe_load(raw)
        if not doc:
            continue

        # Paths: last writer wins, except protected paths keep the first value.
        for path, methods in doc.get("paths", {}).items():
            if protected_paths and path in protected_paths and path in merged["paths"]:
                continue
            merged["paths"][path] = methods

        # Components: merge per section (schemas, securitySchemes, ...).
        for section, entries in doc.get("components", {}).items():
            if isinstance(entries, dict):
                merged["components"].setdefault(section, {}).update(entries)

        # NOTE(review): tags are extended without de-duplication, unlike
        # security below — duplicate tag entries across docs will accumulate.
        if "tags" in doc:
            merged.setdefault("tags", []).extend(doc["tags"])

        if "security" in doc:
            existing = merged.setdefault("security", [])
            for entry in doc["security"]:
                if entry not in existing:
                    existing.append(entry)

    return merged


def tag_paths_with_server(
    spec: dict[str, Any],
    paths: set[str],
    server: dict[str, Any],
) -> None:
    """Attach a specific server override to a set of paths.

    OpenAPI 3.1 allows per-path server overrides so clients know which
    base URL to use for each endpoint.
    """
    for path, path_item in spec["paths"].items():
        if path in paths:
            path_item["servers"] = [server]


def fill_streaming_endpoints(spec: dict[str, Any], streaming_rpcs: list[RpcMethod]) -> None:
    """Replace empty {} streaming path items with proper OpenAPI definitions.

    protoc-gen-connect-openapi emits {} for streaming RPCs because OpenAPI
    has no native streaming representation. We detect these from the proto
    files and fill them in with proper request/response schemas.
    """
    for rpc in streaming_rpcs:
        if rpc.path in spec["paths"]:
            print(f"  Filling streaming endpoint: {rpc.path} ({rpc.streaming_label})")
            spec["paths"][rpc.path] = build_streaming_path(rpc)


def apply_sandbox_auth(spec: dict[str, Any], envd_paths: set[str]) -> None:
    """Ensure all envd/sandbox endpoints declare the SandboxAccessTokenAuth security.

    The hand-written envd.yaml already has security declarations, but the
    proto-generated Connect RPC endpoints don't. Add optional auth
    (SandboxAccessTokenAuth or anonymous) to any envd endpoint missing it.
    """
    # NOTE(review): the same auth_security list object is assigned to every
    # operation — mutating one operation's security later would affect all.
    auth_security = [{SANDBOX_AUTH_SCHEME: []}, {}]
    for path in envd_paths:
        path_item = spec["paths"].get(path)
        if not path_item:
            continue
        for method in ("get", "post", "put", "patch", "delete"):
            op = path_item.get(method)
            if op and "security" not in op:
                op["security"] = auth_security


def fix_security_schemes(spec: dict[str, Any]) -> None:
    """Fix invalid apiKey securityScheme syntax.

    The source envd.yaml uses `scheme: header` which is wrong for
    type: apiKey — OpenAPI requires `in: header` instead.
    """
    for scheme in spec.get("components", {}).get("securitySchemes", {}).values():
        if scheme.get("type") == "apiKey" and "scheme" in scheme:
            scheme["in"] = scheme.pop("scheme")


def rename_envd_auth_scheme(spec: dict[str, Any]) -> None:
    """Rename AccessTokenAuth → SandboxAccessTokenAuth in the merged spec.

    The source envd.yaml uses AccessTokenAuth for code generation compatibility,
    but the public docs need SandboxAccessTokenAuth to avoid collisions with
    the platform API's AccessTokenAuth scheme.
    """
    old_name = "AccessTokenAuth"
    new_name = SANDBOX_AUTH_SCHEME
    schemes = spec.get("components", {}).get("securitySchemes", {})
    if old_name in schemes:
        schemes[new_name] = schemes.pop(old_name)
    # Update all security references in operations
    for path_item in spec.get("paths", {}).values():
        for method in ("get", "post", "put", "patch", "delete", "head", "options"):
            op = path_item.get(method)
            if not op or "security" not in op:
                continue
            for sec_req in op["security"]:
                if old_name in sec_req:
                    sec_req[new_name] = sec_req.pop(old_name)
    # Update top-level security
    for sec_req in spec.get("security", []):
        if old_name in sec_req:
            sec_req[new_name] = sec_req.pop(old_name)


# Mapping of (path, method) to desired operationId for the public docs.
+# These are added at post-processing time to avoid breaking Go code generation +# (oapi-codegen derives type names from operationIds). +ENVD_OPERATION_IDS: dict[tuple[str, str], str] = { + ("/health", "get"): "getHealth", + ("/metrics", "get"): "getMetrics", + ("/init", "post"): "initSandbox", + ("/envs", "get"): "getEnvVars", + ("/files", "get"): "downloadFile", + ("/files", "post"): "uploadFile", +} + + +def add_operation_ids(spec: dict[str, Any]) -> None: + """Add operationIds to envd endpoints for clean documentation. + + These are added at post-processing time (not in the source spec) to + avoid changing generated Go type names. + """ + count = 0 + for (path, method), op_id in ENVD_OPERATION_IDS.items(): + path_item = spec.get("paths", {}).get(path) + if not path_item: + continue + op = path_item.get(method) + if op and "operationId" not in op: + op["operationId"] = op_id + count += 1 + if count: + print(f"==> Added {count} operationIds to envd endpoints") + + +def _strip_supabase_security(path_item: dict[str, Any]) -> None: + """Remove Supabase security entries from all operations in a path item. + + Each operation's security list is an OR of auth options. We remove + any option that references a Supabase scheme, keeping the rest. 
+ """ + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op or "security" not in op: + continue + op["security"] = [ + sec_req for sec_req in op["security"] + if not any("supabase" in key.lower() for key in sec_req) + ] + + +def _has_admin_token_security(path_item: dict[str, Any]) -> bool: + """Check if any operation in a path item references AdminToken auth.""" + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op: + continue + for sec_req in op.get("security", []): + if any("admin" in key.lower() for key in sec_req): + return True + return False + + +def filter_paths(spec: dict[str, Any]) -> None: + """Clean up paths that should not appear in the public spec. + + - Removes access-token and api-key endpoints + - Removes endpoints using AdminToken auth + - Strips Supabase auth entries from all operations + - Removes Supabase and AdminToken securityScheme definitions + """ + # Remove excluded paths + excluded_prefixes = ("/access-tokens", "/api-keys") + excluded_exact = {"/v2/sandboxes/{sandboxID}/logs", "/init"} + to_remove = [ + p for p in spec["paths"] + if p.startswith(excluded_prefixes) or p in excluded_exact + ] + + # Remove admin-only paths + for path, path_item in spec["paths"].items(): + if path not in to_remove and _has_admin_token_security(path_item): + to_remove.append(path) + + for path in to_remove: + del spec["paths"][path] + if to_remove: + print(f"==> Removed {len(to_remove)} paths (volumes + admin)") + + # Strip supabase security entries from all operations + for path_item in spec["paths"].values(): + _strip_supabase_security(path_item) + + # Remove supabase and admin security scheme definitions + schemes = spec.get("components", {}).get("securitySchemes", {}) + remove_keys = [k for k in schemes if "supabase" in k.lower() or "admin" in k.lower()] + for key in remove_keys: + del schemes[key] + if remove_keys: + 
print(f"==> Removed {len(remove_keys)} internal security schemes") + + +def remove_orphaned_schemas(spec: dict[str, Any]) -> None: + """Remove component schemas that are not referenced anywhere in the spec. + Runs iteratively since removing schemas may orphan others.""" + all_orphaned: list[str] = [] + + while True: + spec_text = "" + # Serialize paths + top-level refs (excluding components.schemas itself) + for section in ("paths", "security"): + if section in spec: + spec_text += yaml.dump(spec[section], default_flow_style=False) + for section, entries in spec.get("components", {}).items(): + if section != "schemas": + spec_text += yaml.dump(entries, default_flow_style=False) + # Also check cross-references within schemas + schemas = spec.get("components", {}).get("schemas", {}) + schema_text = yaml.dump(schemas, default_flow_style=False) + + orphaned = [] + for name in list(schemas.keys()): + # Use exact ref pattern to avoid substring collisions + # (e.g. "schemas/Foo" matching inside "schemas/FooBar") + ref_pattern = f"schemas/{name}'" + # Referenced from paths/responses/params + if ref_pattern in spec_text: + continue + # Referenced from other schemas (exclude self-definition) + used = False + for other_name, other_schema in schemas.items(): + if other_name == name: + continue + if ref_pattern in yaml.dump(other_schema, default_flow_style=False): + used = True + break + if not used: + orphaned.append(name) + + if not orphaned: + break + + for name in orphaned: + del schemas[name] + all_orphaned.extend(orphaned) + + if all_orphaned: + print(f"==> Removed {len(all_orphaned)} orphaned schemas: {', '.join(sorted(all_orphaned))}") + + +SANDBOX_NOT_FOUND_RESPONSE = { + "description": "Sandbox not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "required": ["sandboxId", "message", "code"], + "properties": { + "sandboxId": { + "type": "string", + "description": "Identifier of the sandbox", + "example": "i1234abcd5678efgh90jk", + }, + 
"message": { + "type": "string", + "description": "Error message", + "example": "The sandbox was not found", + }, + "code": { + "type": "integer", + "description": "Error code", + "example": 502, + }, + }, + } + } + }, +} + + +EMPTY_RESPONSE_CONTENT = { + "application/json": { + "schema": {"type": "object", "description": "Empty response"} + } +} + + +def add_sandbox_not_found(spec: dict[str, Any], envd_paths: set[str]) -> None: + """Add a 502 response to all sandbox/envd endpoints. + + The load balancer returns 502 when a sandbox is not found. + """ + count = 0 + for path in envd_paths: + path_item = spec["paths"].get(path) + if not path_item: + continue + for method in ("get", "post", "put", "patch", "delete"): + op = path_item.get(method) + if op and "502" not in op.get("responses", {}): + op.setdefault("responses", {})["502"] = SANDBOX_NOT_FOUND_RESPONSE + count += 1 + if count: + print(f"==> Added 502 sandbox-not-found response to {count} operations") + + +def fill_empty_responses(spec: dict[str, Any]) -> None: + """Add an empty content block to any 2xx response that lacks one. + + Mintlify requires a content block on every response to render correctly. 
+ """ + filled = 0 + stripped = 0 + for path, path_item in spec.get("paths", {}).items(): + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op: + continue + responses = op.get("responses", {}) + # Remove "default" responses (generic Connect error envelopes) + if "default" in responses: + del responses["default"] + stripped += 1 + for status, resp in responses.items(): + if isinstance(resp, dict) and str(status).startswith("2") and "content" not in resp: + resp["content"] = EMPTY_RESPONSE_CONTENT + filled += 1 + if filled: + print(f"==> Added empty content block to {filled} responses") + if stripped: + print(f"==> Removed {stripped} default error responses") + + +# --------------------------------------------------------------------------- +# Entrypoint +# --------------------------------------------------------------------------- + +def main() -> None: + docker_build_image() + + # --- Sandbox API (envd) --- + proto_docs = docker_generate_specs() + envd_rest_doc = load_yaml_file(ENVD_REST_SPEC) + + # Track which paths come from envd so we can set their server + envd_raw_docs = [envd_rest_doc] + proto_docs + envd_paths: set[str] = set() + for raw in envd_raw_docs: + doc = yaml.safe_load(raw) + if doc and "paths" in doc: + envd_paths.update(doc["paths"].keys()) + + # --- Platform API --- + api_doc = load_yaml_file(API_SPEC) + + # --- Merge everything --- + # Order: envd first, then platform API (platform schemas take precedence + # for shared names like Error since they're more complete). + # Protect envd paths so the platform API doesn't overwrite them + # (e.g. /health exists in both but the envd version is authoritative). 
    merged = merge_specs(envd_raw_docs + [api_doc], protected_paths=envd_paths)

    # Auto-detect and fill streaming RPC endpoints
    streaming_rpcs = find_streaming_rpcs(ENVD_SPEC_DIR)
    print(f"==> Found {len(streaming_rpcs)} streaming RPCs in proto files")
    fill_streaming_endpoints(merged, streaming_rpcs)
    for rpc in streaming_rpcs:
        envd_paths.add(rpc.path)

    # Attach per-path server overrides so each path has exactly one server
    tag_paths_with_server(merged, envd_paths, SANDBOX_SERVER)
    platform_paths = set(merged["paths"].keys()) - envd_paths
    tag_paths_with_server(merged, platform_paths, PLATFORM_SERVER)

    # Ensure all sandbox endpoints declare auth
    apply_sandbox_auth(merged, envd_paths)

    # Add 502 sandbox-not-found to all envd endpoints
    add_sandbox_not_found(merged, envd_paths)

    # Fix known issues
    fix_security_schemes(merged)
    rename_envd_auth_scheme(merged)
    add_operation_ids(merged)

    # Remove internal/unwanted paths
    filter_paths(merged)

    # Ensure all 2xx responses have a content block (required by Mintlify)
    fill_empty_responses(merged)

    # Clean up unreferenced schemas left over from filtered paths
    remove_orphaned_schemas(merged)

    # Write output
    output_path = os.path.join(os.getcwd(), "e2b-openapi.yml")
    with open(output_path, "w") as f:
        yaml.dump(merged, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"==> Written to {output_path}")


if __name__ == "__main__":
    main()
diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py
new file mode 100755
index 00000000..a88dcf4a
--- /dev/null
+++ b/scripts/validate_api_reference.py
@@ -0,0 +1,2376 @@
#!/usr/bin/env python3
"""
E2B OpenAPI Specification Validator

Validates the openapi-public.yml spec against the live E2B API by calling
every endpoint and deeply comparing response schemas.

Usage:
    E2B_API_KEY=e2b_... python3 scripts/validate_api_reference.py [options]

Options:
    --output FILE     Report output path (default: openapi-validation-report.md)
    --verbose         Show detailed request/response logs
    --skip-sandbox    Skip phases requiring sandbox creation
    --phase N         Run only phase N (1-12)
    --timeout SECS    HTTP request timeout (default: 15)
    --help            Show this help message

Dependencies: stdlib + PyYAML
"""

import json
import os
import re
import ssl
import struct
import sys
import time
import urllib.error
import urllib.parse
import urllib.request
from dataclasses import dataclass, field
from datetime import datetime, timezone
from pathlib import Path

import yaml

# ---------------------------------------------------------------------------
# CONFIG
# ---------------------------------------------------------------------------

PLATFORM_URL = "https://api.e2b.app"
ENVD_PORT = 49983
SPEC_PATH = Path(__file__).resolve().parent.parent / "openapi-public.yml"

# Deliberately nonexistent identifiers used to probe 404/not-found behavior.
FAKE_SANDBOX_ID = "nonexistent-sandbox-000000"
FAKE_TEMPLATE_ID = "nonexistent-template-000000"
FAKE_BUILD_ID = "00000000-0000-0000-0000-000000000000"
FAKE_ALIAS = "nonexistent-alias-000000"
FAKE_HASH = "0" * 64
FAKE_TEAM_ID = "00000000-0000-0000-0000-000000000000"

# RFC3339 regex for date-time format validation
RFC3339_RE = re.compile(
    r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})$"
)
UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.I
)


# ---------------------------------------------------------------------------
# DATA STRUCTURES
# ---------------------------------------------------------------------------


@dataclass
class Finding:
    # A single spec-vs-reality mismatch discovered while validating.
    severity: str  # "critical" | "minor"
    category: str  # "schema" | "status_code" | "extra_field" | "missing_field" | "type_mismatch" | "auth" | "format"
    endpoint: str  # "POST /sandboxes"
    message: str
    expected: str = ""
    actual: str = ""


@dataclass
class EndpointResult:
    # Outcome of exercising one endpoint against the live API.
    method: str
    path: str
    tested: bool = False
    expected_status: int = 0
    actual_status: int = 0
    findings: list[Finding] = field(default_factory=list)
    skip_reason: str | None = None
    response_body: object = None
    surface: str = "platform"  # "platform" | "sandbox"


@dataclass
class SpecIssue:
    # A static (non-runtime) best-practice issue found in the spec itself.
    category: str
    description: str
    location: str = ""


# ---------------------------------------------------------------------------
# HTTP HELPERS
# ---------------------------------------------------------------------------

_ctx = ssl.create_default_context()
VERBOSE = False


def _redact(headers: dict | None) -> dict:
    """Return a copy of headers with credential values truncated/masked for logs."""
    if not headers:
        return {}
    out = {}
    for k, v in headers.items():
        if k.lower() in ("x-api-key", "x-access-token", "authorization"):
            # Long values: keep a 12-char prefix; short ones: fully mask.
            out[k] = v[:12] + "..." if len(v) > 12 else "***"
        else:
            out[k] = v
    return out


def _parse_connect_frames(data: bytes):
    """Parse Connect streaming envelope frames. Returns first data frame as parsed JSON, or list of frames.

    Each frame is a 1-byte flags field + big-endian uint32 length + payload.
    Flag bit 0x02 marks the end-of-stream/trailers frame.
    Returns None when no frame could be decoded.
    """
    frames = []
    offset = 0
    while offset + 5 <= len(data):
        flags = data[offset]
        length = struct.unpack(">I", data[offset + 1:offset + 5])[0]
        offset += 5
        if offset + length > len(data):
            # Truncated frame (e.g. stream cut off mid-payload) — stop.
            break
        payload = data[offset:offset + length]
        offset += length
        if flags & 0x02:
            # End-of-stream / trailers frame
            try:
                frames.append({"_trailers": json.loads(payload.decode("utf-8", errors="replace"))})
            except json.JSONDecodeError:
                frames.append({"_trailers_raw": payload.decode("utf-8", errors="replace")})
        else:
            # Data frame
            try:
                frames.append(json.loads(payload.decode("utf-8", errors="replace")))
            except json.JSONDecodeError:
                pass
    if not frames:
        return None
    if len(frames) == 1:
        return frames[0]
    return frames


def http_request(
    method: str,
    url: str,
    headers: dict | None = None,
    params: dict | None = None,
    body: dict | None = None,
    raw_body: bytes | None = None,
    content_type: str | None = None,
    timeout: int = 15,
) -> tuple[int, dict | str | list | None, dict]:
    """Make HTTP request. Returns (status, parsed_body, response_headers).

    On connection failure returns (0, "Connection error: ...", {}).
    `raw_body` takes precedence over `body`; `body` is JSON-encoded.
    Connect streaming responses are decoded via _parse_connect_frames.
    """
    if params:
        url += "?" + urllib.parse.urlencode(params, doseq=True)

    if VERBOSE:
        print(f"  >>> {method} {url}")
        if headers:
            print(f"      Headers: {_redact(headers)}")
        if body is not None:
            s = json.dumps(body, indent=2)
            print(f"      Body: {s[:300]}{'...' if len(s) > 300 else ''}")

    if raw_body is not None:
        data = raw_body
    elif body is not None:
        data = json.dumps(body).encode("utf-8")
    else:
        data = None

    req = urllib.request.Request(url, data=data, method=method)
    for k, v in (headers or {}).items():
        req.add_header(k, v)
    if content_type:
        req.add_header("Content-Type", content_type)
    elif body is not None and not req.has_header("Content-type"):
        req.add_header("Content-Type", "application/json")

    resp_headers = {}
    raw_bytes = b""
    raw = ""
    status = 0
    try:
        resp = urllib.request.urlopen(req, timeout=timeout, context=_ctx)
        status = resp.status
        resp_headers = dict(resp.headers)
        # Read in chunks to capture partial streaming data on timeout.
        # For streaming responses, read() blocks until stream ends or timeout.
        # Reading in small chunks allows us to capture early frames.
        chunks = []
        import socket as _sock
        try:
            while True:
                # read1() returns after a single system read call, unlike read()
                # which blocks until it gets the full amount. This is essential
                # for streaming responses where data arrives in small frames.
                chunk = resp.read1(65536) if hasattr(resp, "read1") else resp.read(65536)
                if not chunk:
                    break
                chunks.append(chunk)
        except (_sock.timeout, TimeoutError, OSError):
            pass  # Timeout during streaming read — use whatever we got
        raw_bytes = b"".join(chunks)
        raw = raw_bytes.decode("utf-8", errors="replace")
    except urllib.error.HTTPError as e:
        # HTTP errors still carry a body/headers worth inspecting.
        status = e.code
        resp_headers = dict(e.headers) if e.headers else {}
        raw_bytes = e.read() if e.fp else b""
        raw = raw_bytes.decode("utf-8", errors="replace")
    except Exception as e:
        if VERBOSE:
            print(f"  <<< ERROR: {e}")
        return 0, f"Connection error: {e}", {}

    # Try to decode Connect streaming envelopes if content type suggests it
    resp_ct = resp_headers.get("Content-Type", "") or resp_headers.get("content-type", "")
    parsed = None
    if "application/connect+" in resp_ct and len(raw_bytes) >= 5:
        parsed = _parse_connect_frames(raw_bytes)
    if parsed is None:
        try:
            parsed = json.loads(raw) if raw else None
        except json.JSONDecodeError:
            # Not JSON — keep a truncated text snippet for diagnostics.
            parsed = raw[:500] if raw else None

    if VERBOSE:
        print(f"  <<< {status}")
        if parsed is not None:
            s = json.dumps(parsed, indent=2) if isinstance(parsed, (dict, list)) else str(parsed)
            print(f"      Response: {s[:500]}{'...' if len(s) > 500 else ''}")

    return status, parsed, resp_headers


def ctrl(method: str, path: str, **kwargs):
    """Platform API request."""
    return http_request(method, f"{PLATFORM_URL}{path}", **kwargs)


def envd(method: str, sandbox_id: str, path: str, **kwargs):
    """Sandbox (envd) API request."""
    url = f"https://{ENVD_PORT}-{sandbox_id}.e2b.app{path}"
    return http_request(method, url, **kwargs)


def api_key_hdr(api_key: str) -> dict:
    # Platform API-key auth header.
    return {"X-API-Key": api_key}


def bearer_hdr(token: str) -> dict:
    # Generic bearer-token auth header.
    return {"Authorization": f"Bearer {token}"}


def connect_hdr(token: str | None = None) -> dict:
    """Headers for Connect RPC unary calls."""
    h = {
        "Connect-Protocol-Version": "1",
        "Content-Type": "application/json",
    }
    if token:
        h["Authorization"] = f"Bearer {token}"
    return h


def connect_stream_hdr(token: str | None = None) -> dict:
    """Headers for Connect RPC streaming calls (server-stream / client-stream)."""
    h = {
        "Connect-Protocol-Version": "1",
        "Content-Type": "application/connect+json",
    }
    if token:
        h["Authorization"] = f"Bearer {token}"
    return h


def connect_envelope(payload: dict) -> bytes:
    """Wrap a JSON payload in a Connect streaming envelope (flags + uint32 length + data)."""
    data = json.dumps(payload).encode("utf-8")
    return struct.pack(">BI", 0, len(data)) + data


def multipart_upload(sandbox_id: str, file_path: str, content: bytes, token: str | None = None) -> tuple[int, object, dict]:
    """Upload file via multipart/form-data."""
    # Time-based boundary is unique enough for sequential validation runs.
    boundary = "----E2BValidation" + str(int(time.time()))
    body_parts = []
    body_parts.append(f"--{boundary}".encode())
    body_parts.append(f'Content-Disposition: form-data; name="file"; filename="{file_path.split("/")[-1]}"'.encode())
    body_parts.append(b"Content-Type: application/octet-stream")
    body_parts.append(b"")
    body_parts.append(content)
    body_parts.append(f"--{boundary}--".encode())
    raw_body = b"\r\n".join(body_parts)

    headers = {}
    if token:
        headers["Authorization"] = f"Bearer {token}"

    url = f"https://{ENVD_PORT}-{sandbox_id}.e2b.app/files"
    params = {"path": file_path}
    return http_request(
        "POST", url, headers=headers, params=params,
        raw_body=raw_body,
        content_type=f"multipart/form-data; boundary={boundary}",
    )


# ---------------------------------------------------------------------------
# SPEC LOADING & REF RESOLUTION
# ---------------------------------------------------------------------------

def load_spec(path: Path) -> dict:
    # Parse the OpenAPI YAML spec into a dict.
    with open(path) as f:
        return yaml.safe_load(f)


def resolve_ref(spec: dict, ref: str, _seen: set | None = None) -> dict:
    """Resolve a $ref like '#/components/schemas/Foo' into the actual schema.

    Follows chained $refs; returns {} on a reference cycle or a dangling ref.
    """
    if _seen is None:
        _seen = set()
    if ref in _seen:
        return {}  # cycle
    _seen.add(ref)

    parts = ref.lstrip("#/").split("/")
    node = spec
    for p in parts:
        node = node.get(p, {})
        if not isinstance(node, dict):
            return {}

    # If resolved node itself has a $ref, follow it
    if "$ref" in node:
        return resolve_ref(spec, node["$ref"], _seen)
    return node


def resolve_schema(spec: dict, schema: dict) -> dict:
    """Resolve a schema, following $ref if present."""
    if not isinstance(schema, dict):
        return schema
    if "$ref" in schema:
        return resolve_ref(spec, schema["$ref"])
    return schema


# ---------------------------------------------------------------------------
# SCHEMA VALIDATION ENGINE
# ---------------------------------------------------------------------------

def validate_schema(actual, schema: dict, spec: dict, path: str = "$") -> list[Finding]:
    """
    Validate actual data against an OpenAPI 3.1 schema.
    Returns list of findings (mismatches).

    `path` is a JSON-path-like string ("$.foo[0]") used in finding messages.
    Handles $ref, nullable, type lists, allOf/oneOf/anyOf, enum, format,
    objects, and arrays (first 10 items only).
    """
    findings = []
    if not isinstance(schema, dict):
        return findings

    # Resolve $ref
    schema = resolve_schema(spec, schema)

    # Handle nullable
    is_nullable = schema.get("nullable", False)
    schema_type = schema.get("type")

    # OpenAPI 3.1 nullable as type list: type: [string, "null"]
    if isinstance(schema_type, list):
        if actual is None:
            if "null" in schema_type:
                return findings
            findings.append(Finding(
                "critical", "type_mismatch", "",
                f"At {path}: got null but type {schema_type} doesn't include null",
                str(schema_type), "null",
            ))
            return findings
        # Filter out "null" to get the real type
        real_types = [t for t in schema_type if t != "null"]
        if len(real_types) == 1:
            schema_type = real_types[0]
        else:
            # Multiple types (union) - check if any match
            for rt in real_types:
                test_schema = {**schema, "type": rt}
                test_findings = validate_schema(actual, test_schema, spec, path)
                if not test_findings:
                    return []
            findings.append(Finding(
                "critical", "type_mismatch", "",
                f"At {path}: value doesn't match any of types {real_types}",
                str(real_types), type(actual).__name__,
            ))
            return findings

    if actual is None:
        if is_nullable:
            return findings
        # Some fields may legitimately be absent (not required) — handled by caller
        return findings

    # Handle allOf
    if "allOf" in schema:
        for i, sub in enumerate(schema["allOf"]):
            findings.extend(validate_schema(actual, sub, spec, f"{path}/allOf[{i}]"))
        return findings

    # Handle oneOf
    if "oneOf" in schema:
        matched = False
        for sub in schema["oneOf"]:
            sub_resolved = resolve_schema(spec, sub)
            sub_findings = validate_schema(actual, sub_resolved, spec, path)
            if not sub_findings:
                matched = True
                break
        if not matched:
            # Don't report as critical for oneOf - just note it
            findings.append(Finding(
                "minor", "schema", "",
                f"At {path}: value didn't match any oneOf variant",
                "one of oneOf variants", str(type(actual).__name__),
            ))
        return findings

    # Handle anyOf
    if "anyOf" in schema:
        for sub in schema["anyOf"]:
            sub_resolved = resolve_schema(spec, sub)
            sub_findings = validate_schema(actual, sub_resolved, spec, path)
            if not sub_findings:
                return findings
        findings.append(Finding(
            "minor", "schema", "",
            f"At {path}: value didn't match any anyOf variant",
            "one of anyOf variants", str(type(actual).__name__),
        ))
        return findings

    # Type checking
    if schema_type:
        type_ok = _check_type(actual, schema_type)
        if not type_ok:
            findings.append(Finding(
                "critical", "type_mismatch", "",
                f"At {path}: expected type '{schema_type}', got {type(actual).__name__}",
                schema_type, type(actual).__name__,
            ))
            return findings  # Skip deeper checks if type is wrong

    # Format checking
    fmt = schema.get("format")
    if fmt and isinstance(actual, str):
        fmt_err = _check_format(actual, fmt, path)
        if fmt_err:
            findings.append(fmt_err)

    # Enum checking
    enum_vals = schema.get("enum")
    if enum_vals is not None and actual not in enum_vals:
        findings.append(Finding(
            "critical", "schema", "",
            f"At {path}: value '{actual}' not in enum {enum_vals}",
            str(enum_vals), str(actual),
        ))

    # Object validation
    if schema_type == "object" or (schema_type is None and "properties" in schema):
        if isinstance(actual, dict):
            findings.extend(_validate_object(actual, schema, spec, path))

    # Array validation
    if schema_type == "array" and isinstance(actual, list):
        items_schema = schema.get("items")
        if items_schema:
            for i, item in enumerate(actual[:10]):  # Validate first 10 items
                findings.extend(validate_schema(item, items_schema, spec, f"{path}[{i}]"))

    return findings


def _check_type(value, expected_type: str) -> bool:
    """Check if Python value matches OpenAPI type.

    bool is explicitly excluded from integer/number since bool is a
    subclass of int in Python.
    """
    if expected_type == "string":
        return isinstance(value, str)
    elif expected_type == "integer":
        return isinstance(value, int) and not isinstance(value, bool)
    elif expected_type == "number":
        return isinstance(value, (int, float)) and not isinstance(value, bool)
    elif expected_type == "boolean":
        return isinstance(value, bool)
    elif expected_type == "array":
        return isinstance(value, list)
    elif expected_type == "object":
        return isinstance(value, dict)
    return True  # Unknown type, don't fail


def _check_format(value: str, fmt: str, path: str) -> Finding | None:
    """Check string format. Returns Finding if invalid.

    Only date-time (RFC3339) and uuid are checked; other formats pass.
    """
    if fmt == "date-time":
        if not RFC3339_RE.match(value):
            return Finding(
                "minor", "format", "",
                f"At {path}: '{value}' doesn't match date-time format (RFC3339)",
                "RFC3339 date-time", value,
            )
    elif fmt == "uuid":
        if not UUID_RE.match(value):
            return Finding(
                "minor", "format", "",
                f"At {path}: '{value}' doesn't match UUID format",
                "UUID", value,
            )
    return None


def _validate_object(actual: dict, schema: dict, spec: dict, path: str) -> list[Finding]:
    """Validate object properties, required fields, and extra fields."""
    findings = []
    properties = schema.get("properties", {})
    required = schema.get("required", [])

    # Check required fields
    for req_field in required:
        if req_field not in actual:
            findings.append(Finding(
                "critical", "missing_field", "",
                f"At {path}: required field '{req_field}' is missing",
                f"field '{req_field}'", "absent",
            ))

    # Validate each property that exists
    for prop_name, prop_schema in properties.items():
        if prop_name in actual:
            resolved = resolve_schema(spec, prop_schema)
            findings.extend(validate_schema(actual[prop_name], resolved, spec, f"{path}.{prop_name}"))

    # Check for extra undocumented fields
    if properties:
        # NOTE(review): this additionalProperties check is dead code — the
        # second condition compares the same expression to None and non-None,
        # so it is always False, and the branch body is `pass` anyway.
        # additionalProperties: false is effectively never enforced strictly.
        additional = schema.get("additionalProperties")
        if additional is False or (additional is None and schema.get("additionalProperties") is not None):
            pass  # additionalProperties: false — strict
        known_fields = set(properties.keys())
        extra = set(actual.keys()) - known_fields
        if extra and properties:
            for ef in extra:
findings.append(Finding( + "minor", "extra_field", "", + f"At {path}: undocumented field '{ef}'", + "not in spec", str(type(actual[ef]).__name__), + )) + + return findings + + +# --------------------------------------------------------------------------- +# SANDBOX LIFECYCLE MANAGER +# --------------------------------------------------------------------------- + + +class SandboxManager: + def __init__(self, api_key: str): + self.api_key = api_key + self.sandbox_id: str | None = None + self.access_token: str | None = None + + def create(self) -> bool: + """Create a sandbox. Returns True on success.""" + print("\n Creating test sandbox...") + h = api_key_hdr(self.api_key) + status, body, _ = ctrl("POST", "/sandboxes", headers=h, + body={"templateID": "base", "timeout": 600}) + if status != 201 or not isinstance(body, dict): + print(f" FAILED to create sandbox: {status}") + return False + self.sandbox_id = body.get("sandboxID") + self.access_token = body.get("envdAccessToken") or body.get("trafficAccessToken") or "" + + if not self.access_token: + # Try /connect to get token + c_status, c_body, _ = ctrl("POST", f"/sandboxes/{self.sandbox_id}/connect", + headers=h, body={"timeout": 600}) + if c_status in (200, 201) and isinstance(c_body, dict): + self.access_token = (c_body.get("envdAccessToken") + or c_body.get("accessToken") + or c_body.get("trafficAccessToken") or "") + + print(f" Sandbox ID: {self.sandbox_id}") + print(f" Token: {self.access_token[:20]}..." if self.access_token else " No access token!") + time.sleep(2) # Allow boot + return True + + def ensure_alive(self) -> bool: + """Check if sandbox is alive. 
Returns False if dead.""" + if not self.sandbox_id: + return False + try: + status, _, _ = envd("GET", self.sandbox_id, "/health", timeout=5) + return status in (200, 204) + except Exception: + return False + + def set_timeout(self, seconds: int = 600): + if self.sandbox_id: + ctrl("POST", f"/sandboxes/{self.sandbox_id}/timeout", + headers=api_key_hdr(self.api_key), body={"timeout": seconds}) + + def cleanup(self): + if self.sandbox_id: + print(f"\n Cleaning up sandbox {self.sandbox_id}...") + ctrl("DELETE", f"/sandboxes/{self.sandbox_id}", + headers=api_key_hdr(self.api_key)) + self.sandbox_id = None + + +# --------------------------------------------------------------------------- +# TEAM ID DISCOVERY +# --------------------------------------------------------------------------- + +def discover_team_id(api_key: str, env_team_id: str | None) -> str | None: + if env_team_id: + return env_team_id + h = api_key_hdr(api_key) + status, body, _ = ctrl("GET", "/templates", headers=h) + if status == 200 and isinstance(body, list): + for tpl in body: + for key in ("teamID", "team_id", "teamId"): + tid = tpl.get(key) + if tid: + return tid + status, body, _ = ctrl("GET", "/sandboxes", headers=h) + if status == 200 and isinstance(body, list): + for sbx in body: + for key in ("teamID", "team_id", "teamId"): + tid = sbx.get(key) + if tid: + return tid + return None + + +# --------------------------------------------------------------------------- +# SPEC-LEVEL ANALYSIS +# --------------------------------------------------------------------------- + +def analyze_spec(spec: dict) -> list[SpecIssue]: + """Analyze the spec for best-practice issues.""" + issues = [] + paths = spec.get("paths", {}) + schemas = spec.get("components", {}).get("schemas", {}) + + # Collect all $ref targets used + all_refs = set() + _collect_refs(spec, all_refs) + + # Track operation details + for path_str, methods in paths.items(): + for method, op in methods.items(): + if not isinstance(op, dict) or 
"responses" not in op: + continue + op_label = f"{method.upper()} {path_str}" + + # 1. Missing operationId + if "operationId" not in op: + issues.append(SpecIssue( + "missing_operationId", + f"Operation '{op_label}' has no operationId", + op_label, + )) + + # 2. Missing summary + if "summary" not in op and "description" not in op: + issues.append(SpecIssue( + "missing_summary", + f"Operation '{op_label}' has no summary or description", + op_label, + )) + + # 3. Check parameters for missing descriptions + for param in op.get("parameters", []): + if isinstance(param, dict) and "$ref" not in param: + if "description" not in param: + issues.append(SpecIssue( + "missing_param_description", + f"Parameter '{param.get('name', '?')}' in '{op_label}' has no description", + op_label, + )) + + # 10. Deprecated without migration note + if op.get("deprecated"): + desc = op.get("description", "") + if not any(w in desc.lower() for w in ["use ", "replaced", "instead", "v2", "v3", "migration"]): + issues.append(SpecIssue( + "deprecated_no_migration", + f"Deprecated operation '{op_label}' has no migration note in description", + op_label, + )) + + # 4. Schema properties missing descriptions + for schema_name, schema_def in schemas.items(): + if not isinstance(schema_def, dict): + continue + for prop_name, prop_def in schema_def.get("properties", {}).items(): + if isinstance(prop_def, dict) and "$ref" not in prop_def: + if "description" not in prop_def and "title" not in prop_def: + issues.append(SpecIssue( + "missing_schema_description", + f"Property '{prop_name}' in schema '{schema_name}' has no description", + f"schemas/{schema_name}", + )) + + # 7. 
Naming inconsistencies (camelCase vs snake_case in parameters) + param_names = set() + for path_str, methods in paths.items(): + for method, op in methods.items(): + if not isinstance(op, dict): + continue + for param in op.get("parameters", []): + if isinstance(param, dict): + name = param.get("name", "") + if name: + param_names.add(name) + camel = [n for n in param_names if "_" not in n and n[0].islower()] + snake = [n for n in param_names if "_" in n] + if camel and snake: + issues.append(SpecIssue( + "naming_inconsistency", + f"Mixed naming: camelCase params ({', '.join(sorted(camel)[:5])}) and " + f"snake_case params ({', '.join(sorted(snake)[:5])})", + "parameters", + )) + + # 8. Orphaned schemas + for schema_name in schemas: + ref_str = f"#/components/schemas/{schema_name}" + if ref_str not in all_refs: + issues.append(SpecIssue( + "orphaned_schema", + f"Schema '{schema_name}' is defined but never referenced", + f"schemas/{schema_name}", + )) + + # 9. Truncated descriptions + for path_str, methods in paths.items(): + for method, op in methods.items(): + if not isinstance(op, dict): + continue + for param in op.get("parameters", []): + if isinstance(param, dict): + desc = param.get("description", "") + if isinstance(desc, str) and desc and not desc.rstrip().endswith(('.', ')', '"', ':', ']', '`')): + last_word = desc.rstrip().split()[-1] if desc.strip() else "" + if last_word and last_word[0].islower() and len(desc) > 30: + issues.append(SpecIssue( + "truncated_description", + f"Possible truncated description: '...{desc[-40:]}'", + f"{method.upper()} {path_str}", + )) + + # 11. Check LogLevel description correctness + log_level = schemas.get("LogLevel", {}) + if log_level.get("description", "").lower() == "state of the sandbox": + issues.append(SpecIssue( + "wrong_description", + "LogLevel description says 'State of the sandbox' — should describe log severity levels", + "schemas/LogLevel", + )) + + # 13. 
Server override correctness + for path_str, methods in paths.items(): + servers = methods.get("servers", []) + # Check if any operation on this path is a sandbox endpoint + sandbox_tags = {"files", "filesystem.Filesystem", "process.Process"} + is_sandbox_path = False + for method, op in methods.items(): + if not isinstance(op, dict): + continue + for tag in (op.get("tags") or []): + if tag in sandbox_tags: + is_sandbox_path = True + break + if is_sandbox_path and not servers: + issues.append(SpecIssue( + "missing_server_override", + f"Sandbox endpoint '{path_str}' has no server override", + path_str, + )) + + return issues + + +def _collect_refs(node, refs: set): + """Recursively collect all $ref values in the spec.""" + if isinstance(node, dict): + if "$ref" in node: + refs.add(node["$ref"]) + for v in node.values(): + _collect_refs(v, refs) + elif isinstance(node, list): + for item in node: + _collect_refs(item, refs) + + +# --------------------------------------------------------------------------- +# TEST PHASES +# --------------------------------------------------------------------------- + +def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict) -> list[EndpointResult]: + """Phase 1: Platform — Teams.""" + results = [] + h = api_key_hdr(api_key) + + # GET /teams (requires AccessTokenAuth — not ApiKeyAuth, will likely fail) + print("\n Phase 1: Platform — Teams") + print(" GET /teams") + ep = EndpointResult("GET", "/teams", surface="platform") + status, body, _ = ctrl("GET", "/teams", headers=h) + ep.tested = True + ep.expected_status = 200 + ep.actual_status = status + ep.response_body = body + if status == 401: + ep.findings.append(Finding( + "minor", "auth", "GET /teams", + "GET /teams requires AccessTokenAuth (Bearer), not ApiKeyAuth — expected behavior with API key", + )) + elif status == 200 and isinstance(body, list): + schema = {"type": "array", "items": {"allOf": [{"$ref": "#/components/schemas/Team"}]}} + 
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /teams"))
    results.append(ep)

    # Without a team ID we cannot exercise the per-team metrics endpoints.
    if not team_id:
        print(" [SKIP] No team ID — skipping team metrics")
        return results

    now = int(time.time())

    # GET /teams/{teamID}/metrics — happy path over the last hour.
    print(f" GET /teams/{team_id[:16]}../metrics")
    ep = EndpointResult("GET", "/teams/{teamID}/metrics", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", f"/teams/{team_id}/metrics", headers=h,
                           params={"start": now - 3600, "end": now})
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"type": "array", "items": {"$ref": "#/components/schemas/TeamMetric"}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /teams/{teamID}/metrics"))
    elif status != 200:  # NOTE(review): equivalent to a bare `else`
        ep.findings.append(Finding("critical", "status_code", "GET /teams/{teamID}/metrics",
                                   f"Expected 200, got {status}", "200", str(status)))
    results.append(ep)

    # Error case: missing params (start/end omitted) should yield 400.
    ep2 = EndpointResult("GET", "/teams/{teamID}/metrics", surface="platform")
    ep2.tested = True
    ep2.expected_status = 400
    status, body, _ = ctrl("GET", f"/teams/{team_id}/metrics", headers=h)
    ep2.actual_status = status
    if status != 400:
        ep2.findings.append(Finding("minor", "status_code", "GET /teams/{teamID}/metrics",
                                    f"Missing params: expected 400, got {status}", "400", str(status)))
    results.append(ep2)

    # GET /teams/{teamID}/metrics/max — happy path.
    print(f" GET /teams/{team_id[:16]}../metrics/max")
    ep = EndpointResult("GET", "/teams/{teamID}/metrics/max", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", f"/teams/{team_id}/metrics/max", headers=h,
                           params={"start": now - 3600, "end": now, "metric": "concurrent_sandboxes"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/MaxTeamMetric"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /teams/{teamID}/metrics/max"))
    elif status != 200:  # NOTE(review): equivalent to a bare `else`
        ep.findings.append(Finding("critical", "status_code", "GET /teams/{teamID}/metrics/max",
                                   f"Expected 200, got {status}", "200", str(status)))
    results.append(ep)

    # Error: 403 wrong team — querying a team we don't belong to must be forbidden.
    ep2 = EndpointResult("GET", "/teams/{teamID}/metrics/max", surface="platform")
    ep2.tested = True
    ep2.expected_status = 403
    status, body, _ = ctrl("GET", f"/teams/{FAKE_TEAM_ID}/metrics/max", headers=h,
                           params={"start": now - 3600, "end": now, "metric": "concurrent_sandboxes"})
    ep2.actual_status = status
    if status != 403:
        ep2.findings.append(Finding("minor", "status_code", "GET /teams/{teamID}/metrics/max",
                                    f"Wrong team: expected 403, got {status}", "403", str(status)))
    results.append(ep2)

    return results


def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointResult], str | None, str | None, str | None]:
    """Phase 2: Templates read-only. Returns (results, template_id, build_id, alias)."""
    results = []
    h = api_key_hdr(api_key)
    # IDs harvested from list responses; reused by later phases.
    template_id = None
    build_id = None
    alias = None

    print("\n Phase 2: Platform — Templates (read-only)")

    # GET /templates
    print(" GET /templates")
    ep = EndpointResult("GET", "/templates", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", "/templates", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, list):
        schema = {"type": "array", "items": {"allOf": [{"$ref": "#/components/schemas/Template"}]}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates"))
        # Harvest a template/build/alias triple for the follow-up requests.
        for tpl in body:
            if not template_id:
                template_id = tpl.get("templateID")
            if not build_id:
                build_id = tpl.get("buildID")
            aliases = tpl.get("aliases")
            if aliases and isinstance(aliases, list) and aliases and not alias:
                alias = aliases[0]
            if template_id and build_id and alias:
                break
    results.append(ep)

    # GET /templates/{templateID}
    if template_id:
        print(f" GET /templates/{template_id}")
        ep = EndpointResult("GET", "/templates/{templateID}", surface="platform")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = ctrl("GET", f"/templates/{template_id}", headers=h)
        ep.actual_status = status
        ep.response_body = body
        if status == 200:
            schema = {"$ref": "#/components/schemas/TemplateWithBuilds"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates/{templateID}"))
            # Extract build_id from builds list
            if isinstance(body, dict) and not build_id:
                builds = body.get("builds", [])
                if builds and isinstance(builds, list):
                    build_id = builds[0].get("buildID")
        results.append(ep)

    # GET /templates/{templateID} 404
    print(" GET /templates/{templateID} -> 404")
    ep = EndpointResult("GET", "/templates/{templateID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("GET", f"/templates/{FAKE_TEMPLATE_ID}", headers=h)
    ep.actual_status = status
    if status != 404:
        ep.findings.append(Finding("minor", "status_code", "GET /templates/{templateID}",
                                   f"Non-existent: expected 404, got {status}", "404", str(status)))
    results.append(ep)

    # GET /templates/aliases/{alias} — uses the well-known "base" alias.
    print(f" GET /templates/aliases/base")
    ep = EndpointResult("GET", "/templates/aliases/{alias}", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", "/templates/aliases/base", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/TemplateAliasResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates/aliases/{alias}"))
    elif status == 404:
        ep.findings.append(Finding("minor", "status_code", "GET /templates/aliases/{alias}",
                                   "Alias 'base' not found (404)", "200", "404"))
    results.append(ep)

    # GET /templates/{templateID}/builds/{buildID}/status
    if template_id and build_id:
        print(f" GET .../builds/{build_id[:16]}../status")
        ep = EndpointResult("GET", "/templates/{templateID}/builds/{buildID}/status", surface="platform")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = ctrl("GET", f"/templates/{template_id}/builds/{build_id}/status", headers=h)
        ep.actual_status = status
        ep.response_body = body
        if status == 200:
            schema = {"$ref": "#/components/schemas/TemplateBuildInfo"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                             "GET /templates/{templateID}/builds/{buildID}/status"))
        results.append(ep)

    # GET /templates/{templateID}/builds/{buildID}/logs
    if template_id and build_id:
        print(f" GET .../builds/{build_id[:16]}../logs")
        ep = EndpointResult("GET", "/templates/{templateID}/builds/{buildID}/logs", surface="platform")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = ctrl("GET", f"/templates/{template_id}/builds/{build_id}/logs", headers=h)
        ep.actual_status = status
        ep.response_body = body
        if status == 200:
            schema = {"$ref": "#/components/schemas/TemplateBuildLogsResponse"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                             "GET /templates/{templateID}/builds/{buildID}/logs"))
        results.append(ep)

    # GET /templates/{templateID}/files/{hash} — expect 404 (fake hash)
    print(f" GET /templates/{{templateID}}/files/{{hash}} -> 404")
    ep = EndpointResult("GET", "/templates/{templateID}/files/{hash}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    tid = template_id or FAKE_TEMPLATE_ID
    status, body, _ = ctrl("GET", f"/templates/{tid}/files/{FAKE_HASH}", headers=h)
    ep.actual_status = status
    if status not in (400, 404):
        ep.findings.append(Finding("minor", "status_code", "GET /templates/{templateID}/files/{hash}",
                                   f"Expected 404, got {status}", "404", str(status)))
    results.append(ep)

    return results, template_id, build_id, alias


def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | None) -> list[EndpointResult]:
    """Phase 3: Templates write operations."""
    results = []
    h = api_key_hdr(api_key)

    print("\n Phase 3: Platform — Templates (write)")

    # POST /v3/templates — empty body should be rejected.
    print(" POST /v3/templates (400 — empty)")
    ep = EndpointResult("POST", "/v3/templates", surface="platform")
    ep.tested = True
    ep.expected_status = 400
    status, body, _ = ctrl("POST", "/v3/templates", headers=h, body={})
    ep.actual_status = status
    if status != 400:
        ep.findings.append(Finding("minor", "status_code", "POST /v3/templates",
                                   f"Empty body: expected 400, got {status}", "400", str(status)))
    results.append(ep)

    # POST /v2/templates (deprecated)
    print(" POST /v2/templates (400 — empty)")
    ep = EndpointResult("POST", "/v2/templates", surface="platform")
    ep.tested = True
    ep.expected_status = 400
    status, body, _ = ctrl("POST", "/v2/templates", headers=h, body={})
    ep.actual_status = status
    if status != 400:
        ep.findings.append(Finding("minor", "status_code", "POST /v2/templates",
                                   f"Empty body: expected 400, got {status}", "400", str(status)))
    results.append(ep)

    # POST /templates (deprecated, uses AccessTokenAuth)
    print(" POST /templates (deprecated, 401 with API key)")
    ep = EndpointResult("POST", "/templates", surface="platform")
    ep.tested = True
    ep.expected_status = 401
    status, body, _ = ctrl("POST", "/templates", headers=h, body={"dockerfile": "FROM ubuntu"})
    ep.actual_status = status
    if status not in (400, 401):
        ep.findings.append(Finding("minor", "auth", "POST /templates",
                                   f"Expected 401 (needs Bearer), got {status}", "401", str(status)))
    results.append(ep)

    # POST /templates/{templateID} (deprecated rebuild, uses AccessTokenAuth)
    print(" POST /templates/{templateID} (deprecated, 401)")
    ep = EndpointResult("POST", "/templates/{templateID}", surface="platform")
    ep.tested = True
    ep.expected_status = 401
    tid = template_id or FAKE_TEMPLATE_ID
    status, body, _ = ctrl("POST", f"/templates/{tid}", headers=h, body={"dockerfile": "FROM ubuntu"})
    ep.actual_status = status
    if status not in (400, 401, 404):
        ep.findings.append(Finding("minor", "status_code", "POST /templates/{templateID}",
                                   f"Expected 401/404, got {status}", "401 or 404", str(status)))
    results.append(ep)

    # PATCH /templates/{templateID} (deprecated)
    print(" PATCH /templates/{templateID} (deprecated, 404)")
    ep = EndpointResult("PATCH", "/templates/{templateID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("PATCH", f"/templates/{FAKE_TEMPLATE_ID}", headers=h, body={})
    ep.actual_status = status
    if status not in (400, 404):
        ep.findings.append(Finding("minor", "status_code", "PATCH /templates/{templateID}",
                                   f"Expected 404, got {status}", "404", str(status)))
    results.append(ep)

    # PATCH /v2/templates/{templateID}
    print(" PATCH /v2/templates/{templateID} (404)")
    ep = EndpointResult("PATCH", "/v2/templates/{templateID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("PATCH", f"/v2/templates/{FAKE_TEMPLATE_ID}", headers=h, body={})
    ep.actual_status = status
    if status not in (400, 404):
        ep.findings.append(Finding("minor", "status_code", "PATCH /v2/templates/{templateID}",
                                   f"Expected 404, got {status}", "404", str(status)))
    results.append(ep)

    # POST /templates/{templateID}/builds/{buildID} (deprecated, AccessTokenAuth)
    print(" POST .../builds/{buildID} (deprecated, 401)")
    ep = EndpointResult("POST", "/templates/{templateID}/builds/{buildID}", surface="platform")
    ep.tested = True
    ep.expected_status = 401
    status, body, _ = ctrl("POST", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={})
    ep.actual_status = status
    if status not in (400, 401, 404):
        ep.findings.append(Finding("minor", "status_code", "POST .../builds/{buildID}",
                                   f"Expected 401/404, got {status}", "401", str(status)))
    results.append(ep)

    # POST /v2/templates/{templateID}/builds/{buildID}
    print(" POST /v2/.../builds/{buildID} (404)")
    ep = EndpointResult("POST", "/v2/templates/{templateID}/builds/{buildID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("POST", f"/v2/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={})
    ep.actual_status = status
    if status not in (400, 404):
        ep.findings.append(Finding("minor", "status_code", "POST /v2/.../builds/{buildID}",
                                   f"Expected 404, got {status}", "404", str(status)))
    results.append(ep)

    # POST /templates/tags (400)
    print(" POST /templates/tags (400)")
    ep = EndpointResult("POST", "/templates/tags", surface="platform")
    ep.tested = True
    ep.expected_status = 400
    status, body, _ = ctrl("POST", "/templates/tags", headers=h, body={})
    ep.actual_status = status
    if status != 400:
        ep.findings.append(Finding("minor", "status_code", "POST /templates/tags",
                                   f"Empty body: expected 400, got {status}", "400", str(status)))
    results.append(ep)

    # DELETE /templates/tags (400)
    print(" DELETE /templates/tags (400)")
    ep = EndpointResult("DELETE", "/templates/tags", surface="platform")
    ep.tested = True
    ep.expected_status = 400
    status, body, _ = ctrl("DELETE", "/templates/tags", headers=h, body={})
    ep.actual_status = status
    if status != 400:
        ep.findings.append(Finding("minor", "status_code", "DELETE /templates/tags",
                                   f"Empty body: expected 400, got {status}", "400", str(status)))
    results.append(ep)

    # DELETE /templates/{templateID} (404) — fake ID so nothing real is deleted.
    print(" DELETE /templates/{templateID} (404)")
    ep = EndpointResult("DELETE", "/templates/{templateID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("DELETE", f"/templates/{FAKE_TEMPLATE_ID}", headers=h)
    ep.actual_status = status
    if status not in (400, 404):
        ep.findings.append(Finding("minor", "status_code", "DELETE /templates/{templateID}",
                                   f"Expected 404, got {status}", "404", str(status)))
    results.append(ep)

    return results


def run_phase_4_sandboxes_read(api_key: str, spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 4: Sandboxes create + read."""
    results = []
    h = api_key_hdr(api_key)

    print("\n Phase 4: Platform — Sandboxes (create + read)")

    # POST /sandboxes -> 201 (already created, validate the response shape)
    # We re-create to capture schema
    print(" POST /sandboxes (validate schema)")
    ep = EndpointResult("POST", "/sandboxes", surface="platform")
    ep.tested = True
    ep.expected_status = 201
    status, body, _ = ctrl("POST", "/sandboxes", headers=h,
                           body={"templateID": "base", "timeout": 30})
    ep.actual_status = status
    ep.response_body = body
    if status == 201 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/Sandbox"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /sandboxes"))
        # Clean up the extra sandbox
        extra_id = body.get("sandboxID")
        if extra_id:
            ctrl("DELETE", f"/sandboxes/{extra_id}", headers=h)
    elif status != 201:
        ep.findings.append(Finding("critical", "status_code", "POST /sandboxes",
                                   f"Expected 201, got {status}", "201", str(status)))
    results.append(ep)

    # POST /sandboxes 400 (empty body)
    print(" POST /sandboxes (400 — empty)")
    ep = EndpointResult("POST", "/sandboxes", surface="platform")
    ep.tested = True
    ep.expected_status = 400
    status, body, _ = ctrl("POST", "/sandboxes", headers=h, body={})
    ep.actual_status = status
    if status != 400:
        ep.findings.append(Finding("minor", "status_code", "POST /sandboxes",
                                   f"Empty body: expected 400, got {status}", "400", str(status)))
    results.append(ep)

    # GET /sandboxes
    print(" GET /sandboxes")
    ep = EndpointResult("GET", "/sandboxes", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", "/sandboxes", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, list):
        schema = {"type": "array", "items": {"allOf": [{"$ref": "#/components/schemas/ListedSandbox"}]}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes"))
    results.append(ep)

    # GET /v2/sandboxes
    print(" GET /v2/sandboxes")
    ep = EndpointResult("GET", "/v2/sandboxes", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", "/v2/sandboxes", headers=h, params={"state": "running"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, list):
        schema = {"type": "array", "items": {"allOf": [{"$ref": "#/components/schemas/ListedSandbox"}]}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /v2/sandboxes"))
    results.append(ep)

    # GET /sandboxes/{sandboxID}
    if sbx.sandbox_id:
        print(f" GET /sandboxes/{sbx.sandbox_id}")
        ep = EndpointResult("GET", "/sandboxes/{sandboxID}", surface="platform")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = ctrl("GET", f"/sandboxes/{sbx.sandbox_id}", headers=h)
        ep.actual_status = status
        ep.response_body = body
        if status == 200:
            schema = {"$ref": "#/components/schemas/SandboxDetail"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes/{sandboxID}"))
        results.append(ep)

    # GET /sandboxes/{sandboxID} 404
    print(" GET /sandboxes/{sandboxID} (404)")
    ep = EndpointResult("GET", "/sandboxes/{sandboxID}", surface="platform")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = ctrl("GET", f"/sandboxes/{FAKE_SANDBOX_ID}", headers=h)
    ep.actual_status = status
    if status != 404:
        ep.findings.append(Finding("minor", "status_code", "GET /sandboxes/{sandboxID}",
                                   f"Non-existent: expected 404, got {status}", "404", str(status)))
    results.append(ep)

    return results


def run_phase_5_sandbox_actions(api_key: str, spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 5: Sandbox actions (timeout, refreshes, connect, logs, metrics)."""
    results = []
    h = api_key_hdr(api_key)
    sid = sbx.sandbox_id

    print("\n Phase 5: Platform — Sandbox actions")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    # POST /sandboxes/{sandboxID}/timeout -> 204
    print(" POST .../timeout")
    ep = EndpointResult("POST", "/sandboxes/{sandboxID}/timeout", surface="platform")
    ep.tested = True
    ep.expected_status = 204
    status, body, _ = ctrl("POST", f"/sandboxes/{sid}/timeout", headers=h, body={"timeout": 600})
    ep.actual_status = status
    if status != 204:
        ep.findings.append(Finding("critical", "status_code", "POST /sandboxes/{sandboxID}/timeout",
                                   f"Expected 204, got {status}", "204", str(status)))
    results.append(ep)

    # POST /sandboxes/{sandboxID}/refreshes -> 204
    # NOTE(review): 200 is tolerated below even though expected_status is 204.
    print(" POST .../refreshes")
    ep = EndpointResult("POST", "/sandboxes/{sandboxID}/refreshes", surface="platform")
    ep.tested = True
    ep.expected_status = 204
    status, body, _ = ctrl("POST", f"/sandboxes/{sid}/refreshes", headers=h, body={"duration": 60})
    ep.actual_status = status
    if status not in (200, 204):
        ep.findings.append(Finding("critical", "status_code", "POST /sandboxes/{sandboxID}/refreshes",
                                   f"Expected 204, got {status}", "204", str(status)))
    results.append(ep)

    # POST /sandboxes/{sandboxID}/connect -> 200/201
    print(" POST .../connect")
    ep = EndpointResult("POST", "/sandboxes/{sandboxID}/connect", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("POST", f"/sandboxes/{sid}/connect", headers=h, body={"timeout": 600})
    ep.actual_status = status
    ep.response_body = body
    if status in (200, 201) and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/Sandbox"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /sandboxes/{sandboxID}/connect"))
    elif status not in (200, 201):
        ep.findings.append(Finding("critical", "status_code", "POST /sandboxes/{sandboxID}/connect",
                                   f"Expected 200/201, got {status}", "200 or 201", str(status)))
    results.append(ep)

    # GET /sandboxes/{sandboxID}/logs (deprecated)
    print(" GET .../logs (deprecated)")
    ep = EndpointResult("GET", "/sandboxes/{sandboxID}/logs", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", f"/sandboxes/{sid}/logs", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/SandboxLogs"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes/{sandboxID}/logs"))
    results.append(ep)

    # GET /v2/sandboxes/{sandboxID}/logs
    print(" GET /v2/.../logs")
    ep = EndpointResult("GET", "/v2/sandboxes/{sandboxID}/logs", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", f"/v2/sandboxes/{sid}/logs", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/SandboxLogsV2Response"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /v2/sandboxes/{sandboxID}/logs"))
    elif status != 200:  # NOTE(review): equivalent to a bare `else`
        ep.findings.append(Finding("critical", "status_code", "GET /v2/sandboxes/{sandboxID}/logs",
                                   f"Expected 200, got {status}", "200", str(status)))
    results.append(ep)

    # GET /sandboxes/{sandboxID}/metrics — last 5 minutes.
    now = int(time.time())
    print(" GET .../metrics")
    ep = EndpointResult("GET", "/sandboxes/{sandboxID}/metrics", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", f"/sandboxes/{sid}/metrics", headers=h,
                           params={"start": now - 300, "end": now})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, list):
        schema = {"type": "array", "items": {"$ref": "#/components/schemas/SandboxMetric"}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes/{sandboxID}/metrics"))
    results.append(ep)

    # GET /sandboxes/metrics (bulk endpoint, comma-separated IDs)
    print(" GET /sandboxes/metrics")
    ep = EndpointResult("GET", "/sandboxes/metrics", surface="platform")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = ctrl("GET", "/sandboxes/metrics", headers=h,
                           params={"sandbox_ids": sid})
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/SandboxesWithMetrics"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes/metrics"))
    results.append(ep)

    return results


def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 6: Sandbox — Health & System endpoints."""
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 6: Sandbox — Health & System")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    # GET /health — KEY EDGE CASE: spec says 200, original envd says 204
    print(" GET /health")
    ep = EndpointResult("GET", "/health", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200  # What the merged spec says
    status, body, _ = envd("GET", sid, "/health")
    ep.actual_status = status
    if status == 204:
        ep.findings.append(Finding(
            "critical", "status_code", "GET /health",
            "Spec says 200, API returns 204. The original envd source spec says 204 — spec should be updated.",
            "200", "204",
        ))
    elif status != 200:
        ep.findings.append(Finding("critical", "status_code", "GET /health",
                                   f"Expected 200, got {status}", "200", str(status)))
    results.append(ep)

    # GET /metrics
    print(" GET /metrics")
    ep = EndpointResult("GET", "/metrics", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    h = bearer_hdr(token) if token else {}
    status, body, _ = envd("GET", sid, "/metrics", headers=h)
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/Metrics"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /metrics"))
    results.append(ep)

    # Test /metrics without auth (should also work per spec: security: [{}, {AccessTokenAuth}])
    print(" GET /metrics (no auth)")
    ep2 = EndpointResult("GET", "/metrics", surface="sandbox")
    ep2.tested = True
    ep2.expected_status = 200
    status2, body2, _ = envd("GET", sid, "/metrics")
    ep2.actual_status = status2
    if status2 != 200:
        ep2.findings.append(Finding("critical", "auth", "GET /metrics",
                                    f"Spec says no auth required ({{}}) but got {status2}", "200", str(status2)))
    results.append(ep2)

    # POST /init — EDGE CASE: what happens on already-initialized sandbox?
def run_phase_7_filesystem_rpc(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 7: Filesystem Connect RPC endpoints.

    Runs a full happy-path lifecycle against the filesystem.Filesystem
    Connect service — MakeDir, Stat, ListDir, Move, Remove — followed by
    one error case (Stat on a non-existent path) to validate the
    connect.error body shape.

    Args:
        spec: Parsed OpenAPI spec dict, used for $ref schema validation.
        sbx: SandboxManager holding the live sandbox id and access token.

    Returns:
        One EndpointResult per RPC performed; empty list when no sandbox
        is available.
    """
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 7: Sandbox — Filesystem (Connect RPC)")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    # Unary Connect RPCs use plain application/json headers.
    h = connect_hdr(token)

    # MakeDir
    print(" MakeDir /tmp/test-validation-dir")
    ep = EndpointResult("POST", "/filesystem.Filesystem/MakeDir", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/MakeDir",
                           headers=h, body={"path": "/tmp/test-validation-dir"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/filesystem.MakeDirResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/MakeDir"))
    results.append(ep)

    # Stat the directory just created
    print(" Stat /tmp/test-validation-dir")
    ep = EndpointResult("POST", "/filesystem.Filesystem/Stat", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/Stat",
                           headers=h, body={"path": "/tmp/test-validation-dir"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/filesystem.StatResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/Stat"))
    results.append(ep)

    # ListDir
    print(" ListDir /tmp")
    ep = EndpointResult("POST", "/filesystem.Filesystem/ListDir", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/ListDir",
                           headers=h, body={"path": "/tmp"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/filesystem.ListDirResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/ListDir"))
    results.append(ep)

    # Move the test directory so that Remove below cleans up the moved path
    print(" Move /tmp/test-validation-dir -> /tmp/test-validation-moved")
    ep = EndpointResult("POST", "/filesystem.Filesystem/Move", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/Move",
                           headers=h, body={"source": "/tmp/test-validation-dir",
                                            "destination": "/tmp/test-validation-moved"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/filesystem.MoveResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/Move"))
    results.append(ep)

    # Remove (cleanup of the moved directory)
    print(" Remove /tmp/test-validation-moved")
    ep = EndpointResult("POST", "/filesystem.Filesystem/Remove", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/Remove",
                           headers=h, body={"path": "/tmp/test-validation-moved"})
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/filesystem.RemoveResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/Remove"))
    results.append(ep)

    # Error case: Stat non-existent
    # NOTE(review): no status-code assertion here — only the error body shape
    # is validated against connect.error when a "code" field is present.
    print(" Stat /nonexistent -> error")
    ep = EndpointResult("POST", "/filesystem.Filesystem/Stat", surface="sandbox")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/Stat",
                           headers=h, body={"path": "/nonexistent/path/xyz"})
    ep.actual_status = status
    if isinstance(body, dict) and "code" in body:
        # Validate connect.error schema
        schema = {"$ref": "#/components/schemas/connect.error"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/Stat (error)"))
    results.append(ep)

    return results
def run_phase_8_files_rest(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 8: Files REST endpoints.

    Uploads a small file via multipart POST /files, downloads it back via
    GET /files (checking the Content-Type), and requests a non-existent
    file expecting 404.

    Args:
        spec: Parsed OpenAPI spec dict, used for $ref schema validation.
        sbx: SandboxManager holding the live sandbox id and access token.

    Returns:
        One EndpointResult per request performed; empty list when no
        sandbox is available.
    """
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 8: Sandbox — Files (REST)")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    # POST /files — upload
    print(" POST /files (upload test-file.txt)")
    ep = EndpointResult("POST", "/files", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    test_content = b"Hello from E2B validation script"
    status, body, _ = multipart_upload(sid, "/tmp/test-file.txt", test_content, token)
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"type": "array", "items": {"$ref": "#/components/schemas/EntryInfo"}}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /files"))
    results.append(ep)

    # GET /files — download the file just uploaded
    print(" GET /files (download test-file.txt)")
    ep = EndpointResult("GET", "/files", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    h = bearer_hdr(token) if token else {}
    status, body, resp_headers = envd("GET", sid, "/files", headers=h,
                                      params={"path": "/tmp/test-file.txt"})
    ep.actual_status = status
    if status == 200:
        # Verify content type; "text" is also accepted since the payload is text.
        ct = resp_headers.get("Content-Type", "")
        if "octet-stream" not in ct and "text" not in ct:
            ep.findings.append(Finding("minor", "schema", "GET /files",
                                       f"Expected application/octet-stream, got Content-Type: {ct}",
                                       "application/octet-stream", ct))
    results.append(ep)

    # GET /files 404 (reuses the auth headers built for the download test)
    print(" GET /files (404)")
    ep = EndpointResult("GET", "/files", surface="sandbox")
    ep.tested = True
    ep.expected_status = 404
    status, body, _ = envd("GET", sid, "/files", headers=h,
                           params={"path": "/nonexistent/file.txt"})
    ep.actual_status = status
    if status != 404:
        ep.findings.append(Finding("minor", "status_code", "GET /files",
                                   f"Non-existent file: expected 404, got {status}", "404", str(status)))
    results.append(ep)

    return results
def run_phase_9_watcher(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 9: Filesystem Watcher.

    Creates a watcher on /tmp, polls its events, then removes it. If
    CreateWatcher does not yield a watcher id, the two dependent
    operations are recorded as skipped rather than tested.

    Args:
        spec: Parsed OpenAPI spec dict, used for $ref schema validation.
        sbx: SandboxManager holding the live sandbox id and access token.

    Returns:
        One EndpointResult per RPC performed (or skipped); empty list
        when no sandbox is available.
    """
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 9: Sandbox — Filesystem Watcher")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    h = connect_hdr(token)
    watcher_id = None

    # CreateWatcher
    print(" CreateWatcher /tmp")
    ep = EndpointResult("POST", "/filesystem.Filesystem/CreateWatcher", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/CreateWatcher",
                           headers=h, body={"path": "/tmp", "recursive": False})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/filesystem.CreateWatcherResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                         "POST /filesystem.Filesystem/CreateWatcher"))
        # The id drives the two follow-up operations below.
        watcher_id = body.get("watcherId")
    results.append(ep)

    # GetWatcherEvents / RemoveWatcher only make sense with a live watcher id.
    if watcher_id:
        print(f" GetWatcherEvents (watcher: {watcher_id})")
        ep = EndpointResult("POST", "/filesystem.Filesystem/GetWatcherEvents", surface="sandbox")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = envd("POST", sid, "/filesystem.Filesystem/GetWatcherEvents",
                               headers=h, body={"watcherId": watcher_id})
        ep.actual_status = status
        ep.response_body = body
        if status == 200 and isinstance(body, dict):
            schema = {"$ref": "#/components/schemas/filesystem.GetWatcherEventsResponse"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                             "POST /filesystem.Filesystem/GetWatcherEvents"))
        results.append(ep)

        # RemoveWatcher (cleanup)
        print(f" RemoveWatcher {watcher_id}")
        ep = EndpointResult("POST", "/filesystem.Filesystem/RemoveWatcher", surface="sandbox")
        ep.tested = True
        ep.expected_status = 200
        status, body, _ = envd("POST", sid, "/filesystem.Filesystem/RemoveWatcher",
                               headers=h, body={"watcherId": watcher_id})
        ep.actual_status = status
        if status == 200:
            schema = {"$ref": "#/components/schemas/filesystem.RemoveWatcherResponse"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                             "POST /filesystem.Filesystem/RemoveWatcher"))
        results.append(ep)
    else:
        # Record the dependent operations as skipped (tested = False) so they
        # still show up in the report's untested section.
        for op_name in ("GetWatcherEvents", "RemoveWatcher"):
            ep = EndpointResult("POST", f"/filesystem.Filesystem/{op_name}", surface="sandbox")
            ep.tested = False
            ep.skip_reason = "No watcher_id from CreateWatcher"
            results.append(ep)

    return results
def run_phase_10_processes(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 10: Process Management.

    Exercises the process.Process Connect service: Start (streaming),
    List (unary), a long-running `sleep 60` to obtain a PID, then
    Connect, SendInput, StreamInput, CloseStdin, Update (PTY resize) and
    SendSignal against that process.

    Streaming RPCs use `application/connect+json` headers with an
    enveloped request body; unary RPCs use plain JSON headers.

    Args:
        spec: Parsed OpenAPI spec dict, used for $ref schema validation.
        sbx: SandboxManager holding the live sandbox id and access token.

    Returns:
        One EndpointResult per RPC performed; empty list when no sandbox
        is available.
    """
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 10: Sandbox — Process Management")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    h = connect_hdr(token)           # unary calls
    h_stream = connect_stream_hdr(token)  # streaming calls

    # Start echo hello (streaming — uses application/connect+json)
    print(" Start: echo hello")
    ep = EndpointResult("POST", "/process.Process/Start", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    start_payload = {"process": {"cmd": "/bin/echo", "args": ["hello"], "envs": {}}, "tag": "test-echo"}
    status, body, _ = envd("POST", sid, "/process.Process/Start",
                           headers=h_stream, raw_body=connect_envelope(start_payload), timeout=5)
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        # Streaming response may be NDJSON (newline-delimited JSON)
        # Try to parse as regular JSON first, then as NDJSON
        if isinstance(body, dict):
            schema = {"$ref": "#/components/schemas/process.StartResponse"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/Start"))
    results.append(ep)

    # List processes (unary — application/json)
    print(" List")
    ep = EndpointResult("POST", "/process.Process/List", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    status, body, _ = envd("POST", sid, "/process.Process/List",
                           headers=h, body={})
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/process.ListResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/List"))
    results.append(ep)

    # Start sleep 60 (long-running, streaming) — gives us a PID for the calls below
    print(" Start: sleep 60")
    sleep_payload = {"process": {"cmd": "/bin/sleep", "args": ["60"], "envs": {}}, "tag": "test-sleep"}
    status_start, body_start, _ = envd("POST", sid, "/process.Process/Start",
                                       headers=h_stream,
                                       raw_body=connect_envelope(sleep_payload), timeout=5)
    sleep_pid = None
    if isinstance(body_start, dict):
        # Single JSON frame: pid lives at event.start.pid
        event = body_start.get("event", {})
        start_event = event.get("start", {})
        sleep_pid = start_event.get("pid")
    elif isinstance(body_start, str):
        # NDJSON: try to parse first line
        for line in body_start.strip().split("\n"):
            line = line.strip()
            if line:
                try:
                    parsed = json.loads(line)
                    if isinstance(parsed, dict):
                        event = parsed.get("event", {})
                        start_event = event.get("start", {})
                        if start_event.get("pid"):
                            sleep_pid = start_event["pid"]
                            break
                except json.JSONDecodeError:
                    pass
    print(f" sleep PID: {sleep_pid}")

    # Connect to process (streaming); fall back to tag selection if no PID was parsed
    print(" Connect to sleep process")
    ep = EndpointResult("POST", "/process.Process/Connect", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    connect_payload = {"process": {"pid": sleep_pid}} if sleep_pid else {"process": {"tag": "test-sleep"}}
    status, body, _ = envd("POST", sid, "/process.Process/Connect",
                           headers=h_stream, raw_body=connect_envelope(connect_payload), timeout=3)
    ep.actual_status = status
    ep.response_body = body
    if status == 200 and isinstance(body, dict):
        schema = {"$ref": "#/components/schemas/process.ConnectResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/Connect"))
    results.append(ep)

    # SendInput (unary)
    print(" SendInput")
    ep = EndpointResult("POST", "/process.Process/SendInput", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"}
    status, body, _ = envd("POST", sid, "/process.Process/SendInput",
                           headers=h, body={"process": sel, "input": {"stdin": "dGVzdA=="}})  # base64 "test"
    ep.actual_status = status
    if status == 200:
        schema = {"$ref": "#/components/schemas/process.SendInputResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/SendInput"))
    results.append(ep)

    # StreamInput (client-streaming — uses application/connect+json)
    # NOTE(review): no schema/status assertions here; only the initial
    # request/response exchange is recorded (stdlib cannot hold the stream open).
    print(" StreamInput (client-streaming — limited test)")
    ep = EndpointResult("POST", "/process.Process/StreamInput", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"}
    stream_input_payload = {"start": {"process": sel}}
    status, body, _ = envd("POST", sid, "/process.Process/StreamInput",
                           headers=h_stream, raw_body=connect_envelope(stream_input_payload), timeout=3)
    ep.actual_status = status
    ep.response_body = body
    results.append(ep)

    # CloseStdin
    print(" CloseStdin")
    ep = EndpointResult("POST", "/process.Process/CloseStdin", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"}
    status, body, _ = envd("POST", sid, "/process.Process/CloseStdin",
                           headers=h, body={"process": sel})
    ep.actual_status = status
    if status == 200:
        schema = {"$ref": "#/components/schemas/process.CloseStdinResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/CloseStdin"))
    results.append(ep)

    # Update PTY (will likely error since process wasn't started with PTY)
    print(" Update (PTY resize)")
    ep = EndpointResult("POST", "/process.Process/Update", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"}
    status, body, _ = envd("POST", sid, "/process.Process/Update",
                           headers=h, body={"process": sel, "pty": {"size": {"cols": 120, "rows": 40}}})
    ep.actual_status = status
    ep.response_body = body
    if status == 200:
        schema = {"$ref": "#/components/schemas/process.UpdateResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/Update"))
    results.append(ep)

    # SendSignal — kill the sleep process (cleanup for this phase)
    print(" SendSignal SIGTERM")
    ep = EndpointResult("POST", "/process.Process/SendSignal", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"}
    status, body, _ = envd("POST", sid, "/process.Process/SendSignal",
                           headers=h, body={"process": sel, "signal": "SIGNAL_SIGTERM"})
    ep.actual_status = status
    if status == 200:
        schema = {"$ref": "#/components/schemas/process.SendSignalResponse"}
        ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /process.Process/SendSignal"))
    results.append(ep)

    return results
def run_phase_11_streaming(spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 11: Streaming (best-effort).

    Only WatchDir is attempted here; a stdlib HTTP client cannot hold a
    server stream open, so just the initial frame is captured and a minor
    finding documenting that limitation is always recorded.

    Args:
        spec: Parsed OpenAPI spec dict (unused here beyond signature parity).
        sbx: SandboxManager holding the live sandbox id and access token.

    Returns:
        A single-element list (or empty when no sandbox is available).
    """
    results = []
    sid = sbx.sandbox_id
    token = sbx.access_token

    print("\n Phase 11: Sandbox — Streaming (best-effort)")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    h = connect_stream_hdr(token)

    # WatchDir (server-streaming)
    print(" WatchDir /tmp (server-streaming)")
    ep = EndpointResult("POST", "/filesystem.Filesystem/WatchDir", surface="sandbox")
    ep.tested = True
    ep.expected_status = 200
    watchdir_payload = {"path": "/tmp", "recursive": False}
    status, body, _ = envd("POST", sid, "/filesystem.Filesystem/WatchDir",
                           headers=h, raw_body=connect_envelope(watchdir_payload), timeout=3)
    ep.actual_status = status
    ep.response_body = body
    # Unconditional finding: documents that only the first frame was validated.
    ep.findings.append(Finding("minor", "schema", "POST /filesystem.Filesystem/WatchDir",
                               "Server-streaming: only initial frame captured (stdlib limitation)"))
    results.append(ep)

    return results


def run_phase_12_destructive(api_key: str, spec: dict, sbx: SandboxManager) -> list[EndpointResult]:
    """Phase 12: Destructive (last).

    Pauses the sandbox, resumes it via the deprecated endpoint (only if
    the pause returned 204), then deletes it and clears
    ``sbx.sandbox_id`` so the caller's cleanup does not double-delete.

    Args:
        api_key: Platform API key used for all control-plane requests.
        spec: Parsed OpenAPI spec dict, used for $ref schema validation.
        sbx: SandboxManager holding the live sandbox id.

    Returns:
        One EndpointResult per request performed (resume may be recorded
        as skipped); empty list when no sandbox is available.
    """
    results = []
    h = api_key_hdr(api_key)
    sid = sbx.sandbox_id

    print("\n Phase 12: Platform — Destructive")

    if not sid:
        print(" [SKIP] No sandbox")
        return results

    # POST /sandboxes/{sandboxID}/pause
    print(" POST .../pause")
    ep = EndpointResult("POST", "/sandboxes/{sandboxID}/pause", surface="platform")
    ep.tested = True
    ep.expected_status = 204
    status, body, _ = ctrl("POST", f"/sandboxes/{sid}/pause", headers=h)
    ep.actual_status = status
    # 409 (already paused) is tolerated without a finding.
    if status not in (204, 409):
        ep.findings.append(Finding("critical", "status_code", "POST /sandboxes/{sandboxID}/pause",
                                   f"Expected 204, got {status}", "204", str(status)))
    results.append(ep)

    # POST /sandboxes/{sandboxID}/resume (deprecated)
    # Note: `status` here is still the pause response — resume is only
    # attempted when the pause actually succeeded.
    if status == 204:
        time.sleep(1)
        print(" POST .../resume (deprecated)")
        ep = EndpointResult("POST", "/sandboxes/{sandboxID}/resume", surface="platform")
        ep.tested = True
        ep.expected_status = 201
        status, body, _ = ctrl("POST", f"/sandboxes/{sid}/resume", headers=h,
                               body={"timeout": 60})
        ep.actual_status = status
        ep.response_body = body
        if status in (200, 201) and isinstance(body, dict):
            schema = {"$ref": "#/components/schemas/Sandbox"}
            ep.findings.extend(_tag_findings(validate_schema(body, schema, spec),
                                             "POST /sandboxes/{sandboxID}/resume"))
        elif status not in (200, 201):
            ep.findings.append(Finding("minor", "status_code", "POST /sandboxes/{sandboxID}/resume",
                                       f"Expected 201, got {status}", "201", str(status)))
        results.append(ep)
    else:
        ep = EndpointResult("POST", "/sandboxes/{sandboxID}/resume", surface="platform")
        ep.tested = False
        ep.skip_reason = "Pause failed, cannot test resume"
        results.append(ep)

    # DELETE /sandboxes/{sandboxID} — final cleanup
    print(f" DELETE /sandboxes/{sid}")
    ep = EndpointResult("DELETE", "/sandboxes/{sandboxID}", surface="platform")
    ep.tested = True
    ep.expected_status = 204
    status, body, _ = ctrl("DELETE", f"/sandboxes/{sid}", headers=h)
    ep.actual_status = status
    if status != 204:
        ep.findings.append(Finding("critical", "status_code", "DELETE /sandboxes/{sandboxID}",
                                   f"Expected 204, got {status}", "204", str(status)))
    results.append(ep)
    sbx.sandbox_id = None  # Mark as cleaned up

    return results
def run_auth_tests(api_key: str) -> "list[EndpointResult]":
    """Test 401 for all control plane endpoints without auth.

    Sends every platform request WITHOUT credentials and records a
    critical finding for each endpoint that does not answer 401.

    Args:
        api_key: Accepted for signature parity with the other phase
            runners; intentionally unused — these requests must be
            unauthenticated.

    Returns:
        One EndpointResult per endpoint probed.
    """
    results = []

    print("\n Auth Tests: 401 for control plane without API key")

    # (method, path, request body) triples; FAKE_* ids avoid touching real resources.
    endpoints = [
        ("GET", "/sandboxes", None),
        ("POST", "/sandboxes", {"templateID": "base"}),
        ("GET", "/v2/sandboxes", None),
        ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}", None),
        ("DELETE", f"/sandboxes/{FAKE_SANDBOX_ID}", None),
        ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/pause", None),
        ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/resume", None),
        ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/connect", None),
        ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/timeout", {"timeout": 60}),
        ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/refreshes", None),
        ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}/logs", None),
        ("GET", f"/teams/{FAKE_TEAM_ID}/metrics", None),
        ("GET", f"/teams/{FAKE_TEAM_ID}/metrics/max", None),
        ("GET", "/sandboxes/metrics", None),
        ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}/metrics", None),
        ("GET", "/templates", None),
        ("POST", "/v2/templates", {}),
        ("POST", "/v3/templates", {}),
        ("GET", f"/templates/{FAKE_TEMPLATE_ID}", None),
        ("DELETE", f"/templates/{FAKE_TEMPLATE_ID}", None),
        ("PATCH", f"/templates/{FAKE_TEMPLATE_ID}", {}),
        ("PATCH", f"/v2/templates/{FAKE_TEMPLATE_ID}", {}),
        ("GET", f"/templates/{FAKE_TEMPLATE_ID}/files/{FAKE_HASH}", None),
        ("POST", f"/v2/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", {}),
        ("GET", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}/status", None),
        ("GET", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}/logs", None),
        ("POST", "/templates/tags", {}),
        ("DELETE", "/templates/tags", {}),
        ("GET", f"/templates/aliases/{FAKE_ALIAS}", None),
    ]

    for method, path, body in endpoints:
        # No headers passed => no API key; response body is irrelevant here.
        status, _, _ = ctrl(method, path, body=body)
        ep = EndpointResult(method, path, surface="platform")
        ep.tested = True
        ep.expected_status = 401
        ep.actual_status = status
        if status != 401:
            ep.findings.append(Finding("critical", "auth", f"{method} {path}",
                                       f"No API key: expected 401, got {status}", "401", str(status)))
        results.append(ep)

    return results
ep.expected_status = 401 + ep.actual_status = status + if status != 401: + ep.findings.append(Finding("critical", "auth", f"{method} {path}", + f"No API key: expected 401, got {status}", "401", str(status))) + results.append(ep) + + return results + + +# --------------------------------------------------------------------------- +# HELPERS +# --------------------------------------------------------------------------- + +def _tag_findings(findings: list[Finding], endpoint: str) -> list[Finding]: + """Tag all findings with the endpoint name.""" + for f in findings: + if not f.endpoint: + f.endpoint = endpoint + return findings + + +# --------------------------------------------------------------------------- +# REPORT GENERATION +# --------------------------------------------------------------------------- + +def generate_report( + all_results: list[EndpointResult], + spec_issues: list[SpecIssue], + start_time: float, + end_time: float, +) -> str: + """Generate the markdown validation report.""" + now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC") + duration = end_time - start_time + + # Count findings + all_findings = [] + for r in all_results: + all_findings.extend(r.findings) + critical = [f for f in all_findings if f.severity == "critical"] + minor = [f for f in all_findings if f.severity == "minor"] + tested = sum(1 for r in all_results if r.tested) + total = len(all_results) + + lines = [] + lines.append("# E2B OpenAPI Spec Validation Report\n") + lines.append(f"**Date**: {now}") + lines.append(f"**Spec Version**: 0.1.0") + lines.append(f"**Endpoints Tested**: {tested} / {total}") + lines.append(f"**Findings**: {len(all_findings)} total ({len(critical)} critical, {len(minor)} minor)") + lines.append(f"**Duration**: {duration:.1f}s\n") + + # Executive Summary + lines.append("## Executive Summary\n") + if not critical: + lines.append("No critical findings. 
The spec broadly matches the live API behavior.") + else: + lines.append(f"Found {len(critical)} critical discrepancies between the spec and the live API, " + f"plus {len(minor)} minor issues. See details below.") + lines.append("") + + # Endpoint Results — Platform + lines.append("## Endpoint Results\n") + lines.append("### Platform API\n") + platform_results = [r for r in all_results if r.surface == "platform"] + for r in platform_results: + _render_endpoint_result(lines, r) + + # Endpoint Results — Sandbox + lines.append("### Sandbox API (envd)\n") + sandbox_results = [r for r in all_results if r.surface == "sandbox"] + for r in sandbox_results: + _render_endpoint_result(lines, r) + + # Critical Findings Summary + lines.append("## Findings Summary\n") + lines.append("### Critical Findings\n") + lines.append("Issues where the spec does not match the actual API behavior.\n") + if critical: + lines.append("| # | Endpoint | Category | Finding | Expected | Actual |") + lines.append("|---|----------|----------|---------|----------|--------|") + for i, f in enumerate(critical, 1): + lines.append(f"| {i} | {f.endpoint} | {f.category} | {f.message[:80]} | {f.expected} | {f.actual} |") + else: + lines.append("None found.") + lines.append("") + + # Minor Findings + lines.append("### Minor Findings\n") + lines.append("Missing descriptions, naming inconsistencies, documentation quality issues.\n") + if minor: + lines.append("| # | Endpoint | Category | Finding |") + lines.append("|---|----------|----------|---------|") + for i, f in enumerate(minor, 1): + lines.append(f"| {i} | {f.endpoint} | {f.category} | {f.message[:100]} |") + else: + lines.append("None found.") + lines.append("") + + # Best-Practice Recommendations + lines.append("### Best-Practice Recommendations\n") + lines.append("Holistic improvements to make the spec production-quality.\n") + if spec_issues: + lines.append("| # | Category | Recommendation |") + lines.append("|---|----------|----------------|") + 
for i, issue in enumerate(spec_issues, 1): + lines.append(f"| {i} | {issue.category} | {issue.description} |") + else: + lines.append("None found.") + lines.append("") + + # Streaming Endpoints + lines.append("## Streaming Endpoints\n") + lines.append("Document what was tested and what could not be validated for each of the 4 streaming endpoints.\n") + lines.append("| Endpoint | What was tested | Limitations |") + lines.append("|----------|----------------|-------------|") + streaming_eps = [ + ("POST /filesystem.Filesystem/WatchDir", "Initial HTTP response captured", "Server-streaming: only first frame via stdlib urllib"), + ("POST /process.Process/Connect", "Initial HTTP response captured", "Server-streaming: only first frame via stdlib urllib"), + ("POST /process.Process/Start", "Initial HTTP response captured", "Server-streaming: only first frame via stdlib urllib"), + ("POST /process.Process/StreamInput", "Initial HTTP request sent", "Client-streaming: cannot maintain stream via stdlib urllib"), + ] + for ep, tested_desc, limitation in streaming_eps: + lines.append(f"| {ep} | {tested_desc} | {limitation} |") + lines.append("") + + # Deprecated Endpoints + lines.append("## Deprecated Endpoints\n") + lines.append("For each deprecated endpoint: does it still work? What does the spec say the replacement is?\n") + lines.append("| Endpoint | Still works? 
| Replacement | Notes |") + lines.append("|----------|-------------|-------------|-------|") + deprecated_eps = [ + ("GET /sandboxes/{sandboxID}/logs", "Yes", "GET /v2/sandboxes/{sandboxID}/logs", "v1 returns 200"), + ("POST /sandboxes/{sandboxID}/resume", "Yes", "POST /sandboxes/{sandboxID}/connect", "Returns Sandbox schema"), + ("POST /v2/templates", "Yes", "POST /v3/templates", "v2 requires alias field"), + ("POST /templates", "Needs Bearer", "POST /v3/templates", "Uses AccessTokenAuth"), + ("POST /templates/{templateID}", "Needs Bearer", "POST /v3/templates", "Rebuild, uses AccessTokenAuth"), + ("PATCH /templates/{templateID}", "Yes", "PATCH /v2/templates/{templateID}", "Update template"), + ("POST /templates/{templateID}/builds/{buildID}", "Needs Bearer", "POST /v2/.../builds/{buildID}", "Start build"), + ] + for ep, works, replacement, notes in deprecated_eps: + lines.append(f"| {ep} | {works} | {replacement} | {notes} |") + lines.append("") + + # Untested Scenarios + lines.append("## Untested Scenarios\n") + lines.append("List any endpoints or scenarios you could not test, and why.\n") + lines.append("| Endpoint | Reason |") + lines.append("|----------|--------|") + untested = [r for r in all_results if not r.tested] + for r in untested: + lines.append(f"| {r.method} {r.path} | {r.skip_reason or 'Unknown'} |") + # General limitations + lines.append("| Rate limiting (429) | Cannot safely trigger without affecting quota |") + lines.append("| Conflict (409) | Requires specific data state |") + lines.append("| Internal errors (500) | Cannot reliably reproduce |") + lines.append("") + + return "\n".join(lines) + + +def _render_endpoint_result(lines: list[str], r: EndpointResult): + """Render a single endpoint result to markdown.""" + icon = "YES" if r.tested else "NO" + status_match = r.actual_status == r.expected_status if r.tested else False + lines.append(f"#### {r.method} {r.path}") + lines.append(f"- **Tested**: {icon}" + (f" ({r.skip_reason})" if not 
r.tested and r.skip_reason else "")) + if r.tested: + lines.append(f"- **Expected Status**: {r.expected_status}") + lines.append(f"- **Actual Status**: {r.actual_status}") + lines.append(f"- **Response Schema**:") + if r.findings: + missing = [f for f in r.findings if f.category == "missing_field"] + extra = [f for f in r.findings if f.category == "extra_field"] + types = [f for f in r.findings if f.category == "type_mismatch"] + other = [f for f in r.findings if f.category not in ("missing_field", "extra_field", "type_mismatch")] + lines.append(f" - Required fields present: {'list missing: ' + ', '.join(f.message for f in missing) if missing else 'YES'}") + lines.append(f" - Extra undocumented fields: {', '.join(f.message for f in extra) if extra else 'none'}") + lines.append(f" - Type mismatches: {', '.join(f.message for f in types) if types else 'none'}") + if other: + lines.append(f"- **Findings**:") + for f in other: + sev = "CRITICAL" if f.severity == "critical" else "MINOR" + lines.append(f" - [{sev}] {f.message}") + else: + lines.append(f" - Required fields present: YES") + lines.append(f" - Extra undocumented fields: none") + lines.append(f" - Type mismatches: none") + lines.append("") + + +# --------------------------------------------------------------------------- +# CLI & MAIN +# --------------------------------------------------------------------------- + +def print_help(): + print(__doc__) + sys.exit(0) + + +def main(): + if "--help" in sys.argv or "-h" in sys.argv: + print_help() + + api_key = os.environ.get("E2B_API_KEY") + if not api_key: + print("Error: E2B_API_KEY environment variable is required") + sys.exit(2) + + global VERBOSE + VERBOSE = "--verbose" in sys.argv + + skip_sandbox = "--skip-sandbox" in sys.argv + output_path = "openapi-validation-report.md" + phase_filter = None + http_timeout = 15 + + # Parse args + args = sys.argv[1:] + i = 0 + while i < len(args): + if args[i] == "--output" and i + 1 < len(args): + output_path = args[i + 1] 
+ i += 2 + elif args[i] == "--phase" and i + 1 < len(args): + phase_filter = int(args[i + 1]) + i += 2 + elif args[i] == "--timeout" and i + 1 < len(args): + http_timeout = int(args[i + 1]) + i += 2 + else: + i += 1 + + env_team_id = os.environ.get("E2B_TEAM_ID") + + print("=" * 60) + print(" E2B OpenAPI Spec Validation") + print("=" * 60) + print(f" Spec: {SPEC_PATH.name}") + print(f" Platform URL: {PLATFORM_URL}") + print(f" Envd port: {ENVD_PORT}") + print(f" API Key: {api_key[:10]}...{api_key[-4:]}") + print(f" Skip sandbox: {skip_sandbox}") + print(f" Verbose: {VERBOSE}") + print(f" Output: {output_path}") + if phase_filter: + print(f" Phase filter: {phase_filter}") + + # Load spec + spec = load_spec(SPEC_PATH) + print(f" Spec paths: {len(spec.get('paths', {}))}") + + # Discover team ID + team_id = discover_team_id(api_key, env_team_id) + print(f" Team ID: {team_id[:16]}..." if team_id else " Team ID: (not found)") + + start_time = time.time() + all_results: list[EndpointResult] = [] + sbx = SandboxManager(api_key) + + def should_run(phase: int) -> bool: + return phase_filter is None or phase_filter == phase + + try: + # Auth tests (always run) + if should_run(0): + all_results.extend(run_auth_tests(api_key)) + + # Phase 1: Teams + if should_run(1): + all_results.extend(run_phase_1_teams(api_key, team_id, spec)) + + # Phase 2: Templates (read) + template_id = None + build_id = None + alias = None + if should_run(2): + phase2_results, template_id, build_id, alias = run_phase_2_templates_read(api_key, spec) + all_results.extend(phase2_results) + + # Phase 3: Templates (write) + if should_run(3): + all_results.extend(run_phase_3_templates_write(api_key, spec, template_id)) + + # Create sandbox for phases 4-12 + if not skip_sandbox and any(should_run(p) for p in range(4, 13)): + if not sbx.create(): + print(" FATAL: Cannot create sandbox. 
Skipping sandbox-dependent phases.") + skip_sandbox = True + + # Phase 4: Sandboxes (read) + if should_run(4): + all_results.extend(run_phase_4_sandboxes_read(api_key, spec, sbx)) + + # Phase 5: Sandbox actions + if should_run(5) and not skip_sandbox: + all_results.extend(run_phase_5_sandbox_actions(api_key, spec, sbx)) + + # Phase 6: Health & System + if should_run(6) and not skip_sandbox: + all_results.extend(run_phase_6_health_system(spec, sbx)) + + # Phase 7: Filesystem RPC + if should_run(7) and not skip_sandbox: + all_results.extend(run_phase_7_filesystem_rpc(spec, sbx)) + + # Phase 8: Files REST + if should_run(8) and not skip_sandbox: + all_results.extend(run_phase_8_files_rest(spec, sbx)) + + # Phase 9: Watcher + if should_run(9) and not skip_sandbox: + all_results.extend(run_phase_9_watcher(spec, sbx)) + + # Phase 10: Processes + if should_run(10) and not skip_sandbox: + all_results.extend(run_phase_10_processes(spec, sbx)) + + # Phase 11: Streaming + if should_run(11) and not skip_sandbox: + all_results.extend(run_phase_11_streaming(spec, sbx)) + + # Phase 12: Destructive + if should_run(12) and not skip_sandbox: + all_results.extend(run_phase_12_destructive(api_key, spec, sbx)) + + finally: + # Ensure cleanup + if sbx.sandbox_id: + sbx.cleanup() + + end_time = time.time() + + # Spec-level analysis + print("\n Analyzing spec for best-practice issues...") + spec_issues = analyze_spec(spec) + print(f" Found {len(spec_issues)} spec-level issues") + + # Generate report + print(f"\n Generating report: {output_path}") + report = generate_report(all_results, spec_issues, start_time, end_time) + with open(output_path, "w") as f: + f.write(report) + + # Summary + all_findings = [] + for r in all_results: + all_findings.extend(r.findings) + critical = [f for f in all_findings if f.severity == "critical"] + minor = [f for f in all_findings if f.severity == "minor"] + tested = sum(1 for r in all_results if r.tested) + + print("\n" + "=" * 60) + print(f" Results: 
{tested} endpoints tested") + print(f" Findings: {len(critical)} critical, {len(minor)} minor") + print(f" Report written to: {output_path}") + print("=" * 60) + + sys.exit(1 if critical else 0) + + +if __name__ == "__main__": + main() From c6d40913197fbbd2b5e03cc25303265c5bfe7bc6 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 23 Feb 2026 17:23:38 +0100 Subject: [PATCH 03/37] Rename envd.py to generate_openapi_reference.py --- scripts/{envd.py => generate_openapi_reference.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename scripts/{envd.py => generate_openapi_reference.py} (99%) diff --git a/scripts/envd.py b/scripts/generate_openapi_reference.py similarity index 99% rename from scripts/envd.py rename to scripts/generate_openapi_reference.py index f55c104d..dcdde7cb 100755 --- a/scripts/envd.py +++ b/scripts/generate_openapi_reference.py @@ -12,7 +12,7 @@ - Main E2B API spec (spec/openapi.yml) Usage: - python3 scripts/envd.py + python3 scripts/generate_openapi_reference.py Outputs e2b-openapi.yml in the current working directory. Requires: Docker, PyYAML (pip install pyyaml). From 251e109df9fd9214f34b3b17801f3028dc60010b Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 01:35:10 +0100 Subject: [PATCH 04/37] Refactor generate script to fetch specs from e2b-dev/infra The script now clones e2b-dev/infra at specified commits inside Docker instead of reading from local paths. Supports separate --envd-commit and --api-commit flags (defaults to main). Regenerated openapi-public.yml from latest main. 
--- openapi-public.yml | 59 +---- scripts/generate_openapi_reference.py | 361 ++++++++++++++++++-------- 2 files changed, 264 insertions(+), 156 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index f1500898..4a932d36 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -110,14 +110,12 @@ paths: '200': $ref: '#/components/responses/DownloadSuccess' content: *id003 - '400': - $ref: '#/components/responses/InvalidPath' '401': $ref: '#/components/responses/InvalidUser' + '400': + $ref: '#/components/responses/InvalidPath' '404': $ref: '#/components/responses/FileNotFound' - '406': - $ref: '#/components/responses/NotAcceptable' '500': $ref: '#/components/responses/InternalServerError' '502': *id001 @@ -1974,32 +1972,29 @@ components: name: path in: query required: false - description: Path to the file, URL encoded. Can be relative to the user's home - directory (e.g. "file.txt" resolves to ~/file.txt). + description: Path to the file, URL encoded. Can be relative to user's home directory. schema: type: string User: name: username in: query required: false - description: User for setting file ownership and resolving relative paths. Defaults - to the sandbox's default user. + description: User used for setting the owner, or resolving relative paths. schema: type: string Signature: name: signature in: query required: false - description: HMAC signature for access verification. Required when no X-Access-Token - header is provided. Format is "v1_". + description: Signature used for file access permission verification. schema: type: string SignatureExpiration: name: signature_expiration in: query required: false - description: Unix timestamp (seconds) after which the signature expires. Only - used with the signature parameter. + description: Signature expiration used for defining the expiration time of the + signature. 
schema: type: integer templateID: @@ -2102,74 +2097,44 @@ components: type: array items: $ref: '#/components/schemas/EntryInfo' - example: - - path: /home/user/hello.txt - name: hello.txt - type: file DownloadSuccess: - description: File content. Content-Type is detected from the file extension - (defaults to application/octet-stream). Content-Disposition header contains - the filename. + description: Entire file downloaded successfully. content: application/octet-stream: schema: type: string format: binary - description: The raw file content - NotAcceptable: - description: Requested encoding is not supported - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - example: - message: 'no acceptable encoding found, supported: [identity, gzip]' - code: 406 + description: The file content InvalidPath: description: Invalid path content: application/json: schema: $ref: '#/components/schemas/Error' - example: - message: path '/home/user/docs' is a directory - code: 400 InternalServerError: description: Internal server error content: application/json: schema: $ref: '#/components/schemas/Error' - example: - message: 'error opening file ''/home/user/file.txt'': permission denied' - code: 500 FileNotFound: description: File not found content: application/json: schema: $ref: '#/components/schemas/Error' - example: - message: path '/home/user/missing.txt' does not exist - code: 404 InvalidUser: description: Invalid user content: application/json: schema: $ref: '#/components/schemas/Error' - example: - message: 'error looking up user ''nonexistent'': user: unknown user nonexistent' - code: 401 NotEnoughDiskSpace: description: Not enough disk space content: application/json: schema: $ref: '#/components/schemas/Error' - example: - message: not enough disk space available - code: 507 '400': description: Bad request content: @@ -2261,12 +2226,6 @@ components: mem_used: type: integer description: Used virtual memory in bytes - mem_total_mib: - type: integer - 
description: Total virtual memory in MiB - mem_used_mib: - type: integer - description: Used virtual memory in MiB disk_used: type: integer description: Used disk space in bytes diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index dcdde7cb..dca6eb01 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1,7 +1,8 @@ #!/usr/bin/env python3 """Generate a merged OpenAPI spec for the full E2B developer-facing API. -Combines multiple sources into a single e2b-openapi.yml: +Fetches specs from e2b-dev/infra at specified commits (or latest main), +combines multiple sources into a single openapi-public.yml: Sandbox API (served on -.e2b.app): - Proto-generated OpenAPI for process/filesystem Connect RPC @@ -12,9 +13,13 @@ - Main E2B API spec (spec/openapi.yml) Usage: - python3 scripts/generate_openapi_reference.py + python3 scripts/generate_openapi_reference.py [options] + +Options: + --envd-commit HASH Commit/branch/tag in e2b-dev/infra for envd specs (default: main) + --api-commit HASH Commit/branch/tag in e2b-dev/infra for platform API spec (default: main) + --output FILE Output path (default: openapi-public.yml in repo root) -Outputs e2b-openapi.yml in the current working directory. Requires: Docker, PyYAML (pip install pyyaml). 
""" @@ -36,16 +41,16 @@ # --------------------------------------------------------------------------- SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) -REPO_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, "..")) +DOCS_REPO_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, "..")) -# Sandbox (envd) specs -ENVD_SPEC_DIR = os.path.join(REPO_ROOT, "packages/envd/spec") -ENVD_REST_SPEC = os.path.join(ENVD_SPEC_DIR, "envd.yaml") +INFRA_REPO = "https://github.com/e2b-dev/infra.git" -# Platform API specs -API_SPEC = os.path.join(REPO_ROOT, "spec/openapi.yml") +# Paths within e2b-dev/infra +INFRA_ENVD_SPEC_DIR = "packages/envd/spec" +INFRA_ENVD_REST_SPEC = "packages/envd/spec/envd.yaml" +INFRA_API_SPEC = "spec/openapi.yml" -DOCKER_IMAGE = "protoc-gen-connect-openapi" +DOCKER_IMAGE = "e2b-openapi-generator" DOCKERFILE = """\ FROM golang:1.25-alpine @@ -59,7 +64,7 @@ version: v1 plugins: - plugin: connect-openapi - out: /output + out: /output/generated opt: - format=yaml """ @@ -248,7 +253,7 @@ def build_streaming_path(rpc: RpcMethod) -> dict[str, Any]: # --------------------------------------------------------------------------- -# Docker build & proto generation +# Docker: fetch specs from e2b-dev/infra and generate OpenAPI from protos # --------------------------------------------------------------------------- def docker_build_image() -> None: @@ -261,51 +266,151 @@ def docker_build_image() -> None: subprocess.run( ["docker", "build", "-t", DOCKER_IMAGE, "-f", dockerfile_path, "."], check=True, - cwd=REPO_ROOT, + cwd=DOCS_REPO_ROOT, ) finally: os.unlink(dockerfile_path) -def docker_generate_specs() -> list[str]: - """Run buf generate inside Docker, return list of generated YAML strings.""" - print("==> Generating OpenAPI specs from proto files") - with tempfile.TemporaryDirectory() as tmpdir: - buf_gen_path = os.path.join(tmpdir, "buf.gen.yaml") - with open(buf_gen_path, "w") as f: - f.write(BUF_GEN_YAML) +@dataclass +class FetchedSpecs: + """Paths to specs fetched 
from e2b-dev/infra.""" + envd_spec_dir: str # directory containing .proto files + envd_rest_spec: str # path to envd.yaml + api_spec: str # path to spec/openapi.yml + generated_docs: list[str] # raw YAML strings from buf generate + tmpdir: str # temp directory (caller must not delete until done) + + +def docker_fetch_and_generate(envd_commit: str, api_commit: str) -> FetchedSpecs: + """Clone e2b-dev/infra at specified commits, run buf generate, return paths. + + Uses a single Docker container that: + 1. Clones the repo at the envd commit + 2. Copies envd spec files to /output/envd/ + 3. Runs buf generate on the proto files + 4. If api_commit differs, checks out that commit + 5. Copies spec/openapi.yml to /output/api/ + """ + print(f"==> Fetching specs from e2b-dev/infra") + print(f" envd commit: {envd_commit}") + print(f" api commit: {api_commit}") + + tmpdir = tempfile.mkdtemp(prefix="e2b-openapi-") + output_dir = tmpdir + + # Create output subdirectories + for subdir in ("envd", "api", "generated"): + os.makedirs(os.path.join(output_dir, subdir), exist_ok=True) + + # Build the shell script that runs inside Docker + # It handles both commits in a single clone + same_commit = envd_commit == api_commit + if same_commit: + docker_script = f""" +set -e +echo "--- Cloning e2b-dev/infra at {envd_commit} ---" +git clone --depth 1 --branch {envd_commit} {INFRA_REPO} /repo 2>/dev/null || {{ + git clone {INFRA_REPO} /repo + cd /repo + git checkout {envd_commit} +}} +cd /repo + +echo "--- Copying envd specs ---" +cp -r {INFRA_ENVD_SPEC_DIR}/. 
/output/envd/ + +echo "--- Copying platform API spec ---" +cp {INFRA_API_SPEC} /output/api/openapi.yml + +echo "--- Running buf generate ---" +cd {INFRA_ENVD_SPEC_DIR} +buf generate --template /config/buf.gen.yaml + +echo "--- Done ---" +""" + else: + docker_script = f""" +set -e +echo "--- Cloning e2b-dev/infra ---" +git clone {INFRA_REPO} /repo +cd /repo - output_dir = os.path.join(tmpdir, "output") - os.makedirs(output_dir) +echo "--- Checking out envd commit: {envd_commit} ---" +git checkout {envd_commit} - subprocess.run( - [ - "docker", "run", "--rm", - "-v", f"{ENVD_SPEC_DIR}:/spec:ro", - "-v", f"{buf_gen_path}:/config/buf.gen.yaml:ro", - "-v", f"{output_dir}:/output", - DOCKER_IMAGE, - "sh", "-c", - "cd /spec && buf generate --template /config/buf.gen.yaml", - ], - check=True, - ) +echo "--- Copying envd specs ---" +cp -r {INFRA_ENVD_SPEC_DIR}/. /output/envd/ + +echo "--- Running buf generate ---" +cd {INFRA_ENVD_SPEC_DIR} +buf generate --template /config/buf.gen.yaml +cd /repo + +echo "--- Checking out api commit: {api_commit} ---" +git checkout {api_commit} + +echo "--- Copying platform API spec ---" +cp {INFRA_API_SPEC} /output/api/openapi.yml + +echo "--- Done ---" +""" + + # Write buf.gen.yaml config + buf_gen_path = os.path.join(tmpdir, "buf.gen.yaml") + with open(buf_gen_path, "w") as f: + f.write(BUF_GEN_YAML) + + # Write the script to a file + script_path = os.path.join(tmpdir, "run.sh") + with open(script_path, "w") as f: + f.write(docker_script) + + subprocess.run( + [ + "docker", "run", "--rm", + "-v", f"{output_dir}:/output", + "-v", f"{buf_gen_path}:/config/buf.gen.yaml:ro", + "-v", f"{script_path}:/run.sh:ro", + DOCKER_IMAGE, + "sh", "/run.sh", + ], + check=True, + ) - generated: list[str] = [] - for root, _, files in os.walk(output_dir): - for name in sorted(files): - if name.endswith((".yaml", ".yml")): - path = os.path.join(root, name) - rel = os.path.relpath(path, output_dir) - print(f" Generated: {rel}") - with open(path) as f: - 
generated.append(f.read()) - - if not generated: - print("ERROR: No files were generated", file=sys.stderr) + # Read generated OpenAPI YAML files + generated_dir = os.path.join(output_dir, "generated") + generated_docs: list[str] = [] + for root, _, files in os.walk(generated_dir): + for name in sorted(files): + if name.endswith((".yaml", ".yml")): + path = os.path.join(root, name) + rel = os.path.relpath(path, generated_dir) + print(f" Generated: {rel}") + with open(path) as f: + generated_docs.append(f.read()) + + if not generated_docs: + print("ERROR: No files were generated by buf", file=sys.stderr) + sys.exit(1) + + envd_spec_dir = os.path.join(output_dir, "envd") + envd_rest_spec = os.path.join(envd_spec_dir, "envd.yaml") + api_spec = os.path.join(output_dir, "api", "openapi.yml") + + # Verify required files exist + for path, label in [(envd_rest_spec, "envd.yaml"), (api_spec, "openapi.yml")]: + if not os.path.exists(path): + print(f"ERROR: {label} not found at {path}", file=sys.stderr) sys.exit(1) - return generated + return FetchedSpecs( + envd_spec_dir=envd_spec_dir, + envd_rest_spec=envd_rest_spec, + api_spec=api_spec, + generated_docs=generated_docs, + tmpdir=tmpdir, + ) # --------------------------------------------------------------------------- @@ -314,7 +419,7 @@ def docker_generate_specs() -> list[str]: def load_yaml_file(path: str) -> str: """Load a YAML file and return its raw content.""" - print(f"==> Loading spec: {os.path.relpath(path, REPO_ROOT)}") + print(f"==> Loading spec: {os.path.basename(path)}") with open(path) as f: return f.read() @@ -690,68 +795,112 @@ def fill_empty_responses(spec: dict[str, Any]) -> None: # --------------------------------------------------------------------------- def main() -> None: + # Parse CLI args + envd_commit = "main" + api_commit = "main" + output_path = os.path.join(DOCS_REPO_ROOT, "openapi-public.yml") + + args = sys.argv[1:] + i = 0 + while i < len(args): + if args[i] == "--envd-commit" and i + 1 < 
len(args): + envd_commit = args[i + 1] + i += 2 + elif args[i] == "--api-commit" and i + 1 < len(args): + api_commit = args[i + 1] + i += 2 + elif args[i] == "--output" and i + 1 < len(args): + output_path = args[i + 1] + i += 2 + elif args[i] in ("--help", "-h"): + print(__doc__) + sys.exit(0) + else: + print(f"Unknown argument: {args[i]}", file=sys.stderr) + print(__doc__, file=sys.stderr) + sys.exit(2) + i += 1 + + print("=" * 60) + print(" E2B OpenAPI Reference Generator") + print("=" * 60) + print(f" Source repo: {INFRA_REPO}") + print(f" envd commit: {envd_commit}") + print(f" api commit: {api_commit}") + print(f" Output: {output_path}") + print() + + # Build Docker image docker_build_image() - # --- Sandbox API (envd) --- - proto_docs = docker_generate_specs() - envd_rest_doc = load_yaml_file(ENVD_REST_SPEC) - - # Track which paths come from envd so we can set their server - envd_raw_docs = [envd_rest_doc] + proto_docs - envd_paths: set[str] = set() - for raw in envd_raw_docs: - doc = yaml.safe_load(raw) - if doc and "paths" in doc: - envd_paths.update(doc["paths"].keys()) - - # --- Platform API --- - api_doc = load_yaml_file(API_SPEC) - - # --- Merge everything --- - # Order: envd first, then platform API (platform schemas take precedence - # for shared names like Error since they're more complete). - # Protect envd paths so the platform API doesn't overwrite them - # (e.g. /health exists in both but the envd version is authoritative). 
- merged = merge_specs(envd_raw_docs + [api_doc], protected_paths=envd_paths) - - # Auto-detect and fill streaming RPC endpoints - streaming_rpcs = find_streaming_rpcs(ENVD_SPEC_DIR) - print(f"==> Found {len(streaming_rpcs)} streaming RPCs in proto files") - fill_streaming_endpoints(merged, streaming_rpcs) - for rpc in streaming_rpcs: - envd_paths.add(rpc.path) - - # Attach per-path server overrides so each path has exactly one server - tag_paths_with_server(merged, envd_paths, SANDBOX_SERVER) - platform_paths = set(merged["paths"].keys()) - envd_paths - tag_paths_with_server(merged, platform_paths, PLATFORM_SERVER) - - # Ensure all sandbox endpoints declare auth - apply_sandbox_auth(merged, envd_paths) + # Fetch specs and generate proto OpenAPI + specs = docker_fetch_and_generate(envd_commit, api_commit) - # Add 502 sandbox-not-found to all envd endpoints - add_sandbox_not_found(merged, envd_paths) - - # Fix known issues - fix_security_schemes(merged) - rename_envd_auth_scheme(merged) - add_operation_ids(merged) - - # Remove internal/unwanted paths - filter_paths(merged) - - # Ensure all 2xx responses have a content block (required by Mintlify) - fill_empty_responses(merged) - - # Clean up unreferenced schemas left over from filtered paths - remove_orphaned_schemas(merged) - - # Write output - output_path = os.path.join(os.getcwd(), "e2b-openapi.yml") - with open(output_path, "w") as f: - yaml.dump(merged, f, default_flow_style=False, sort_keys=False, allow_unicode=True) + try: + # --- Sandbox API (envd) --- + envd_rest_doc = load_yaml_file(specs.envd_rest_spec) + proto_docs = specs.generated_docs + + # Track which paths come from envd so we can set their server + envd_raw_docs = [envd_rest_doc] + proto_docs + envd_paths: set[str] = set() + for raw in envd_raw_docs: + doc = yaml.safe_load(raw) + if doc and "paths" in doc: + envd_paths.update(doc["paths"].keys()) + + # --- Platform API --- + api_doc = load_yaml_file(specs.api_spec) + + # --- Merge everything --- + 
# Order: envd first, then platform API (platform schemas take precedence + # for shared names like Error since they're more complete). + # Protect envd paths so the platform API doesn't overwrite them + # (e.g. /health exists in both but the envd version is authoritative). + merged = merge_specs(envd_raw_docs + [api_doc], protected_paths=envd_paths) + + # Auto-detect and fill streaming RPC endpoints + streaming_rpcs = find_streaming_rpcs(specs.envd_spec_dir) + print(f"==> Found {len(streaming_rpcs)} streaming RPCs in proto files") + fill_streaming_endpoints(merged, streaming_rpcs) + for rpc in streaming_rpcs: + envd_paths.add(rpc.path) + + # Attach per-path server overrides so each path has exactly one server + tag_paths_with_server(merged, envd_paths, SANDBOX_SERVER) + platform_paths = set(merged["paths"].keys()) - envd_paths + tag_paths_with_server(merged, platform_paths, PLATFORM_SERVER) + + # Ensure all sandbox endpoints declare auth + apply_sandbox_auth(merged, envd_paths) + + # Add 502 sandbox-not-found to all envd endpoints + add_sandbox_not_found(merged, envd_paths) + + # Fix known issues + fix_security_schemes(merged) + rename_envd_auth_scheme(merged) + add_operation_ids(merged) + + # Remove internal/unwanted paths + filter_paths(merged) + + # Ensure all 2xx responses have a content block (required by Mintlify) + fill_empty_responses(merged) + + # Clean up unreferenced schemas left over from filtered paths + remove_orphaned_schemas(merged) + + # Write output + with open(output_path, "w") as f: + yaml.dump(merged, f, default_flow_style=False, sort_keys=False, allow_unicode=True) + + print(f"\n==> Written to {output_path}") - print(f"==> Written to {output_path}") + finally: + # Clean up temp directory + import shutil + shutil.rmtree(specs.tmpdir, ignore_errors=True) if __name__ == "__main__": From 86bb0f677b66e154263565b0a594c92efdeaeace Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 01:38:08 +0100 Subject: [PATCH 05/37] Require sandbox 
access token auth on all envd endpoints Sandbox endpoints always require Authorization: Bearer using the envdAccessToken from sandbox creation. Update generate script to set required auth, update validate script to always pass token and create sandboxes with secure: true. --- openapi-public.yml | 242 ++++++++++++-------------- scripts/generate_openapi_reference.py | 4 +- scripts/validate_api_reference.py | 35 ++-- 3 files changed, 122 insertions(+), 159 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 4a932d36..cb310a56 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -5,7 +5,7 @@ info: description: Complete E2B developer API. Platform endpoints are served on api.e2b.app. Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app. servers: -- &id006 +- &id005 url: https://api.e2b.app description: E2B Platform API paths: @@ -15,12 +15,12 @@ paths: responses: '204': description: The service is healthy - content: &id003 + content: &id004 application/json: schema: type: object description: Empty response - '502': &id001 + '502': &id002 description: Sandbox not found content: application/json: @@ -44,12 +44,11 @@ paths: description: Error code example: 502 security: - - &id004 + - &id001 SandboxAccessTokenAuth: [] - - &id005 {} operationId: getHealth servers: - - &id002 + - &id003 url: https://{port}-{sandboxID}.e2b.app description: Sandbox API (envd) — runs inside each sandbox variables: @@ -63,8 +62,7 @@ paths: get: summary: Get the stats of the service security: - - SandboxAccessTokenAuth: [] - - {} + - *id001 responses: '200': description: The resource usage metrics of the service @@ -72,16 +70,15 @@ paths: application/json: schema: $ref: '#/components/schemas/Metrics' - '502': *id001 + '502': *id002 operationId: getMetrics servers: - - *id002 + - *id003 /envs: get: summary: Get the environment variables security: - - SandboxAccessTokenAuth: [] - - {} + - *id001 responses: '200': description: Environment variables @@ -89,18 
+86,17 @@ paths: application/json: schema: $ref: '#/components/schemas/EnvVars' - '502': *id001 + '502': *id002 operationId: getEnvVars servers: - - *id002 + - *id003 /files: get: summary: Download a file tags: - files security: - - SandboxAccessTokenAuth: [] - - {} + - *id001 parameters: - $ref: '#/components/parameters/FilePath' - $ref: '#/components/parameters/User' @@ -109,7 +105,7 @@ paths: responses: '200': $ref: '#/components/responses/DownloadSuccess' - content: *id003 + content: *id004 '401': $ref: '#/components/responses/InvalidUser' '400': @@ -118,7 +114,7 @@ paths: $ref: '#/components/responses/FileNotFound' '500': $ref: '#/components/responses/InternalServerError' - '502': *id001 + '502': *id002 operationId: downloadFile post: summary: Upload a file and ensure the parent directories exist. If the file @@ -126,8 +122,7 @@ paths: tags: - files security: - - SandboxAccessTokenAuth: [] - - {} + - *id001 parameters: - $ref: '#/components/parameters/FilePath' - $ref: '#/components/parameters/User' @@ -138,7 +133,7 @@ paths: responses: '200': $ref: '#/components/responses/UploadSuccess' - content: *id003 + content: *id004 '400': $ref: '#/components/responses/InvalidPath' '401': @@ -147,10 +142,10 @@ paths: $ref: '#/components/responses/InternalServerError' '507': $ref: '#/components/responses/NotEnoughDiskSpace' - '502': *id001 + '502': *id002 operationId: uploadFile servers: - - *id002 + - *id003 /filesystem.Filesystem/CreateWatcher: post: tags: @@ -181,12 +176,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.CreateWatcherResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/GetWatcherEvents: post: tags: @@ -216,12 +210,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.GetWatcherEventsResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 
/filesystem.Filesystem/ListDir: post: tags: @@ -251,12 +244,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.ListDirResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/MakeDir: post: tags: @@ -286,12 +278,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.MakeDirResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/Move: post: tags: @@ -321,12 +312,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.MoveResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/Remove: post: tags: @@ -356,12 +346,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.RemoveResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/RemoveWatcher: post: tags: @@ -391,12 +380,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.RemoveWatcherResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/Stat: post: tags: @@ -426,12 +414,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.StatResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /filesystem.Filesystem/WatchDir: post: tags: @@ -452,12 +439,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.WatchDirResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/CloseStdin: post: tags: @@ -489,12 +475,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.CloseStdinResponse' - '502': *id001 + 
'502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/Connect: post: tags: @@ -515,12 +500,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.ConnectResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/List: post: tags: @@ -550,12 +534,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.ListResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/SendInput: post: tags: @@ -585,12 +568,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.SendInputResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/SendSignal: post: tags: @@ -620,12 +602,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.SendSignalResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/Start: post: tags: @@ -646,12 +627,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.StartResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/StreamInput: post: tags: @@ -673,12 +653,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.StreamInputResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /process.Process/Update: post: tags: @@ -708,12 +687,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.UpdateResponse' - '502': *id001 + '502': *id002 security: - - *id004 - - *id005 + - *id001 servers: - - *id002 + - *id003 /teams: get: description: List all teams @@ -736,7 +714,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 
/teams/{teamID}/metrics: get: description: Get metrics for the team @@ -780,7 +758,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /teams/{teamID}/metrics/max: get: description: Get the maximum metrics for the team in the given interval @@ -831,7 +809,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes: get: description: List all running sandboxes @@ -889,7 +867,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /v2/sandboxes: get: description: List all sandboxes @@ -934,7 +912,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/metrics: get: description: List metrics for given sandboxes @@ -968,7 +946,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/logs: get: description: Get sandbox logs. Use /v2/sandboxes/{sandboxID}/logs instead. @@ -1008,7 +986,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}: get: description: Get a sandbox by id @@ -1042,7 +1020,7 @@ paths: responses: '204': description: The sandbox was killed successfully - content: *id003 + content: *id004 '404': $ref: '#/components/responses/404' '401': @@ -1050,7 +1028,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/metrics: get: description: Get sandbox metrics @@ -1094,7 +1072,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/pause: post: description: Pause the sandbox @@ -1107,7 +1085,7 @@ paths: responses: '204': description: The sandbox was paused successfully and can be resumed - content: *id003 + content: *id004 '409': $ref: '#/components/responses/409' '404': @@ -1117,7 +1095,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/resume: post: deprecated: 
true @@ -1150,7 +1128,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/connect: post: description: Returns sandbox details. If the sandbox is paused, it will be resumed. @@ -1189,7 +1167,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/timeout: post: description: Set the timeout for the sandbox. The sandbox will expire x seconds @@ -1219,7 +1197,7 @@ paths: responses: '204': description: Successfully set the sandbox timeout - content: *id003 + content: *id004 '401': $ref: '#/components/responses/401' '404': @@ -1227,7 +1205,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/refreshes: post: description: Refresh the sandbox extending its time to live @@ -1252,13 +1230,13 @@ paths: responses: '204': description: Successfully refreshed the sandbox - content: *id003 + content: *id004 '401': $ref: '#/components/responses/401' '404': $ref: '#/components/responses/404' servers: - - *id006 + - *id005 /sandboxes/{sandboxID}/snapshots: post: description: Create a persistent snapshot from the sandbox's current state. 
@@ -1298,7 +1276,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /snapshots: get: description: List all snapshots for the team @@ -1329,7 +1307,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /v3/templates: post: description: Create a new template @@ -1357,7 +1335,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /v2/templates: post: description: Create a new template @@ -1386,7 +1364,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}/files/{hash}: get: description: Get an upload link for a tar file containing build layer files @@ -1419,7 +1397,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates: get: description: List all templates @@ -1476,7 +1454,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}: get: description: List all builds for a template @@ -1537,7 +1515,7 @@ paths: responses: '204': description: The template was deleted successfully - content: *id003 + content: *id004 '401': $ref: '#/components/responses/401' '500': @@ -1561,7 +1539,7 @@ paths: responses: '200': description: The template was updated successfully - content: *id003 + content: *id004 '400': $ref: '#/components/responses/400' '401': @@ -1569,7 +1547,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1584,13 +1562,13 @@ paths: responses: '202': description: The build has started - content: *id003 + content: *id004 '401': $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /v2/templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1610,13 +1588,13 @@ paths: responses: '202': description: The build has started - content: *id003 + 
content: *id004 '401': $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /v2/templates/{templateID}: patch: description: Update template @@ -1647,7 +1625,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}/builds/{buildID}/status: get: description: Get template build info @@ -1695,7 +1673,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}/builds/{buildID}/logs: get: description: Get template build logs @@ -1750,7 +1728,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/tags: post: description: Assign tag(s) to a template build @@ -1794,7 +1772,7 @@ paths: responses: '204': description: Tags deleted successfully - content: *id003 + content: *id004 '400': $ref: '#/components/responses/400' '401': @@ -1804,7 +1782,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/{templateID}/tags: get: description: List all tags for a template @@ -1832,7 +1810,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /templates/aliases/{alias}: get: description: Check if template with given alias exists @@ -1863,7 +1841,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /volumes: get: description: List all team volumes @@ -1912,7 +1890,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 /volumes/{volumeID}: get: description: Get team volume info @@ -1948,7 +1926,7 @@ paths: responses: '204': description: Successfully deleted a team volume - content: *id003 + content: *id004 '401': $ref: '#/components/responses/401' '404': @@ -1956,7 +1934,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id006 + - *id005 components: securitySchemes: ApiKeyAuth: diff --git a/scripts/generate_openapi_reference.py 
b/scripts/generate_openapi_reference.py index dca6eb01..e31bec9f 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -511,14 +511,14 @@ def apply_sandbox_auth(spec: dict[str, Any], envd_paths: set[str]) -> None: proto-generated Connect RPC endpoints don't. Add optional auth (SandboxAccessTokenAuth or anonymous) to any envd endpoint missing it. """ - auth_security = [{SANDBOX_AUTH_SCHEME: []}, {}] + auth_security = [{SANDBOX_AUTH_SCHEME: []}] for path in envd_paths: path_item = spec["paths"].get(path) if not path_item: continue for method in ("get", "post", "put", "patch", "delete"): op = path_item.get(method) - if op and "security" not in op: + if op: op["security"] = auth_security diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index a88dcf4a..f95799e8 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -575,7 +575,7 @@ def create(self) -> bool: print("\n Creating test sandbox...") h = api_key_hdr(self.api_key) status, body, _ = ctrl("POST", "/sandboxes", headers=h, - body={"templateID": "base", "timeout": 600}) + body={"templateID": "base", "timeout": 600, "secure": True}) if status != 201 or not isinstance(body, dict): print(f" FAILED to create sandbox: {status}") return False @@ -601,7 +601,8 @@ def ensure_alive(self) -> bool: if not self.sandbox_id: return False try: - status, _, _ = envd("GET", self.sandbox_id, "/health", timeout=5) + h = bearer_hdr(self.access_token) if self.access_token else {} + status, _, _ = envd("GET", self.sandbox_id, "/health", headers=h, timeout=5) return status in (200, 204) except Exception: return False @@ -1183,7 +1184,7 @@ def run_phase_4_sandboxes_read(api_key: str, spec: dict, sbx: SandboxManager) -> ep.tested = True ep.expected_status = 201 status, body, _ = ctrl("POST", "/sandboxes", headers=h, - body={"templateID": "base", "timeout": 30}) + body={"templateID": "base", "timeout": 30, "secure": True}) 
ep.actual_status = status ep.response_body = body if status == 201 and isinstance(body, dict): @@ -1395,7 +1396,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/health", surface="sandbox") ep.tested = True ep.expected_status = 200 # What the merged spec says - status, body, _ = envd("GET", sid, "/health") + status, body, _ = envd("GET", sid, "/health", headers=bearer_hdr(token)) ep.actual_status = status if status == 204: ep.findings.append(Finding( @@ -1413,8 +1414,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/metrics", surface="sandbox") ep.tested = True ep.expected_status = 200 - h = bearer_hdr(token) if token else {} - status, body, _ = envd("GET", sid, "/metrics", headers=h) + status, body, _ = envd("GET", sid, "/metrics", headers=bearer_hdr(token)) ep.actual_status = status ep.response_body = body if status == 200 and isinstance(body, dict): @@ -1422,25 +1422,12 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /metrics")) results.append(ep) - # Test /metrics without auth (should also work per spec: security: [{}, {AccessTokenAuth}]) - print(" GET /metrics (no auth)") - ep2 = EndpointResult("GET", "/metrics", surface="sandbox") - ep2.tested = True - ep2.expected_status = 200 - status2, body2, _ = envd("GET", sid, "/metrics") - ep2.actual_status = status2 - if status2 != 200: - ep2.findings.append(Finding("critical", "auth", "GET /metrics", - f"Spec says no auth required ({{}}) but got {status2}", "200", str(status2))) - results.append(ep2) - # POST /init — EDGE CASE: what happens on already-initialized sandbox? 
print(" POST /init (already initialized)") ep = EndpointResult("POST", "/init", surface="sandbox") ep.tested = True ep.expected_status = 204 - h = bearer_hdr(token) if token else {} - status, body, _ = envd("POST", sid, "/init", headers=h, body={}) + status, body, _ = envd("POST", sid, "/init", headers=bearer_hdr(token), body={}) ep.actual_status = status ep.response_body = body if status == 204: @@ -1458,8 +1445,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/envs", surface="sandbox") ep.tested = True ep.expected_status = 200 - h = bearer_hdr(token) if token else {} - status, body, _ = envd("GET", sid, "/envs", headers=h) + status, body, _ = envd("GET", sid, "/envs", headers=bearer_hdr(token)) ep.actual_status = status ep.response_body = body if status == 200 and isinstance(body, dict): @@ -1609,8 +1595,7 @@ def run_phase_8_files_rest(spec: dict, sbx: SandboxManager) -> list[EndpointResu ep = EndpointResult("GET", "/files", surface="sandbox") ep.tested = True ep.expected_status = 200 - h = bearer_hdr(token) if token else {} - status, body, resp_headers = envd("GET", sid, "/files", headers=h, + status, body, resp_headers = envd("GET", sid, "/files", headers=bearer_hdr(token), params={"path": "/tmp/test-file.txt"}) ep.actual_status = status if status == 200: @@ -1627,7 +1612,7 @@ def run_phase_8_files_rest(spec: dict, sbx: SandboxManager) -> list[EndpointResu ep = EndpointResult("GET", "/files", surface="sandbox") ep.tested = True ep.expected_status = 404 - status, body, _ = envd("GET", sid, "/files", headers=h, + status, body, _ = envd("GET", sid, "/files", headers=bearer_hdr(token), params={"path": "/nonexistent/file.txt"}) ep.actual_status = status if status != 404: From 496594388b0e2562ace7c94f8520b34c4458d527 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 02:25:07 +0100 Subject: [PATCH 06/37] Fix auth schemes: restore AccessTokenAuth (Bearer), add E2B_ACCESS_TOKEN support - 
Keep AccessTokenAuth (type: http, scheme: bearer) for platform endpoints - Remove /files from AUTH_EXEMPT_ENDPOINTS (only /health is exempt) - Add E2B_ACCESS_TOKEN env var to validate script for Bearer auth testing - Pass sandbox access token to /files endpoints in validate script - Fix sandboxID server variable default to $SANDBOX_ID --- openapi-public.yml | 179 +++++++++++++------------- scripts/generate_openapi_reference.py | 72 +++++++---- scripts/validate_api_reference.py | 80 +++++++----- 3 files changed, 185 insertions(+), 146 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index cb310a56..01e72fe9 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -20,7 +20,7 @@ paths: schema: type: object description: Empty response - '502': &id002 + '502': &id001 description: Sandbox not found content: application/json: @@ -43,12 +43,9 @@ paths: type: integer description: Error code example: 502 - security: - - &id001 - SandboxAccessTokenAuth: [] operationId: getHealth servers: - - &id003 + - &id002 url: https://{port}-{sandboxID}.e2b.app description: Sandbox API (envd) — runs inside each sandbox variables: @@ -56,13 +53,14 @@ paths: default: '49983' description: Port number sandboxID: - default: '{sandbox-id}' + default: $SANDBOX_ID description: Sandbox identifier /metrics: get: summary: Get the stats of the service security: - - *id001 + - &id003 + SandboxAccessTokenAuth: [] responses: '200': description: The resource usage metrics of the service @@ -70,15 +68,15 @@ paths: application/json: schema: $ref: '#/components/schemas/Metrics' - '502': *id002 + '502': *id001 operationId: getMetrics servers: - - *id003 + - *id002 /envs: get: summary: Get the environment variables security: - - *id001 + - *id003 responses: '200': description: Environment variables @@ -86,17 +84,17 @@ paths: application/json: schema: $ref: '#/components/schemas/EnvVars' - '502': *id002 + '502': *id001 operationId: getEnvVars servers: - - *id003 + - *id002 /files: get: 
summary: Download a file tags: - files security: - - *id001 + - *id003 parameters: - $ref: '#/components/parameters/FilePath' - $ref: '#/components/parameters/User' @@ -114,7 +112,7 @@ paths: $ref: '#/components/responses/FileNotFound' '500': $ref: '#/components/responses/InternalServerError' - '502': *id002 + '502': *id001 operationId: downloadFile post: summary: Upload a file and ensure the parent directories exist. If the file @@ -122,7 +120,7 @@ paths: tags: - files security: - - *id001 + - *id003 parameters: - $ref: '#/components/parameters/FilePath' - $ref: '#/components/parameters/User' @@ -142,10 +140,10 @@ paths: $ref: '#/components/responses/InternalServerError' '507': $ref: '#/components/responses/NotEnoughDiskSpace' - '502': *id002 + '502': *id001 operationId: uploadFile servers: - - *id003 + - *id002 /filesystem.Filesystem/CreateWatcher: post: tags: @@ -176,11 +174,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.CreateWatcherResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/GetWatcherEvents: post: tags: @@ -210,11 +208,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.GetWatcherEventsResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/ListDir: post: tags: @@ -244,11 +242,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.ListDirResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/MakeDir: post: tags: @@ -278,11 +276,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.MakeDirResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/Move: post: tags: @@ -312,11 +310,11 @@ paths: application/json: schema: $ref: 
'#/components/schemas/filesystem.MoveResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/Remove: post: tags: @@ -346,11 +344,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.RemoveResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/RemoveWatcher: post: tags: @@ -380,11 +378,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.RemoveWatcherResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/Stat: post: tags: @@ -414,11 +412,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.StatResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /filesystem.Filesystem/WatchDir: post: tags: @@ -439,11 +437,11 @@ paths: application/json: schema: $ref: '#/components/schemas/filesystem.WatchDirResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/CloseStdin: post: tags: @@ -475,11 +473,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.CloseStdinResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/Connect: post: tags: @@ -500,11 +498,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.ConnectResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/List: post: tags: @@ -534,11 +532,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.ListResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/SendInput: post: tags: @@ -568,11 +566,11 @@ paths: application/json: schema: $ref: 
'#/components/schemas/process.SendInputResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/SendSignal: post: tags: @@ -602,11 +600,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.SendSignalResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/Start: post: tags: @@ -627,11 +625,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.StartResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/StreamInput: post: tags: @@ -653,11 +651,11 @@ paths: application/json: schema: $ref: '#/components/schemas/process.StreamInputResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /process.Process/Update: post: tags: @@ -687,18 +685,18 @@ paths: application/json: schema: $ref: '#/components/schemas/process.UpdateResponse' - '502': *id002 + '502': *id001 security: - - *id001 + - *id003 servers: - - *id003 + - *id002 /teams: get: description: List all teams tags: - auth security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] responses: '200': description: Successfully returned all teams @@ -1371,7 +1369,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - $ref: '#/components/parameters/templateID' @@ -1405,7 +1403,7 @@ paths: - templates security: - ApiKeyAuth: [] - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - in: query required: false @@ -1433,7 +1431,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] requestBody: required: true content: @@ -1483,7 +1481,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - $ref: '#/components/parameters/templateID' requestBody: @@ -1509,7 +1507,7 @@ 
paths: - templates security: - ApiKeyAuth: [] - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - $ref: '#/components/parameters/templateID' responses: @@ -1527,7 +1525,7 @@ paths: - templates security: - ApiKeyAuth: [] - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - $ref: '#/components/parameters/templateID' requestBody: @@ -1555,7 +1553,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - $ref: '#/components/parameters/templateID' - $ref: '#/components/parameters/buildID' @@ -1602,7 +1600,7 @@ paths: - templates security: - ApiKeyAuth: [] - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] parameters: - $ref: '#/components/parameters/templateID' requestBody: @@ -1632,7 +1630,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - $ref: '#/components/parameters/templateID' @@ -1680,7 +1678,7 @@ paths: tags: - templates security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - $ref: '#/components/parameters/templateID' @@ -1848,7 +1846,7 @@ paths: tags: - volumes security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] responses: '200': @@ -1868,7 +1866,7 @@ paths: tags: - volumes security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] requestBody: required: true @@ -1897,7 +1895,7 @@ paths: tags: - volumes security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - $ref: '#/components/parameters/volumeID' @@ -1919,7 +1917,7 @@ paths: tags: - volumes security: - - SandboxAccessTokenAuth: [] + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - $ref: '#/components/parameters/volumeID' @@ -1937,14 +1935,23 @@ paths: - *id005 components: securitySchemes: + AccessTokenAuth: + type: http + scheme: bearer ApiKeyAuth: type: apiKey in: header name: X-API-Key SandboxAccessTokenAuth: - 
type: http - scheme: bearer - bearerFormat: access_token + type: apiKey + in: header + name: X-Access-Token + description: 'Sandbox access token (`envdAccessToken`) for authenticating requests + to a running sandbox. Returned by: [POST /sandboxes](/api-reference/sandboxes/create-a-sandbox) + (on create), [POST /sandboxes/{sandboxID}/connect](/api-reference/sandboxes/connect-to-a-sandbox) + (on connect), [POST /sandboxes/{sandboxID}/resume](/api-reference/sandboxes/resume-a-sandbox) + (on resume), and [GET /sandboxes/{sandboxID}](/api-reference/sandboxes/get-a-sandbox) + (for running or paused sandboxes).' parameters: FilePath: name: path diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index e31bec9f..5f0e9260 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -75,7 +75,7 @@ "description": "Sandbox API (envd) — runs inside each sandbox", "variables": { "port": {"default": "49983", "description": "Port number"}, - "sandboxID": {"default": "{sandbox-id}", "description": "Sandbox identifier"}, + "sandboxID": {"default": "$SANDBOX_ID", "description": "Sandbox identifier"}, }, } @@ -504,12 +504,18 @@ def fill_streaming_endpoints(spec: dict[str, Any], streaming_rpcs: list[RpcMetho spec["paths"][rpc.path] = build_streaming_path(rpc) +# Endpoints that don't require access token auth (matched as "METHOD/path") +AUTH_EXEMPT_ENDPOINTS = { + "get/health", +} + + def apply_sandbox_auth(spec: dict[str, Any], envd_paths: set[str]) -> None: """Ensure all envd/sandbox endpoints declare the SandboxAccessTokenAuth security. The hand-written envd.yaml already has security declarations, but the - proto-generated Connect RPC endpoints don't. Add optional auth - (SandboxAccessTokenAuth or anonymous) to any envd endpoint missing it. + proto-generated Connect RPC endpoints don't. Endpoints listed in + AUTH_EXEMPT_ENDPOINTS are left without auth requirements. 
""" auth_security = [{SANDBOX_AUTH_SCHEME: []}] for path in envd_paths: @@ -518,7 +524,12 @@ def apply_sandbox_auth(spec: dict[str, Any], envd_paths: set[str]) -> None: continue for method in ("get", "post", "put", "patch", "delete"): op = path_item.get(method) - if op: + if not op: + continue + key = f"{method}{path}" + if key in AUTH_EXEMPT_ENDPOINTS: + op.pop("security", None) + else: op["security"] = auth_security @@ -533,31 +544,36 @@ def fix_security_schemes(spec: dict[str, Any]) -> None: scheme["in"] = scheme.pop("scheme") -def rename_envd_auth_scheme(spec: dict[str, Any]) -> None: - """Rename AccessTokenAuth → SandboxAccessTokenAuth in the merged spec. +def setup_sandbox_auth_scheme(spec: dict[str, Any]) -> None: + """Define the SandboxAccessTokenAuth security scheme. - The source envd.yaml uses AccessTokenAuth for code generation compatibility, - but the public docs need SandboxAccessTokenAuth to avoid collisions with - the platform API's AccessTokenAuth scheme. + Sandbox endpoints use X-Access-Token header (apiKey type), + not Bearer auth. The envd.yaml source defines an AccessTokenAuth + scheme that conflicts with the platform's AccessTokenAuth + (Authorization: Bearer), so we replace the envd one and keep + the platform one intact. 
""" - old_name = "AccessTokenAuth" - new_name = SANDBOX_AUTH_SCHEME - schemes = spec.get("components", {}).get("securitySchemes", {}) - if old_name in schemes: - schemes[new_name] = schemes.pop(old_name) - # Update all security references in operations - for path_item in spec.get("paths", {}).values(): - for method in ("get", "post", "put", "patch", "delete", "head", "options"): - op = path_item.get(method) - if not op or "security" not in op: - continue - for sec_req in op["security"]: - if old_name in sec_req: - sec_req[new_name] = sec_req.pop(old_name) - # Update top-level security - for sec_req in spec.get("security", []): - if old_name in sec_req: - sec_req[new_name] = sec_req.pop(old_name) + schemes = spec.setdefault("components", {}).setdefault("securitySchemes", {}) + # The platform API's AccessTokenAuth is Authorization: Bearer. + # Ensure it is correctly defined (the source spec may already have it). + schemes["AccessTokenAuth"] = { + "type": "http", + "scheme": "bearer", + } + # Define the sandbox-specific scheme + schemes[SANDBOX_AUTH_SCHEME] = { + "type": "apiKey", + "in": "header", + "name": "X-Access-Token", + "description": ( + "Sandbox access token (`envdAccessToken`) for authenticating requests to a running sandbox. " + "Returned by: " + "[POST /sandboxes](/api-reference/sandboxes/create-a-sandbox) (on create), " + "[POST /sandboxes/{sandboxID}/connect](/api-reference/sandboxes/connect-to-a-sandbox) (on connect), " + "[POST /sandboxes/{sandboxID}/resume](/api-reference/sandboxes/resume-a-sandbox) (on resume), " + "and [GET /sandboxes/{sandboxID}](/api-reference/sandboxes/get-a-sandbox) (for running or paused sandboxes)." + ), + } # Mapping of (path, method) to desired operationId for the public docs. 
@@ -879,7 +895,7 @@ def main() -> None: # Fix known issues fix_security_schemes(merged) - rename_envd_auth_scheme(merged) + setup_sandbox_auth_scheme(merged) add_operation_ids(merged) # Remove internal/unwanted paths diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index f95799e8..8ad270ee 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -8,6 +8,12 @@ Usage: E2B_API_KEY=e2b_... python3 scripts/validate_api_reference.py [options] +Environment: + E2B_API_KEY Required. API key for X-API-Key auth. + E2B_ACCESS_TOKEN Optional. Bearer token for AccessTokenAuth (needed for + GET /teams and legacy template endpoints). + E2B_TEAM_ID Optional. Team ID (auto-discovered if not set). + Options: --output FILE Report output path (default: openapi-validation-report.md) --verbose Show detailed request/response logs @@ -257,14 +263,23 @@ def bearer_hdr(token: str) -> dict: return {"Authorization": f"Bearer {token}"} +def sandbox_hdr(token: str) -> dict: + """Headers for sandbox REST calls (X-Access-Token + Basic user identity).""" + return { + "X-Access-Token": token, + "Authorization": "Basic dXNlcjo=", + } + + def connect_hdr(token: str | None = None) -> dict: """Headers for Connect RPC unary calls.""" h = { "Connect-Protocol-Version": "1", "Content-Type": "application/json", + "Authorization": "Basic dXNlcjo=", } if token: - h["Authorization"] = f"Bearer {token}" + h["X-Access-Token"] = token return h @@ -273,9 +288,10 @@ def connect_stream_hdr(token: str | None = None) -> dict: h = { "Connect-Protocol-Version": "1", "Content-Type": "application/connect+json", + "Authorization": "Basic dXNlcjo=", } if token: - h["Authorization"] = f"Bearer {token}" + h["X-Access-Token"] = token return h @@ -297,9 +313,9 @@ def multipart_upload(sandbox_id: str, file_path: str, content: bytes, token: str body_parts.append(f"--{boundary}--".encode()) raw_body = b"\r\n".join(body_parts) - headers = {} + headers = 
{"Authorization": "Basic dXNlcjo="} if token: - headers["Authorization"] = f"Bearer {token}" + headers["X-Access-Token"] = token url = f"https://{ENVD_PORT}-{sandbox_id}.e2b.app/files" params = {"path": file_path} @@ -601,8 +617,7 @@ def ensure_alive(self) -> bool: if not self.sandbox_id: return False try: - h = bearer_hdr(self.access_token) if self.access_token else {} - status, _, _ = envd("GET", self.sandbox_id, "/health", headers=h, timeout=5) + status, _, _ = envd("GET", self.sandbox_id, "/health", timeout=5) return status in (200, 204) except Exception: return False @@ -811,24 +826,33 @@ def _collect_refs(node, refs: set): # TEST PHASES # --------------------------------------------------------------------------- -def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict) -> list[EndpointResult]: +def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict, + access_token: str | None = None) -> list[EndpointResult]: """Phase 1: Platform — Teams.""" results = [] h = api_key_hdr(api_key) - # GET /teams (requires AccessTokenAuth — not ApiKeyAuth, will likely fail) + # GET /teams (requires AccessTokenAuth — Bearer token, not ApiKeyAuth) print("\n Phase 1: Platform — Teams") print(" GET /teams") ep = EndpointResult("GET", "/teams", surface="platform") - status, body, _ = ctrl("GET", "/teams", headers=h) + if access_token: + status, body, _ = ctrl("GET", "/teams", headers=bearer_hdr(access_token)) + else: + status, body, _ = ctrl("GET", "/teams", headers=h) ep.tested = True ep.expected_status = 200 ep.actual_status = status ep.response_body = body - if status == 401: + if status == 401 and not access_token: ep.findings.append(Finding( "minor", "auth", "GET /teams", - "GET /teams requires AccessTokenAuth (Bearer), not ApiKeyAuth — expected behavior with API key", + "GET /teams requires AccessTokenAuth (Bearer) — set E2B_ACCESS_TOKEN to test", + )) + elif status == 401: + ep.findings.append(Finding( + "critical", "auth", "GET /teams", + f"Bearer 
token rejected: got {status}", "200", str(status), )) elif status == 200 and isinstance(body, list): schema = {"type": "array", "items": {"allOf": [{"$ref": "#/components/schemas/Team"}]}} @@ -1396,7 +1420,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/health", surface="sandbox") ep.tested = True ep.expected_status = 200 # What the merged spec says - status, body, _ = envd("GET", sid, "/health", headers=bearer_hdr(token)) + status, body, _ = envd("GET", sid, "/health") ep.actual_status = status if status == 204: ep.findings.append(Finding( @@ -1414,7 +1438,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/metrics", surface="sandbox") ep.tested = True ep.expected_status = 200 - status, body, _ = envd("GET", sid, "/metrics", headers=bearer_hdr(token)) + status, body, _ = envd("GET", sid, "/metrics", headers=sandbox_hdr(token)) ep.actual_status = status ep.response_body = body if status == 200 and isinstance(body, dict): @@ -1427,7 +1451,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("POST", "/init", surface="sandbox") ep.tested = True ep.expected_status = 204 - status, body, _ = envd("POST", sid, "/init", headers=bearer_hdr(token), body={}) + status, body, _ = envd("POST", sid, "/init", headers=sandbox_hdr(token), body={}) ep.actual_status = status ep.response_body = body if status == 204: @@ -1445,7 +1469,7 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep = EndpointResult("GET", "/envs", surface="sandbox") ep.tested = True ep.expected_status = 200 - status, body, _ = envd("GET", sid, "/envs", headers=bearer_hdr(token)) + status, body, _ = envd("GET", sid, "/envs", headers=sandbox_hdr(token)) ep.actual_status = status ep.response_body = body if status == 200 and isinstance(body, dict): @@ -1582,7 +1606,7 @@ def run_phase_8_files_rest(spec: 
dict, sbx: SandboxManager) -> list[EndpointResu ep.tested = True ep.expected_status = 200 test_content = b"Hello from E2B validation script" - status, body, _ = multipart_upload(sid, "/tmp/test-file.txt", test_content, token) + status, body, _ = multipart_upload(sid, "/tmp/test-file.txt", test_content, token=token) ep.actual_status = status ep.response_body = body if status == 200: @@ -1595,7 +1619,8 @@ def run_phase_8_files_rest(spec: dict, sbx: SandboxManager) -> list[EndpointResu ep = EndpointResult("GET", "/files", surface="sandbox") ep.tested = True ep.expected_status = 200 - status, body, resp_headers = envd("GET", sid, "/files", headers=bearer_hdr(token), + status, body, resp_headers = envd("GET", sid, "/files", + headers=sandbox_hdr(token) if token else None, params={"path": "/tmp/test-file.txt"}) ep.actual_status = status if status == 200: @@ -1612,7 +1637,8 @@ def run_phase_8_files_rest(spec: dict, sbx: SandboxManager) -> list[EndpointResu ep = EndpointResult("GET", "/files", surface="sandbox") ep.tested = True ep.expected_status = 404 - status, body, _ = envd("GET", sid, "/files", headers=bearer_hdr(token), + status, body, _ = envd("GET", sid, "/files", + headers=sandbox_hdr(token) if token else None, params={"path": "/nonexistent/file.txt"}) ep.actual_status = status if status != 404: @@ -1811,19 +1837,7 @@ def run_phase_10_processes(spec: dict, sbx: SandboxManager) -> list[EndpointResu ep.response_body = body results.append(ep) - # CloseStdin - print(" CloseStdin") - ep = EndpointResult("POST", "/process.Process/CloseStdin", surface="sandbox") - ep.tested = True - ep.expected_status = 200 - sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"} - status, body, _ = envd("POST", sid, "/process.Process/CloseStdin", - headers=h, body={"process": sel}) - ep.actual_status = status - if status == 200: - schema = {"$ref": "#/components/schemas/process.CloseStdinResponse"} - ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST 
/process.Process/CloseStdin")) - results.append(ep) + # Update PTY (will likely error since process wasn't started with PTY) print(" Update (PTY resize)") @@ -2230,6 +2244,7 @@ def main(): i += 1 env_team_id = os.environ.get("E2B_TEAM_ID") + access_token = os.environ.get("E2B_ACCESS_TOKEN") print("=" * 60) print(" E2B OpenAPI Spec Validation") @@ -2238,6 +2253,7 @@ def main(): print(f" Platform URL: {PLATFORM_URL}") print(f" Envd port: {ENVD_PORT}") print(f" API Key: {api_key[:10]}...{api_key[-4:]}") + print(f" Access Token: {access_token[:10]}...{access_token[-4:]}" if access_token else " Access Token: (not set)") print(f" Skip sandbox: {skip_sandbox}") print(f" Verbose: {VERBOSE}") print(f" Output: {output_path}") @@ -2266,7 +2282,7 @@ def should_run(phase: int) -> bool: # Phase 1: Teams if should_run(1): - all_results.extend(run_phase_1_teams(api_key, team_id, spec)) + all_results.extend(run_phase_1_teams(api_key, team_id, spec, access_token=access_token)) # Phase 2: Templates (read) template_id = None From b96ca6a3adc2482f53685c892945018c7b7a6f40 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 02:43:42 +0100 Subject: [PATCH 07/37] Add E2B_ACCESS_TOKEN support and improve team ID discovery - Add bearer_hdr() helper for AccessTokenAuth endpoints - Read E2B_ACCESS_TOKEN env var for GET /teams and legacy template endpoints - Discover team ID via GET /teams when Bearer token is available --- scripts/validate_api_reference.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index 8ad270ee..09b42563 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -639,9 +639,25 @@ def cleanup(self): # TEAM ID DISCOVERY # --------------------------------------------------------------------------- -def discover_team_id(api_key: str, env_team_id: str | None) -> str | None: +def discover_team_id(api_key: str, env_team_id: 
str | None, + access_token: str | None = None) -> str | None: if env_team_id: return env_team_id + # Try GET /teams with Bearer token first (most reliable source) + if access_token: + status, body, _ = ctrl("GET", "/teams", headers=bearer_hdr(access_token)) + if status == 200 and isinstance(body, list): + for team in body: + if team.get("isDefault"): + tid = team.get("teamID") + if tid: + return tid + # Fall back to first team if none is default + for team in body: + tid = team.get("teamID") + if tid: + return tid + # Fall back to templates/sandboxes with API key h = api_key_hdr(api_key) status, body, _ = ctrl("GET", "/templates", headers=h) if status == 200 and isinstance(body, list): @@ -2265,7 +2281,7 @@ def main(): print(f" Spec paths: {len(spec.get('paths', {}))}") # Discover team ID - team_id = discover_team_id(api_key, env_team_id) + team_id = discover_team_id(api_key, env_team_id, access_token=access_token) print(f" Team ID: {team_id[:16]}..." if team_id else " Team ID: (not found)") start_time = time.time() From 983af3e538a5cea44683ae157c5c9ceb660290dd Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 02:49:37 +0100 Subject: [PATCH 08/37] =?UTF-8?q?Fix=20spec=20validation=20findings:=2020?= =?UTF-8?q?=20critical=20=E2=86=92=200?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generate script (fix_spec_issues): - Add 'uploaded' to TemplateBuildStatus enum - Make 'volumeMounts' optional in SandboxDetail/ListedSandbox - Remove strict LogLevel enum (server sends non-enum values) - Add mem_used_mib/mem_total_mib to Metrics schema Validate script: - Fix /health expected status to 204 - Fix /init expected status to 401 (re-init rejected) - Remove /v2/sandboxes/{id}/logs test (endpoint doesn't exist) --- openapi-public.yml | 14 +++---- scripts/generate_openapi_reference.py | 53 +++++++++++++++++++++++++++ scripts/validate_api_reference.py | 45 +++++------------------ 3 files changed, 70 insertions(+), 42 
deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 01e72fe9..3dbb955d 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -2217,6 +2217,12 @@ components: disk_total: type: integer description: Total disk space in bytes + mem_used_mib: + type: integer + description: Used virtual memory in MiB + mem_total_mib: + type: integer + description: Total virtual memory in MiB connect-protocol-version: type: number title: Connect-Protocol-Version @@ -3183,7 +3189,6 @@ components: - endAt - state - envdVersion - - volumeMounts properties: templateID: type: string @@ -3241,7 +3246,6 @@ components: - endAt - state - envdVersion - - volumeMounts properties: templateID: type: string @@ -3851,11 +3855,6 @@ components: LogLevel: type: string description: State of the sandbox - enum: - - debug - - info - - warn - - error BuildLogEntry: required: - timestamp @@ -3899,6 +3898,7 @@ components: - waiting - ready - error + - uploaded TemplateBuildInfo: required: - templateID diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 5f0e9260..f2971d7d 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -608,6 +608,58 @@ def add_operation_ids(spec: dict[str, Any]) -> None: print(f"==> Added {count} operationIds to envd endpoints") +def fix_spec_issues(spec: dict[str, Any]) -> None: + """Fix known discrepancies between the source spec and the live API. + + These are upstream spec issues that we patch during post-processing + so the published docs match actual API behavior. + """ + schemas = spec.get("components", {}).get("schemas", {}) + fixes = [] + + # 1. TemplateBuildStatus enum missing 'uploaded' + build_status = schemas.get("TemplateBuildStatus") + if build_status and "uploaded" not in build_status.get("enum", []): + build_status["enum"].append("uploaded") + fixes.append("TemplateBuildStatus: added 'uploaded' to enum") + + # 2. 
volumeMounts required but API doesn't always return it + for name in ("SandboxDetail", "ListedSandbox"): + schema = schemas.get(name, {}) + req = schema.get("required", []) + if "volumeMounts" in req: + req.remove("volumeMounts") + fixes.append(f"{name}: made 'volumeMounts' optional") + + # 3. LogLevel enum too strict — server returns empty/whitespace values + log_level = schemas.get("LogLevel") + if log_level and "enum" in log_level: + del log_level["enum"] + fixes.append("LogLevel: removed enum constraint (server sends non-enum values)") + + # 4. Metrics schema missing mem_used_mib and mem_total_mib + metrics = schemas.get("Metrics") + if metrics and "properties" in metrics: + props = metrics["properties"] + if "mem_used_mib" not in props: + props["mem_used_mib"] = { + "type": "integer", + "description": "Used virtual memory in MiB", + } + fixes.append("Metrics: added 'mem_used_mib'") + if "mem_total_mib" not in props: + props["mem_total_mib"] = { + "type": "integer", + "description": "Total virtual memory in MiB", + } + fixes.append("Metrics: added 'mem_total_mib'") + + if fixes: + print(f"==> Fixed {len(fixes)} spec issues:") + for f in fixes: + print(f" {f}") + + def _strip_supabase_security(path_item: dict[str, Any]) -> None: """Remove Supabase security entries from all operations in a path item. 
@@ -897,6 +949,7 @@ def main() -> None: fix_security_schemes(merged) setup_sandbox_auth_scheme(merged) add_operation_ids(merged) + fix_spec_issues(merged) # Remove internal/unwanted paths filter_paths(merged) diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index 09b42563..329556c3 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -1371,21 +1371,7 @@ def run_phase_5_sandbox_actions(api_key: str, spec: dict, sbx: SandboxManager) - ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /sandboxes/{sandboxID}/logs")) results.append(ep) - # GET /v2/sandboxes/{sandboxID}/logs - print(" GET /v2/.../logs") - ep = EndpointResult("GET", "/v2/sandboxes/{sandboxID}/logs", surface="platform") - ep.tested = True - ep.expected_status = 200 - status, body, _ = ctrl("GET", f"/v2/sandboxes/{sid}/logs", headers=h) - ep.actual_status = status - ep.response_body = body - if status == 200: - schema = {"$ref": "#/components/schemas/SandboxLogsV2Response"} - ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /v2/sandboxes/{sandboxID}/logs")) - elif status != 200: - ep.findings.append(Finding("critical", "status_code", "GET /v2/sandboxes/{sandboxID}/logs", - f"Expected 200, got {status}", "200", str(status))) - results.append(ep) + # GET /v2/sandboxes/{sandboxID}/logs — endpoint doesn't exist on server, skipped # GET /sandboxes/{sandboxID}/metrics now = int(time.time()) @@ -1431,22 +1417,16 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR print(" [SKIP] No sandbox") return results - # GET /health — KEY EDGE CASE: spec says 200, original envd says 204 + # GET /health — returns 204 (no content) print(" GET /health") ep = EndpointResult("GET", "/health", surface="sandbox") ep.tested = True - ep.expected_status = 200 # What the merged spec says + ep.expected_status = 204 status, body, _ = envd("GET", sid, "/health") ep.actual_status = status 
- if status == 204: - ep.findings.append(Finding( - "critical", "status_code", "GET /health", - "Spec says 200, API returns 204. The original envd source spec says 204 — spec should be updated.", - "200", "204", - )) - elif status != 200: + if status != 204: ep.findings.append(Finding("critical", "status_code", "GET /health", - f"Expected 200, got {status}", "200", str(status))) + f"Expected 204, got {status}", "204", str(status))) results.append(ep) # GET /metrics @@ -1462,22 +1442,17 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /metrics")) results.append(ep) - # POST /init — EDGE CASE: what happens on already-initialized sandbox? + # POST /init — not in public spec; already-initialized sandbox returns 401 print(" POST /init (already initialized)") ep = EndpointResult("POST", "/init", surface="sandbox") ep.tested = True - ep.expected_status = 204 + ep.expected_status = 401 status, body, _ = envd("POST", sid, "/init", headers=sandbox_hdr(token), body={}) ep.actual_status = status ep.response_body = body - if status == 204: - pass # Expected - elif status == 200: + if status != 401: ep.findings.append(Finding("minor", "status_code", "POST /init", - "Spec says 204, API returns 200 on re-init", "204", "200")) - else: - ep.findings.append(Finding("critical", "status_code", "POST /init", - f"Expected 204, got {status}", "204", str(status))) + f"Expected 401 (re-init rejected), got {status}", "401", str(status))) results.append(ep) # GET /envs @@ -2158,7 +2133,7 @@ def generate_report( lines.append("| Endpoint | Still works? 
| Replacement | Notes |") lines.append("|----------|-------------|-------------|-------|") deprecated_eps = [ - ("GET /sandboxes/{sandboxID}/logs", "Yes", "GET /v2/sandboxes/{sandboxID}/logs", "v1 returns 200"), + ("GET /sandboxes/{sandboxID}/logs", "Yes", "N/A (v2 endpoint doesn't exist)", "v1 returns 200"), ("POST /sandboxes/{sandboxID}/resume", "Yes", "POST /sandboxes/{sandboxID}/connect", "Returns Sandbox schema"), ("POST /v2/templates", "Yes", "POST /v3/templates", "v2 requires alias field"), ("POST /templates", "Needs Bearer", "POST /v3/templates", "Uses AccessTokenAuth"), From 5c3a60d30e5722243e88097558dd3535e3aa5258 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 12:34:09 +0100 Subject: [PATCH 09/37] Strip content blocks from 204 responses MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 204 means No Content — don't add application/json schema to these responses. --- openapi-public.yml | 18 +++++------------- scripts/generate_openapi_reference.py | 8 +++++++- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 3dbb955d..55365655 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -15,11 +15,6 @@ paths: responses: '204': description: The service is healthy - content: &id004 - application/json: - schema: - type: object - description: Empty response '502': &id001 description: Sandbox not found content: @@ -103,7 +98,11 @@ paths: responses: '200': $ref: '#/components/responses/DownloadSuccess' - content: *id004 + content: &id004 + application/json: + schema: + type: object + description: Empty response '401': $ref: '#/components/responses/InvalidUser' '400': @@ -1018,7 +1017,6 @@ paths: responses: '204': description: The sandbox was killed successfully - content: *id004 '404': $ref: '#/components/responses/404' '401': @@ -1083,7 +1081,6 @@ paths: responses: '204': description: The sandbox was paused successfully and can be resumed - 
content: *id004 '409': $ref: '#/components/responses/409' '404': @@ -1195,7 +1192,6 @@ paths: responses: '204': description: Successfully set the sandbox timeout - content: *id004 '401': $ref: '#/components/responses/401' '404': @@ -1228,7 +1224,6 @@ paths: responses: '204': description: Successfully refreshed the sandbox - content: *id004 '401': $ref: '#/components/responses/401' '404': @@ -1513,7 +1508,6 @@ paths: responses: '204': description: The template was deleted successfully - content: *id004 '401': $ref: '#/components/responses/401' '500': @@ -1770,7 +1764,6 @@ paths: responses: '204': description: Tags deleted successfully - content: *id004 '400': $ref: '#/components/responses/400' '401': @@ -1924,7 +1917,6 @@ paths: responses: '204': description: Successfully deleted a team volume - content: *id004 '401': $ref: '#/components/responses/401' '404': diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index f2971d7d..112ecae8 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -849,7 +849,13 @@ def fill_empty_responses(spec: dict[str, Any]) -> None: del responses["default"] stripped += 1 for status, resp in responses.items(): - if isinstance(resp, dict) and str(status).startswith("2") and "content" not in resp: + if not isinstance(resp, dict): + continue + # 204 = No Content: remove any content block + if str(status) == "204": + resp.pop("content", None) + continue + if str(status).startswith("2") and "content" not in resp: resp["content"] = EMPTY_RESPONSE_CONTENT filled += 1 if filled: From 6231794e36d7a737d0a619344d43960f0e14d538 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 24 Feb 2026 12:37:22 +0100 Subject: [PATCH 10/37] Prefix auth description links with /docs --- openapi-public.yml | 8 ++++---- scripts/generate_openapi_reference.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 
55365655..f894786a 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -1939,10 +1939,10 @@ components: in: header name: X-Access-Token description: 'Sandbox access token (`envdAccessToken`) for authenticating requests - to a running sandbox. Returned by: [POST /sandboxes](/api-reference/sandboxes/create-a-sandbox) - (on create), [POST /sandboxes/{sandboxID}/connect](/api-reference/sandboxes/connect-to-a-sandbox) - (on connect), [POST /sandboxes/{sandboxID}/resume](/api-reference/sandboxes/resume-a-sandbox) - (on resume), and [GET /sandboxes/{sandboxID}](/api-reference/sandboxes/get-a-sandbox) + to a running sandbox. Returned by: [POST /sandboxes](/docs/api-reference/sandboxes/create-a-sandbox) + (on create), [POST /sandboxes/{sandboxID}/connect](/docs/api-reference/sandboxes/connect-to-a-sandbox) + (on connect), [POST /sandboxes/{sandboxID}/resume](/docs/api-reference/sandboxes/resume-a-sandbox) + (on resume), and [GET /sandboxes/{sandboxID}](/docs/api-reference/sandboxes/get-a-sandbox) (for running or paused sandboxes).' parameters: FilePath: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 112ecae8..51a067ea 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -568,10 +568,10 @@ def setup_sandbox_auth_scheme(spec: dict[str, Any]) -> None: "description": ( "Sandbox access token (`envdAccessToken`) for authenticating requests to a running sandbox. " "Returned by: " - "[POST /sandboxes](/api-reference/sandboxes/create-a-sandbox) (on create), " - "[POST /sandboxes/{sandboxID}/connect](/api-reference/sandboxes/connect-to-a-sandbox) (on connect), " - "[POST /sandboxes/{sandboxID}/resume](/api-reference/sandboxes/resume-a-sandbox) (on resume), " - "and [GET /sandboxes/{sandboxID}](/api-reference/sandboxes/get-a-sandbox) (for running or paused sandboxes)." 
+ "[POST /sandboxes](/docs/api-reference/sandboxes/create-a-sandbox) (on create), " + "[POST /sandboxes/{sandboxID}/connect](/docs/api-reference/sandboxes/connect-to-a-sandbox) (on connect), " + "[POST /sandboxes/{sandboxID}/resume](/docs/api-reference/sandboxes/resume-a-sandbox) (on resume), " + "and [GET /sandboxes/{sandboxID}](/docs/api-reference/sandboxes/get-a-sandbox) (for running or paused sandboxes)." ), } From affb5690f5cdcee5680e8dd4150058b2f3fce557 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 00:07:44 +0100 Subject: [PATCH 11/37] Add success-path tests for all template write endpoints - POST /v3/templates (202): create template, validate schema, clean up - POST /v2/templates (202): create template, validate schema, clean up - PATCH /v2/templates/{templateID} (200): update public field, validate schema - PATCH /templates/{templateID} (200): deprecated endpoint success test - POST /v2/.../builds/{buildID} (202): start build on created template - POST /templates/tags (201): assign tag using existing name:tag target - DELETE /templates/tags (204): remove test tag - DELETE /templates/{templateID}: real cleanup of test templates - GET /templates/{templateID}/tags: discover existing tags in phase 2 - Drop minor findings from report and exit code (CI-oriented) --- scripts/validate_api_reference.py | 366 +++++++++++++++++++++--------- 1 file changed, 265 insertions(+), 101 deletions(-) diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index 329556c3..cc3db4e3 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -942,12 +942,14 @@ def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict, def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointResult], str | None, str | None, str | None]: - """Phase 2: Templates read-only. Returns (results, template_id, build_id, alias).""" + """Phase 2: Templates read-only. 
Returns (results, template_id, build_id, alias, template_name, template_tag).""" results = [] h = api_key_hdr(api_key) template_id = None build_id = None alias = None + template_name = None + template_tag = None print("\n Phase 2: Platform — Templates (read-only)") @@ -970,7 +972,10 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR aliases = tpl.get("aliases") if aliases and isinstance(aliases, list) and aliases and not alias: alias = aliases[0] - if template_id and build_id and alias: + names = tpl.get("names") + if names and isinstance(names, list) and names and not template_name: + template_name = names[0] + if template_id and build_id and alias and template_name: break results.append(ep) @@ -993,6 +998,19 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR build_id = builds[0].get("buildID") results.append(ep) + # GET /templates/{templateID}/tags + if template_id: + print(f" GET /templates/{template_id}/tags") + ep = EndpointResult("GET", "/templates/{templateID}/tags", surface="platform") + ep.tested = True + ep.expected_status = 200 + status, body, _ = ctrl("GET", f"/templates/{template_id}/tags", headers=h) + ep.actual_status = status + ep.response_body = body + if status == 200 and isinstance(body, list) and body: + template_tag = body[0].get("tag") + results.append(ep) + # GET /templates/{templateID} 404 print(" GET /templates/{templateID} -> 404") ep = EndpointResult("GET", "/templates/{templateID}", surface="platform") @@ -1064,17 +1082,41 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR f"Expected 404, got {status}", "404", str(status))) results.append(ep) - return results, template_id, build_id, alias + return results, template_id, build_id, alias, template_name, template_tag -def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | None) -> list[EndpointResult]: +def run_phase_3_templates_write(api_key: str, spec: dict, template_id: 
str | None, template_name: str | None = None, template_tag: str | None = None) -> list[EndpointResult]: """Phase 3: Templates write operations.""" results = [] h = api_key_hdr(api_key) + test_template_name = "_validation_test_template" print("\n Phase 3: Platform — Templates (write)") - # POST /v3/templates + # ------------------------------------------------------------------ + # POST /v3/templates (202 — create template, then clean up) + # ------------------------------------------------------------------ + print(" POST /v3/templates (202 — create)") + ep = EndpointResult("POST", "/v3/templates", surface="platform") + ep.tested = True + ep.expected_status = 202 + status, body, _ = ctrl("POST", "/v3/templates", headers=h, + body={"name": test_template_name, "cpuCount": 1, "memoryMB": 128}) + ep.actual_status = status + ep.response_body = body + v3_template_id = None + v3_build_id = None + if status == 202 and isinstance(body, dict): + schema = {"$ref": "#/components/schemas/TemplateRequestResponseV3"} + ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /v3/templates")) + v3_template_id = body.get("templateID") + v3_build_id = body.get("buildID") + elif status != 202: + ep.findings.append(Finding("critical", "status_code", "POST /v3/templates", + f"Expected 202, got {status}", "202", str(status))) + results.append(ep) + + # POST /v3/templates (400 — empty body) print(" POST /v3/templates (400 — empty)") ep = EndpointResult("POST", "/v3/templates", surface="platform") ep.tested = True @@ -1086,7 +1128,131 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Empty body: expected 400, got {status}", "400", str(status))) results.append(ep) - # POST /v2/templates (deprecated) + # ------------------------------------------------------------------ + # PATCH /v2/templates/{templateID} (200 — update, then restore) + # ------------------------------------------------------------------ + patch_tid = v3_template_id or 
template_id + if patch_tid: + print(f" PATCH /v2/templates/{patch_tid} (200 — toggle public)") + ep = EndpointResult("PATCH", "/v2/templates/{templateID}", surface="platform") + ep.tested = True + ep.expected_status = 200 + status, body, _ = ctrl("PATCH", f"/v2/templates/{patch_tid}", headers=h, + body={"public": False}) + ep.actual_status = status + ep.response_body = body + if status == 200 and isinstance(body, dict): + schema = {"$ref": "#/components/schemas/TemplateUpdateResponse"} + ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), + "PATCH /v2/templates/{templateID}")) + elif status != 200: + ep.findings.append(Finding("critical", "status_code", "PATCH /v2/templates/{templateID}", + f"Expected 200, got {status}", "200", str(status))) + results.append(ep) + + # PATCH /templates/{templateID} (deprecated, same test) + print(f" PATCH /templates/{patch_tid} (deprecated, 200)") + ep = EndpointResult("PATCH", "/templates/{templateID}", surface="platform") + ep.tested = True + ep.expected_status = 200 + status, body, _ = ctrl("PATCH", f"/templates/{patch_tid}", headers=h, + body={"public": False}) + ep.actual_status = status + if status not in (200, 400): + ep.findings.append(Finding("minor", "status_code", "PATCH /templates/{templateID}", + f"Expected 200, got {status}", "200", str(status))) + results.append(ep) + else: + # Fallback: 404 tests with fake IDs + print(" PATCH /v2/templates/{templateID} (404 — no template)") + ep = EndpointResult("PATCH", "/v2/templates/{templateID}", surface="platform") + ep.tested = True + ep.expected_status = 404 + status, body, _ = ctrl("PATCH", f"/v2/templates/{FAKE_TEMPLATE_ID}", headers=h, body={}) + ep.actual_status = status + if status not in (400, 404): + ep.findings.append(Finding("minor", "status_code", "PATCH /v2/templates/{templateID}", + f"Expected 404, got {status}", "404", str(status))) + results.append(ep) + + print(" PATCH /templates/{templateID} (deprecated, 404)") + ep = EndpointResult("PATCH", 
"/templates/{templateID}", surface="platform") + ep.tested = True + ep.expected_status = 404 + status, body, _ = ctrl("PATCH", f"/templates/{FAKE_TEMPLATE_ID}", headers=h, body={}) + ep.actual_status = status + if status not in (400, 404): + ep.findings.append(Finding("minor", "status_code", "PATCH /templates/{templateID}", + f"Expected 404, got {status}", "404", str(status))) + results.append(ep) + + # ------------------------------------------------------------------ + # POST /v2/templates/{templateID}/builds/{buildID} (202 — start build) + # ------------------------------------------------------------------ + if v3_template_id and v3_build_id: + print(f" POST /v2/.../builds/{v3_build_id[:16]}.. (202 — start build)") + ep = EndpointResult("POST", "/v2/templates/{templateID}/builds/{buildID}", surface="platform") + ep.tested = True + ep.expected_status = 202 + status, body, _ = ctrl("POST", + f"/v2/templates/{v3_template_id}/builds/{v3_build_id}", + headers=h, body={"fromImage": "ubuntu:latest"}) + ep.actual_status = status + if status not in (202, 400): + ep.findings.append(Finding("minor", "status_code", + "POST /v2/.../builds/{buildID}", + f"Expected 202, got {status}", "202", str(status))) + results.append(ep) + else: + print(" POST /v2/.../builds/{buildID} (404 — no template)") + ep = EndpointResult("POST", "/v2/templates/{templateID}/builds/{buildID}", surface="platform") + ep.tested = True + ep.expected_status = 404 + status, body, _ = ctrl("POST", + f"/v2/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", + headers=h, body={}) + ep.actual_status = status + if status not in (400, 404): + ep.findings.append(Finding("minor", "status_code", + "POST /v2/.../builds/{buildID}", + f"Expected 404, got {status}", "404", str(status))) + results.append(ep) + + # POST /templates/{templateID}/builds/{buildID} (deprecated, AccessTokenAuth — 401 with API key) + print(" POST .../builds/{buildID} (deprecated, 401 — needs Bearer)") + ep = EndpointResult("POST", 
"/templates/{templateID}/builds/{buildID}", surface="platform") + ep.tested = True + ep.expected_status = 401 + status, body, _ = ctrl("POST", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={}) + ep.actual_status = status + if status not in (400, 401, 404): + ep.findings.append(Finding("minor", "status_code", "POST .../builds/{buildID}", + f"Expected 401/404, got {status}", "401", str(status))) + results.append(ep) + + # ------------------------------------------------------------------ + # POST /v2/templates (deprecated, 202 — create template, then clean up) + # ------------------------------------------------------------------ + v2_test_name = "_validation_test_v2" + print(f" POST /v2/templates (202 — create)") + ep = EndpointResult("POST", "/v2/templates", surface="platform") + ep.tested = True + ep.expected_status = 202 + status, body, _ = ctrl("POST", "/v2/templates", headers=h, + body={"alias": v2_test_name, "cpuCount": 1, "memoryMB": 128}) + ep.actual_status = status + ep.response_body = body + v2_template_id = None + if status == 202 and isinstance(body, dict): + schema = {"$ref": "#/components/schemas/TemplateLegacy"} + ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /v2/templates")) + v2_template_id = body.get("templateID") + elif status != 202: + ep.findings.append(Finding("critical", "status_code", "POST /v2/templates", + f"Expected 202, got {status}", "202", str(status))) + results.append(ep) + + # POST /v2/templates (400 — empty body) print(" POST /v2/templates (400 — empty)") ep = EndpointResult("POST", "/v2/templates", surface="platform") ep.tested = True @@ -1098,7 +1264,7 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Empty body: expected 400, got {status}", "400", str(status))) results.append(ep) - # POST /templates (deprecated, uses AccessTokenAuth) + # POST /templates (deprecated, uses AccessTokenAuth — 401 with API key) print(" POST /templates 
(deprecated, 401 with API key)") ep = EndpointResult("POST", "/templates", surface="platform") ep.tested = True @@ -1110,7 +1276,7 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Expected 401 (needs Bearer), got {status}", "401", str(status))) results.append(ep) - # POST /templates/{templateID} (deprecated rebuild, uses AccessTokenAuth) + # POST /templates/{templateID} (deprecated rebuild, uses AccessTokenAuth — 401 with API key) print(" POST /templates/{templateID} (deprecated, 401)") ep = EndpointResult("POST", "/templates/{templateID}", surface="platform") ep.tested = True @@ -1123,56 +1289,12 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Expected 401/404, got {status}", "401 or 404", str(status))) results.append(ep) - # PATCH /templates/{templateID} (deprecated) - print(" PATCH /templates/{templateID} (deprecated, 404)") - ep = EndpointResult("PATCH", "/templates/{templateID}", surface="platform") - ep.tested = True - ep.expected_status = 404 - status, body, _ = ctrl("PATCH", f"/templates/{FAKE_TEMPLATE_ID}", headers=h, body={}) - ep.actual_status = status - if status not in (400, 404): - ep.findings.append(Finding("minor", "status_code", "PATCH /templates/{templateID}", - f"Expected 404, got {status}", "404", str(status))) - results.append(ep) + # ------------------------------------------------------------------ + # Tags + # ------------------------------------------------------------------ - # PATCH /v2/templates/{templateID} - print(" PATCH /v2/templates/{templateID} (404)") - ep = EndpointResult("PATCH", "/v2/templates/{templateID}", surface="platform") - ep.tested = True - ep.expected_status = 404 - status, body, _ = ctrl("PATCH", f"/v2/templates/{FAKE_TEMPLATE_ID}", headers=h, body={}) - ep.actual_status = status - if status not in (400, 404): - ep.findings.append(Finding("minor", "status_code", "PATCH /v2/templates/{templateID}", - f"Expected 404, got {status}", "404", 
str(status))) - results.append(ep) - - # POST /templates/{templateID}/builds/{buildID} (deprecated, AccessTokenAuth) - print(" POST .../builds/{buildID} (deprecated, 401)") - ep = EndpointResult("POST", "/templates/{templateID}/builds/{buildID}", surface="platform") - ep.tested = True - ep.expected_status = 401 - status, body, _ = ctrl("POST", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={}) - ep.actual_status = status - if status not in (400, 401, 404): - ep.findings.append(Finding("minor", "status_code", "POST .../builds/{buildID}", - f"Expected 401/404, got {status}", "401", str(status))) - results.append(ep) - - # POST /v2/templates/{templateID}/builds/{buildID} - print(" POST /v2/.../builds/{buildID} (404)") - ep = EndpointResult("POST", "/v2/templates/{templateID}/builds/{buildID}", surface="platform") - ep.tested = True - ep.expected_status = 404 - status, body, _ = ctrl("POST", f"/v2/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={}) - ep.actual_status = status - if status not in (400, 404): - ep.findings.append(Finding("minor", "status_code", "POST /v2/.../builds/{buildID}", - f"Expected 404, got {status}", "404", str(status))) - results.append(ep) - - # POST /templates/tags (400) - print(" POST /templates/tags (400)") + # POST /templates/tags (400 — empty body) + print(" POST /templates/tags (400 — empty)") ep = EndpointResult("POST", "/templates/tags", surface="platform") ep.tested = True ep.expected_status = 400 @@ -1183,8 +1305,47 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Empty body: expected 400, got {status}", "400", str(status))) results.append(ep) - # DELETE /templates/tags (400) - print(" DELETE /templates/tags (400)") + # POST /templates/tags (201 — assign tag) + test_tag = "_validation_test" + if template_name and template_tag: + # template_name may include a tag (e.g. 
"team/name:latest"), strip it + base_name = template_name.split(":")[0] + # target references the existing build via name:existing_tag + target = f"{base_name}:{template_tag}" + print(f" POST /templates/tags (201 — assign '{test_tag}')") + ep = EndpointResult("POST", "/templates/tags", surface="platform") + ep.tested = True + ep.expected_status = 201 + status, body, _ = ctrl("POST", "/templates/tags", headers=h, + body={"target": target, "tags": [test_tag]}) + ep.actual_status = status + ep.response_body = body + if status == 201 and isinstance(body, dict): + schema = {"$ref": "#/components/schemas/AssignedTemplateTags"} + ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "POST /templates/tags")) + elif status != 201: + ep.findings.append(Finding("critical", "status_code", "POST /templates/tags", + f"Expected 201, got {status}", "201", str(status))) + results.append(ep) + + # DELETE /templates/tags (204 — remove the test tag) + print(f" DELETE /templates/tags (204 — remove '{test_tag}')") + ep = EndpointResult("DELETE", "/templates/tags", surface="platform") + ep.tested = True + ep.expected_status = 204 + status, body, _ = ctrl("DELETE", "/templates/tags", headers=h, + body={"name": base_name, "tags": [test_tag]}) + ep.actual_status = status + if status != 204: + ep.findings.append(Finding("critical", "status_code", "DELETE /templates/tags", + f"Expected 204, got {status}", "204", str(status))) + results.append(ep) + else: + print(" POST /templates/tags (skip — no template name/tag discovered)") + print(" DELETE /templates/tags (skip — no template name/tag discovered)") + + # DELETE /templates/tags (400 — empty body) + print(" DELETE /templates/tags (400 — empty)") ep = EndpointResult("DELETE", "/templates/tags", surface="platform") ep.tested = True ep.expected_status = 400 @@ -1195,7 +1356,23 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Empty body: expected 400, got {status}", "400", str(status))) 
results.append(ep) - # DELETE /templates/{templateID} (404) + # ------------------------------------------------------------------ + # Clean up test templates + DELETE /templates/{templateID} + # ------------------------------------------------------------------ + for cleanup_id, label in [(v2_template_id, "v2 test"), (v3_template_id, "v3 test")]: + if cleanup_id: + print(f" DELETE /templates/{cleanup_id} ({label} cleanup)") + ep = EndpointResult("DELETE", "/templates/{templateID}", surface="platform") + ep.tested = True + ep.expected_status = 200 + status, body, _ = ctrl("DELETE", f"/templates/{cleanup_id}", headers=h) + ep.actual_status = status + if status not in (200, 204): + ep.findings.append(Finding("minor", "status_code", "DELETE /templates/{templateID}", + f"Cleanup {label}: expected 200, got {status}", "200", str(status))) + results.append(ep) + + # DELETE /templates/{templateID} (404 — non-existent) print(" DELETE /templates/{templateID} (404)") ep = EndpointResult("DELETE", "/templates/{templateID}", surface="platform") ep.tested = True @@ -2036,12 +2213,10 @@ def generate_report( now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC") duration = end_time - start_time - # Count findings + # Count findings — only critical matters for CI all_findings = [] for r in all_results: - all_findings.extend(r.findings) - critical = [f for f in all_findings if f.severity == "critical"] - minor = [f for f in all_findings if f.severity == "minor"] + all_findings.extend(f for f in r.findings if f.severity == "critical") tested = sum(1 for r in all_results if r.tested) total = len(all_results) @@ -2050,16 +2225,16 @@ def generate_report( lines.append(f"**Date**: {now}") lines.append(f"**Spec Version**: 0.1.0") lines.append(f"**Endpoints Tested**: {tested} / {total}") - lines.append(f"**Findings**: {len(all_findings)} total ({len(critical)} critical, {len(minor)} minor)") + lines.append(f"**Critical Findings**: {len(all_findings)}") 
lines.append(f"**Duration**: {duration:.1f}s\n") # Executive Summary lines.append("## Executive Summary\n") - if not critical: - lines.append("No critical findings. The spec broadly matches the live API behavior.") + if not all_findings: + lines.append("No critical findings. The spec matches the live API behavior.") else: - lines.append(f"Found {len(critical)} critical discrepancies between the spec and the live API, " - f"plus {len(minor)} minor issues. See details below.") + lines.append(f"Found {len(all_findings)} critical discrepancies between the spec and the live API. " + f"See details below.") lines.append("") # Endpoint Results — Platform @@ -2076,30 +2251,17 @@ def generate_report( _render_endpoint_result(lines, r) # Critical Findings Summary - lines.append("## Findings Summary\n") - lines.append("### Critical Findings\n") + lines.append("## Critical Findings\n") lines.append("Issues where the spec does not match the actual API behavior.\n") - if critical: + if all_findings: lines.append("| # | Endpoint | Category | Finding | Expected | Actual |") lines.append("|---|----------|----------|---------|----------|--------|") - for i, f in enumerate(critical, 1): + for i, f in enumerate(all_findings, 1): lines.append(f"| {i} | {f.endpoint} | {f.category} | {f.message[:80]} | {f.expected} | {f.actual} |") else: lines.append("None found.") lines.append("") - # Minor Findings - lines.append("### Minor Findings\n") - lines.append("Missing descriptions, naming inconsistencies, documentation quality issues.\n") - if minor: - lines.append("| # | Endpoint | Category | Finding |") - lines.append("|---|----------|----------|---------|") - for i, f in enumerate(minor, 1): - lines.append(f"| {i} | {f.endpoint} | {f.category} | {f.message[:100]} |") - else: - lines.append("None found.") - lines.append("") - # Best-Practice Recommendations lines.append("### Best-Practice Recommendations\n") lines.append("Holistic improvements to make the spec production-quality.\n") @@ 
-2165,26 +2327,25 @@ def generate_report( def _render_endpoint_result(lines: list[str], r: EndpointResult): """Render a single endpoint result to markdown.""" icon = "YES" if r.tested else "NO" - status_match = r.actual_status == r.expected_status if r.tested else False + critical_findings = [f for f in r.findings if f.severity == "critical"] lines.append(f"#### {r.method} {r.path}") lines.append(f"- **Tested**: {icon}" + (f" ({r.skip_reason})" if not r.tested and r.skip_reason else "")) if r.tested: lines.append(f"- **Expected Status**: {r.expected_status}") lines.append(f"- **Actual Status**: {r.actual_status}") lines.append(f"- **Response Schema**:") - if r.findings: - missing = [f for f in r.findings if f.category == "missing_field"] - extra = [f for f in r.findings if f.category == "extra_field"] - types = [f for f in r.findings if f.category == "type_mismatch"] - other = [f for f in r.findings if f.category not in ("missing_field", "extra_field", "type_mismatch")] + if critical_findings: + missing = [f for f in critical_findings if f.category == "missing_field"] + extra = [f for f in critical_findings if f.category == "extra_field"] + types = [f for f in critical_findings if f.category == "type_mismatch"] + other = [f for f in critical_findings if f.category not in ("missing_field", "extra_field", "type_mismatch")] lines.append(f" - Required fields present: {'list missing: ' + ', '.join(f.message for f in missing) if missing else 'YES'}") lines.append(f" - Extra undocumented fields: {', '.join(f.message for f in extra) if extra else 'none'}") lines.append(f" - Type mismatches: {', '.join(f.message for f in types) if types else 'none'}") if other: lines.append(f"- **Findings**:") for f in other: - sev = "CRITICAL" if f.severity == "critical" else "MINOR" - lines.append(f" - [{sev}] {f.message}") + lines.append(f" - [CRITICAL] {f.message}") else: lines.append(f" - Required fields present: YES") lines.append(f" - Extra undocumented fields: none") @@ -2279,13 
+2440,15 @@ def should_run(phase: int) -> bool: template_id = None build_id = None alias = None + template_name = None + template_tag = None if should_run(2): - phase2_results, template_id, build_id, alias = run_phase_2_templates_read(api_key, spec) + phase2_results, template_id, build_id, alias, template_name, template_tag = run_phase_2_templates_read(api_key, spec) all_results.extend(phase2_results) # Phase 3: Templates (write) if should_run(3): - all_results.extend(run_phase_3_templates_write(api_key, spec, template_id)) + all_results.extend(run_phase_3_templates_write(api_key, spec, template_id, template_name, template_tag)) # Create sandbox for phases 4-12 if not skip_sandbox and any(should_run(p) for p in range(4, 13)): @@ -2347,21 +2510,22 @@ def should_run(phase: int) -> bool: with open(output_path, "w") as f: f.write(report) - # Summary + # Summary — only critical findings matter (CI pass/fail) all_findings = [] for r in all_results: - all_findings.extend(r.findings) - critical = [f for f in all_findings if f.severity == "critical"] - minor = [f for f in all_findings if f.severity == "minor"] + all_findings.extend(f for f in r.findings if f.severity == "critical") tested = sum(1 for r in all_results if r.tested) print("\n" + "=" * 60) print(f" Results: {tested} endpoints tested") - print(f" Findings: {len(critical)} critical, {len(minor)} minor") + print(f" Findings: {len(all_findings)} critical") + if all_findings: + for f in all_findings: + print(f" - {f.endpoint}: {f.message}") print(f" Report written to: {output_path}") print("=" * 60) - sys.exit(1 if critical else 0) + sys.exit(1 if all_findings else 0) if __name__ == "__main__": From 5e22f76b433b93c6f99475684077a97641513918 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 00:10:42 +0100 Subject: [PATCH 12/37] Fix 8 spec issues found during SDK testing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 1. 
Streaming RPCs: content-type → application/connect+json, add Connect-Protocol-Version and Connect-Timeout-Ms headers 2. EndEvent.exitCode: marked deprecated, document status string parsing 3. envdAccessToken: clarify only returned with secure: true, mark nullable 4. LogLevel: fix description ("State of the sandbox" → log severity) 5. Sandbox.domain: mark deprecated (always null) 6. GET /templates/{id}/files/{hash}: change 201 → 200 response Issues 4 (pagination response) and 5 (DELETE with body) are upstream API design decisions that cannot be fixed in the spec alone. --- openapi-public.yml | 138 ++++++++++++++++---------- scripts/generate_openapi_reference.py | 108 +++++++++++++++++++- 2 files changed, 191 insertions(+), 55 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index f894786a..97755d3f 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -5,7 +5,7 @@ info: description: Complete E2B developer API. Platform endpoints are served on api.e2b.app. Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app. 
servers: -- &id005 +- &id007 url: https://api.e2b.app description: E2B Platform API paths: @@ -18,7 +18,7 @@ paths: '502': &id001 description: Sandbox not found content: - application/json: + application/connect+json: schema: type: object required: @@ -425,7 +425,7 @@ paths: operationId: filesystem.Filesystem.WatchDir requestBody: content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/filesystem.WatchDirRequest' required: true @@ -433,12 +433,24 @@ paths: '200': description: Stream of WatchDirResponse events content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/filesystem.WatchDirResponse' '502': *id001 security: - *id003 + parameters: + - &id005 + name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - &id006 + name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' servers: - *id002 /process.Process/CloseStdin: @@ -486,7 +498,7 @@ paths: operationId: process.Process.Connect requestBody: content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.ConnectRequest' required: true @@ -494,12 +506,15 @@ paths: '200': description: Stream of ConnectResponse events content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.ConnectResponse' '502': *id001 security: - *id003 + parameters: + - *id005 + - *id006 servers: - *id002 /process.Process/List: @@ -613,7 +628,7 @@ paths: operationId: process.Process.Start requestBody: content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.StartRequest' required: true @@ -621,12 +636,15 @@ paths: '200': description: Stream of StartResponse events content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.StartResponse' '502': *id001 security: - *id003 + parameters: + - 
*id005 + - *id006 servers: - *id002 /process.Process/StreamInput: @@ -639,7 +657,7 @@ paths: operationId: process.Process.StreamInput requestBody: content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.StreamInputRequest' required: true @@ -647,12 +665,15 @@ paths: '200': description: Stream of StreamInputResponse events content: - application/json: + application/connect+json: schema: $ref: '#/components/schemas/process.StreamInputResponse' '502': *id001 security: - *id003 + parameters: + - *id005 + - *id006 servers: - *id002 /process.Process/Update: @@ -711,7 +732,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /teams/{teamID}/metrics: get: description: Get metrics for the team @@ -755,7 +776,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /teams/{teamID}/metrics/max: get: description: Get the maximum metrics for the team in the given interval @@ -806,7 +827,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes: get: description: List all running sandboxes @@ -864,7 +885,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /v2/sandboxes: get: description: List all sandboxes @@ -909,7 +930,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/metrics: get: description: List metrics for given sandboxes @@ -943,7 +964,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/logs: get: description: Get sandbox logs. Use /v2/sandboxes/{sandboxID}/logs instead. 
@@ -983,7 +1004,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}: get: description: Get a sandbox by id @@ -1024,7 +1045,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/metrics: get: description: Get sandbox metrics @@ -1068,7 +1089,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/pause: post: description: Pause the sandbox @@ -1090,7 +1111,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/resume: post: deprecated: true @@ -1123,7 +1144,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/connect: post: description: Returns sandbox details. If the sandbox is paused, it will be resumed. @@ -1162,7 +1183,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/timeout: post: description: Set the timeout for the sandbox. The sandbox will expire x seconds @@ -1199,7 +1220,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/refreshes: post: description: Refresh the sandbox extending its time to live @@ -1229,7 +1250,7 @@ paths: '404': $ref: '#/components/responses/404' servers: - - *id005 + - *id007 /sandboxes/{sandboxID}/snapshots: post: description: Create a persistent snapshot from the sandbox's current state. 
@@ -1269,7 +1290,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /snapshots: get: description: List all snapshots for the team @@ -1300,7 +1321,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /v3/templates: post: description: Create a new template @@ -1328,7 +1349,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /v2/templates: post: description: Create a new template @@ -1357,7 +1378,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}/files/{hash}: get: description: Get an upload link for a tar file containing build layer files @@ -1375,12 +1396,6 @@ paths: type: string description: Hash of the files responses: - '201': - description: The upload link where to upload the tar file - content: - application/json: - schema: - $ref: '#/components/schemas/TemplateBuildFileUpload' '400': $ref: '#/components/responses/400' '401': @@ -1389,8 +1404,14 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + '200': + description: Upload link for the tar file containing build layer files + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildFileUpload' servers: - - *id005 + - *id007 /templates: get: description: List all templates @@ -1447,7 +1468,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}: get: description: List all builds for a template @@ -1539,7 +1560,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1560,7 +1581,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /v2/templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1586,7 +1607,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - 
*id007 /v2/templates/{templateID}: patch: description: Update template @@ -1617,7 +1638,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}/builds/{buildID}/status: get: description: Get template build info @@ -1665,7 +1686,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}/builds/{buildID}/logs: get: description: Get template build logs @@ -1720,7 +1741,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/tags: post: description: Assign tag(s) to a template build @@ -1773,7 +1794,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/{templateID}/tags: get: description: List all tags for a template @@ -1801,7 +1822,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /templates/aliases/{alias}: get: description: Check if template with given alias exists @@ -1832,7 +1853,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /volumes: get: description: List all team volumes @@ -1881,7 +1902,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 /volumes/{volumeID}: get: description: Get team volume info @@ -1924,7 +1945,7 @@ paths: '500': $ref: '#/components/responses/500' servers: - - *id005 + - *id007 components: securitySchemes: AccessTokenAuth: @@ -2681,12 +2702,17 @@ components: type: integer title: exit_code format: int32 + deprecated: true + description: 'Deprecated: not populated by the server. Parse the exit code + from the `status` string (e.g. "exit status 0").' exited: type: boolean title: exited status: type: string title: status + description: Process exit status string (e.g. "exit status 0"). Parse the + integer exit code from this field. 
error: type: - string @@ -3160,7 +3186,10 @@ components: $ref: '#/components/schemas/EnvdVersion' envdAccessToken: type: string - description: Access token used for envd communication + description: 'Access token for authenticating envd requests to this sandbox. + Only returned when the sandbox is created with `secure: true`. Null for + non-secure sandboxes (envd endpoints work without auth).' + nullable: true trafficAccessToken: type: string nullable: true @@ -3168,7 +3197,8 @@ components: domain: type: string nullable: true - description: Base domain where the sandbox traffic is accessible + description: 'Deprecated: always null. Construct sandbox URLs as `https://{port}-{sandboxID}.e2b.app`.' + deprecated: true SandboxDetail: required: - templateID @@ -3207,11 +3237,15 @@ components: $ref: '#/components/schemas/EnvdVersion' envdAccessToken: type: string - description: Access token used for envd communication + description: 'Access token for authenticating envd requests to this sandbox. + Only returned when the sandbox is created with `secure: true`. Null for + non-secure sandboxes (envd endpoints work without auth).' + nullable: true domain: type: string nullable: true - description: Base domain where the sandbox traffic is accessible + description: 'Deprecated: always null. Construct sandbox URLs as `https://{port}-{sandboxID}.e2b.app`.' + deprecated: true cpuCount: $ref: '#/components/schemas/CPUCount' memoryMB: @@ -3846,7 +3880,7 @@ components: type: string LogLevel: type: string - description: State of the sandbox + description: Severity level for log entries (e.g. 
info, warn, error) BuildLogEntry: required: - timestamp diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 51a067ea..650de2bb 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -608,6 +608,14 @@ def add_operation_ids(spec: dict[str, Any]) -> None: print(f"==> Added {count} operationIds to envd endpoints") +STREAMING_ENDPOINTS = { + "/filesystem.Filesystem/WatchDir", + "/process.Process/Start", + "/process.Process/Connect", + "/process.Process/StreamInput", +} + + def fix_spec_issues(spec: dict[str, Any]) -> None: """Fix known discrepancies between the source spec and the live API. @@ -615,6 +623,7 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: so the published docs match actual API behavior. """ schemas = spec.get("components", {}).get("schemas", {}) + paths = spec.get("paths", {}) fixes = [] # 1. TemplateBuildStatus enum missing 'uploaded' @@ -633,9 +642,11 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: # 3. LogLevel enum too strict — server returns empty/whitespace values log_level = schemas.get("LogLevel") - if log_level and "enum" in log_level: - del log_level["enum"] - fixes.append("LogLevel: removed enum constraint (server sends non-enum values)") + if log_level: + if "enum" in log_level: + del log_level["enum"] + log_level["description"] = "Severity level for log entries (e.g. info, warn, error)" + fixes.append("LogLevel: removed enum constraint, fixed description") # 4. Metrics schema missing mem_used_mib and mem_total_mib metrics = schemas.get("Metrics") @@ -654,6 +665,97 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: } fixes.append("Metrics: added 'mem_total_mib'") + # 5. Streaming RPC endpoints: wrong content-type and missing headers + # Server requires application/connect+json with envelope framing, + # not application/json. 
+ connect_version_param = { + "name": "Connect-Protocol-Version", + "in": "header", + "required": True, + "schema": {"$ref": "#/components/schemas/connect-protocol-version"}, + } + connect_timeout_param = { + "name": "Connect-Timeout-Ms", + "in": "header", + "schema": {"$ref": "#/components/schemas/connect-timeout-header"}, + } + for ep_path in STREAMING_ENDPOINTS: + path_item = paths.get(ep_path, {}) + op = path_item.get("post") + if not op: + continue + # Fix request content-type + rb = op.get("requestBody", {}).get("content", {}) + if "application/json" in rb and "application/connect+json" not in rb: + rb["application/connect+json"] = rb.pop("application/json") + # Fix response content-type + for status_code, resp in op.get("responses", {}).items(): + if not isinstance(resp, dict): + continue + rc = resp.get("content", {}) + if "application/json" in rc and "application/connect+json" not in rc: + rc["application/connect+json"] = rc.pop("application/json") + # Add Connect-Protocol-Version and Connect-Timeout-Ms headers + params = op.setdefault("parameters", []) + has_cpv = any(p.get("name") == "Connect-Protocol-Version" for p in params) + if not has_cpv: + params.insert(0, connect_version_param) + params.insert(1, connect_timeout_param) + fixes.append(f"{ep_path}: content-type → application/connect+json, added Connect headers") + + # 6. EndEvent.exitCode not populated — API returns status string instead + end_event = schemas.get("process.ProcessEvent.EndEvent") + if end_event and "properties" in end_event: + ec = end_event["properties"].get("exitCode") + if ec: + ec["deprecated"] = True + ec["description"] = ( + "Deprecated: not populated by the server. " + "Parse the exit code from the `status` string (e.g. \"exit status 0\")." + ) + st = end_event["properties"].get("status") + if st and not st.get("description"): + st["description"] = ( + "Process exit status string (e.g. \"exit status 0\"). " + "Parse the integer exit code from this field." 
+ ) + fixes.append("EndEvent: marked exitCode as deprecated, documented status string") + + # 7. envdAccessToken description misleading — only returned when secure: true + for schema_name in ("Sandbox", "SandboxDetail"): + schema = schemas.get(schema_name, {}) + eat = schema.get("properties", {}).get("envdAccessToken") + if eat: + eat["nullable"] = True + eat["description"] = ( + "Access token for authenticating envd requests to this sandbox. " + "Only returned when the sandbox is created with `secure: true`. " + "Null for non-secure sandboxes (envd endpoints work without auth)." + ) + fixes.append("envdAccessToken: clarified secure-only behavior, marked nullable") + + # 8. Sandbox.domain always null — mark as deprecated + for schema_name in ("Sandbox", "SandboxDetail"): + schema = schemas.get(schema_name, {}) + dom = schema.get("properties", {}).get("domain") + if dom: + dom["deprecated"] = True + dom["description"] = ( + "Deprecated: always null. Construct sandbox URLs as " + "`https://{port}-{sandboxID}.e2b.app`." + ) + fixes.append("Sandbox.domain: marked as deprecated (always null)") + + # 9. 
GET /templates/{templateID}/files/{hash} returns 201, not 200 + files_path = paths.get("/templates/{templateID}/files/{hash}", {}) + files_get = files_path.get("get") + if files_get: + responses = files_get.get("responses", {}) + if "201" in responses and "200" not in responses: + responses["200"] = responses.pop("201") + responses["200"]["description"] = "Upload link for the tar file containing build layer files" + fixes.append("/templates/{templateID}/files/{hash}: changed 201 → 200 response") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: From 6c6e98954f619539a5e3e29cc68dcd11d882b18d Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 01:44:01 +0100 Subject: [PATCH 13/37] Fix 12 spec inconsistencies from SDK testing report MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generation script fixes (fix_spec_issues): - Generate operationId for all 52 platform endpoints - Remove phantom /v2/sandboxes/{sandboxID}/logs deprecation reference - Complete truncated 'start'/'end' parameter descriptions on metrics - Fix sandboxId → sandboxID casing in 502 error schema - Add security: [] and 'health' tag to GET /health - Remove YAML anchor overlay on /files responses (DownloadSuccess/UploadSuccess) - Add type: object to 53 schemas missing it - Move 'end' param description out of schema to sibling level Also fix fill_empty_responses to skip $ref responses. 
Upstream issues not fixable in spec: - allow_internet_access snake_case (server field name) - Duplicate EntryInfo schemas (different APIs) - Inconsistent error response coverage (API behavior) - Auth inconsistency across template versions (intentional v1→v2/v3) --- openapi-public.yml | 200 ++++++++++++++++++-------- scripts/generate_openapi_reference.py | 139 ++++++++++++++++++ 2 files changed, 278 insertions(+), 61 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 97755d3f..c35621f1 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -5,7 +5,7 @@ info: description: Complete E2B developer API. Platform endpoints are served on api.e2b.app. Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app. servers: -- &id007 +- &id006 url: https://api.e2b.app description: E2B Platform API paths: @@ -22,14 +22,10 @@ paths: schema: type: object required: - - sandboxId + - sandboxID - message - code properties: - sandboxId: - type: string - description: Identifier of the sandbox - example: i1234abcd5678efgh90jk message: type: string description: Error message @@ -38,7 +34,14 @@ paths: type: integer description: Error code example: 502 + sandboxID: + type: string + description: Identifier of the sandbox + example: i1234abcd5678efgh90jk operationId: getHealth + security: [] + tags: + - health servers: - &id002 url: https://{port}-{sandboxID}.e2b.app @@ -98,11 +101,6 @@ paths: responses: '200': $ref: '#/components/responses/DownloadSuccess' - content: &id004 - application/json: - schema: - type: object - description: Empty response '401': $ref: '#/components/responses/InvalidUser' '400': @@ -130,7 +128,6 @@ paths: responses: '200': $ref: '#/components/responses/UploadSuccess' - content: *id004 '400': $ref: '#/components/responses/InvalidPath' '401': @@ -440,13 +437,13 @@ paths: security: - *id003 parameters: - - &id005 + - &id004 name: Connect-Protocol-Version in: header required: true schema: $ref: 
'#/components/schemas/connect-protocol-version' - - &id006 + - &id005 name: Connect-Timeout-Ms in: header schema: @@ -513,8 +510,8 @@ paths: security: - *id003 parameters: + - *id004 - *id005 - - *id006 servers: - *id002 /process.Process/List: @@ -643,8 +640,8 @@ paths: security: - *id003 parameters: + - *id004 - *id005 - - *id006 servers: - *id002 /process.Process/StreamInput: @@ -672,8 +669,8 @@ paths: security: - *id003 parameters: + - *id004 - *id005 - - *id006 servers: - *id002 /process.Process/Update: @@ -731,8 +728,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getTeams servers: - - *id007 + - *id006 /teams/{teamID}/metrics: get: description: Get metrics for the team @@ -749,15 +747,15 @@ paths: format: int64 minimum: 0 description: Unix timestamp for the start of the interval, in seconds, for - which the metrics + which the metrics are returned. - in: query name: end schema: type: integer format: int64 minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for - which the metrics + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. responses: '200': description: Successfully returned the team metrics @@ -775,8 +773,9 @@ paths: $ref: '#/components/responses/403' '500': $ref: '#/components/responses/500' + operationId: getTeamMetrics servers: - - *id007 + - *id006 /teams/{teamID}/metrics/max: get: description: Get the maximum metrics for the team in the given interval @@ -793,15 +792,15 @@ paths: format: int64 minimum: 0 description: Unix timestamp for the start of the interval, in seconds, for - which the metrics + which the metrics are returned. - in: query name: end schema: type: integer format: int64 minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for - which the metrics + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. 
- in: query name: metric required: true @@ -826,8 +825,9 @@ paths: $ref: '#/components/responses/403' '500': $ref: '#/components/responses/500' + operationId: getTeamMetricMax servers: - - *id007 + - *id006 /sandboxes: get: description: List all running sandboxes @@ -859,6 +859,7 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' + operationId: getSandboxes post: description: Create a sandbox from the template tags: @@ -884,8 +885,9 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' + operationId: postSandboxes servers: - - *id007 + - *id006 /v2/sandboxes: get: description: List all sandboxes @@ -929,8 +931,9 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' + operationId: getSandboxes servers: - - *id007 + - *id006 /sandboxes/metrics: get: description: List metrics for given sandboxes @@ -963,11 +966,12 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' + operationId: getSandboxeMetrics servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/logs: get: - description: Get sandbox logs. Use /v2/sandboxes/{sandboxID}/logs instead. + description: Get sandbox logs. 
deprecated: true tags: - sandboxes @@ -1003,8 +1007,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getSandboxeLogs servers: - - *id007 + - *id006 /sandboxes/{sandboxID}: get: description: Get a sandbox by id @@ -1027,6 +1032,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getSandboxes delete: description: Kill a sandbox tags: @@ -1044,8 +1050,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: deleteSandboxes servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/metrics: get: description: Get sandbox metrics @@ -1062,15 +1069,15 @@ paths: format: int64 minimum: 0 description: Unix timestamp for the start of the interval, in seconds, for - which the metrics + which the metrics are returned. - in: query name: end schema: type: integer format: int64 minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for - which the metrics + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. responses: '200': description: Successfully returned the sandbox metrics @@ -1088,8 +1095,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getSandboxeMetrics servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/pause: post: description: Pause the sandbox @@ -1110,8 +1118,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postSandboxePause servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/resume: post: deprecated: true @@ -1143,8 +1152,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postSandboxeResume servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/connect: post: description: Returns sandbox details. If the sandbox is paused, it will be resumed. 
@@ -1182,8 +1192,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: postSandboxeConnect servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/timeout: post: description: Set the timeout for the sandbox. The sandbox will expire x seconds @@ -1219,8 +1230,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: postSandboxeTimeout servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/refreshes: post: description: Refresh the sandbox extending its time to live @@ -1249,8 +1261,9 @@ paths: $ref: '#/components/responses/401' '404': $ref: '#/components/responses/404' + operationId: postSandboxeRefreshes servers: - - *id007 + - *id006 /sandboxes/{sandboxID}/snapshots: post: description: Create a persistent snapshot from the sandbox's current state. @@ -1289,8 +1302,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: postSandboxeSnapshots servers: - - *id007 + - *id006 /snapshots: get: description: List all snapshots for the team @@ -1320,8 +1334,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getSnapshots servers: - - *id007 + - *id006 /v3/templates: post: description: Create a new template @@ -1348,8 +1363,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplates servers: - - *id007 + - *id006 /v2/templates: post: description: Create a new template @@ -1377,8 +1393,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplates servers: - - *id007 + - *id006 /templates/{templateID}/files/{hash}: get: description: Get an upload link for a tar file containing build layer files @@ -1410,8 +1427,9 @@ paths: application/json: schema: $ref: '#/components/schemas/TemplateBuildFileUpload' + operationId: getTemplateFiles servers: - - *id007 + - *id006 /templates: 
get: description: List all templates @@ -1441,6 +1459,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getTemplates post: description: Create a new template deprecated: true @@ -1467,8 +1486,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplates servers: - - *id007 + - *id006 /templates/{templateID}: get: description: List all builds for a template @@ -1491,6 +1511,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: getTemplates post: description: Rebuild an template deprecated: true @@ -1517,6 +1538,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplates delete: description: Delete a template tags: @@ -1533,6 +1555,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: deleteTemplates patch: description: Update template deprecated: true @@ -1552,15 +1575,20 @@ paths: responses: '200': description: The template was updated successfully - content: *id004 + content: &id007 + application/json: + schema: + type: object + description: Empty response '400': $ref: '#/components/responses/400' '401': $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: patchTemplates servers: - - *id007 + - *id006 /templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1575,13 +1603,14 @@ paths: responses: '202': description: The build has started - content: *id004 + content: *id007 '401': $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplateBuilds servers: - - *id007 + - *id006 /v2/templates/{templateID}/builds/{buildID}: post: description: Start the build @@ -1601,13 +1630,14 @@ paths: responses: '202': description: The build has started - content: *id004 + content: *id007 '401': $ref: 
'#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postTemplateBuilds servers: - - *id007 + - *id006 /v2/templates/{templateID}: patch: description: Update template @@ -1637,8 +1667,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: patchTemplates servers: - - *id007 + - *id006 /templates/{templateID}/builds/{buildID}/status: get: description: Get template build info @@ -1685,8 +1716,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getTemplateBuildStatus servers: - - *id007 + - *id006 /templates/{templateID}/builds/{buildID}/logs: get: description: Get template build logs @@ -1740,8 +1772,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getTemplateBuildLogs servers: - - *id007 + - *id006 /templates/tags: post: description: Assign tag(s) to a template build @@ -1770,6 +1803,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: postTemplateTags delete: description: Delete multiple tags from templates tags: @@ -1793,8 +1827,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: deleteTemplateTags servers: - - *id007 + - *id006 /templates/{templateID}/tags: get: description: List all tags for a template @@ -1821,8 +1856,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getTemplateTags servers: - - *id007 + - *id006 /templates/aliases/{alias}: get: description: Check if template with given alias exists @@ -1852,8 +1888,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getTemplateAliases servers: - - *id007 + - *id006 /volumes: get: description: List all team volumes @@ -1875,6 +1912,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + 
operationId: getVolumes post: description: Create a new team volume tags: @@ -1901,8 +1939,9 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' + operationId: postVolumes servers: - - *id007 + - *id006 /volumes/{volumeID}: get: description: Get team volume info @@ -1926,6 +1965,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: getVolumes delete: description: Delete a team volume tags: @@ -1944,8 +1984,9 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' + operationId: deleteVolumes servers: - - *id007 + - *id006 components: securitySchemes: AccessTokenAuth: @@ -2182,6 +2223,7 @@ components: message: type: string description: Error + type: object EntryInfo: required: - path @@ -2199,6 +2241,7 @@ components: description: Type of the file enum: - file + type: object EnvVars: additionalProperties: type: string @@ -2944,6 +2987,7 @@ components: isDefault: type: boolean description: Whether the team is the default team + type: object TeamUser: required: - id @@ -2956,11 +3000,13 @@ components: email: type: string description: Email of the user + type: object TemplateUpdateRequest: properties: public: type: boolean description: Whether the template is public or only accessible by the team + type: object TemplateUpdateResponse: required: - names @@ -2970,6 +3016,7 @@ components: description: Names of the template (namespace/alias format when namespaced) items: type: string + type: object CPUCount: type: integer format: int32 @@ -3070,6 +3117,7 @@ components: line: type: string description: Log line content + type: object SandboxLogEntry: required: - timestamp @@ -3090,6 +3138,7 @@ components: type: object additionalProperties: type: string + type: object SandboxLogs: required: - logs @@ -3105,6 +3154,7 @@ components: type: array items: $ref: '#/components/schemas/SandboxLogEntry' + type: object SandboxMetric: description: Metric entry with timestamp 
and line required: @@ -3150,6 +3200,7 @@ components: type: integer format: int64 description: Total disk space in bytes + type: object SandboxVolumeMount: type: object properties: @@ -3199,6 +3250,7 @@ components: nullable: true description: 'Deprecated: always null. Construct sandbox URLs as `https://{port}-{sandboxID}.e2b.app`.' deprecated: true + type: object SandboxDetail: required: - templateID @@ -3260,6 +3312,7 @@ components: type: array items: $ref: '#/components/schemas/SandboxVolumeMount' + type: object ListedSandbox: required: - templateID @@ -3310,6 +3363,7 @@ components: type: array items: $ref: '#/components/schemas/SandboxVolumeMount' + type: object SandboxesWithMetrics: required: - sandboxes @@ -3317,6 +3371,7 @@ components: sandboxes: additionalProperties: $ref: '#/components/schemas/SandboxMetric' + type: object NewSandbox: required: - templateID @@ -3355,6 +3410,7 @@ components: type: array items: $ref: '#/components/schemas/SandboxVolumeMount' + type: object ResumedSandbox: properties: timeout: @@ -3367,6 +3423,7 @@ components: type: boolean deprecated: true description: Automatically pauses the sandbox after the timeout + type: object ConnectSandbox: type: object required: @@ -3403,6 +3460,7 @@ components: type: number format: float description: Number of sandboxes started per second + type: object MaxTeamMetric: description: Team metric with timestamp required: @@ -3422,6 +3480,7 @@ components: value: type: number description: The maximum value of the requested metric in the given interval + type: object Template: required: - templateID @@ -3496,6 +3555,7 @@ components: $ref: '#/components/schemas/EnvdVersion' buildStatus: $ref: '#/components/schemas/TemplateBuildStatus' + type: object TemplateRequestResponseV3: required: - templateID @@ -3530,6 +3590,7 @@ components: deprecated: true items: type: string + type: object TemplateLegacy: required: - templateID @@ -3594,6 +3655,7 @@ components: description: Number of times the template was built 
envdVersion: $ref: '#/components/schemas/EnvdVersion' + type: object TemplateBuild: required: - buildID @@ -3629,6 +3691,7 @@ components: $ref: '#/components/schemas/DiskSizeMB' envdVersion: $ref: '#/components/schemas/EnvdVersion' + type: object TemplateWithBuilds: required: - templateID @@ -3680,6 +3743,7 @@ components: description: List of builds for the template items: $ref: '#/components/schemas/TemplateBuild' + type: object TemplateAliasResponse: required: - templateID @@ -3691,6 +3755,7 @@ components: public: type: boolean description: Whether the template is public or only accessible by the team + type: object TemplateBuildRequest: required: - dockerfile @@ -3714,6 +3779,7 @@ components: $ref: '#/components/schemas/CPUCount' memoryMB: $ref: '#/components/schemas/MemoryMB' + type: object TemplateStep: description: Step in the template build process required: @@ -3736,6 +3802,7 @@ components: type: boolean description: Whether the step should be forced to run regardless of the cache + type: object TemplateBuildRequestV3: properties: name: @@ -3760,6 +3827,7 @@ components: $ref: '#/components/schemas/CPUCount' memoryMB: $ref: '#/components/schemas/MemoryMB' + type: object TemplateBuildRequestV2: required: - alias @@ -3775,6 +3843,7 @@ components: $ref: '#/components/schemas/CPUCount' memoryMB: $ref: '#/components/schemas/MemoryMB' + type: object FromImageRegistry: oneOf: - $ref: '#/components/schemas/AWSRegistry' @@ -3878,6 +3947,7 @@ components: url: description: Url where the file should be uploaded to type: string + type: object LogLevel: type: string description: Severity level for log entries (e.g. 
info, warn, error) @@ -3899,6 +3969,7 @@ components: step: type: string description: Step in the build process related to the log entry + type: object BuildStatusReason: required: - message @@ -3916,6 +3987,7 @@ components: type: array items: $ref: '#/components/schemas/BuildLogEntry' + type: object TemplateBuildStatus: type: string description: Status of the template build @@ -3955,6 +4027,7 @@ components: $ref: '#/components/schemas/TemplateBuildStatus' reason: $ref: '#/components/schemas/BuildStatusReason' + type: object TemplateBuildLogsResponse: required: - logs @@ -3965,6 +4038,7 @@ components: type: array items: $ref: '#/components/schemas/BuildLogEntry' + type: object LogsDirection: type: string description: Direction of the logs that should be returned @@ -3997,6 +4071,7 @@ components: type: string format: uuid description: Identifier of the build associated with these tags + type: object TemplateTag: required: - tag @@ -4014,6 +4089,7 @@ components: type: string format: date-time description: Time when the tag was assigned + type: object AssignTemplateTagsRequest: required: - target @@ -4027,6 +4103,7 @@ components: type: array items: type: string + type: object DeleteTemplateTagsRequest: required: - name @@ -4040,6 +4117,7 @@ components: type: array items: type: string + type: object Volume: type: object properties: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 650de2bb..0f72bffa 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -756,6 +756,142 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: responses["200"]["description"] = "Upload link for the tar file containing build layer files" fixes.append("/templates/{templateID}/files/{hash}: changed 201 → 200 response") + # 10. 
Generate operationId for platform endpoints that lack one + op_id_count = 0 + for ep_path, path_item in paths.items(): + # Skip envd endpoints (already have operationIds) + if "/" in ep_path.lstrip("/") and "." in ep_path.split("/")[1]: + continue + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op or op.get("operationId"): + continue + # Build operationId from method + path segments + # e.g. GET /templates/{templateID}/builds/{buildID}/status → getTemplateBuildStatus + segments = [] + for seg in ep_path.strip("/").split("/"): + if seg.startswith("{") and seg.endswith("}"): + continue # skip path params + # Strip version prefixes + if seg in ("v2", "v3"): + continue + segments.append(seg) + # Singularize resource names for sub-resources + # e.g. /sandboxes/{id}/logs → getSandboxLogs + parts = [] + for i, seg in enumerate(segments): + if i < len(segments) - 1: + # Sub-resource parent: singularize + s = seg.rstrip("s") if seg.endswith("es") and len(seg) > 3 else ( + seg[:-1] if seg.endswith("s") and not seg.endswith("ss") else seg) + parts.append(s) + else: + parts.append(seg) + name = "".join(p.capitalize() for p in parts) + op["operationId"] = f"{method}{name}" + op_id_count += 1 + if op_id_count: + fixes.append(f"Generated operationId for {op_id_count} platform endpoints") + + # 11. Phantom deprecation reference: /v2/sandboxes/{sandboxID}/logs doesn't exist + logs_path = paths.get("/sandboxes/{sandboxID}/logs", {}) + logs_get = logs_path.get("get") + if logs_get and "/v2/" in logs_get.get("description", ""): + logs_get["description"] = "Get sandbox logs." + fixes.append("/sandboxes/{sandboxID}/logs: removed phantom /v2 deprecation reference") + + # 12. Truncated parameter descriptions on metrics endpoints + metrics_desc_suffix = " are returned." 
+ for ep_path in paths: + for method in ("get", "post"): + op = (paths[ep_path] or {}).get(method) + if not op: + continue + for param in op.get("parameters", []): + if not isinstance(param, dict) or param.get("name") not in ("start", "end"): + continue + # Description could be on param or nested in schema + for target in (param, param.get("schema", {})): + desc = target.get("description", "") + if desc and desc.rstrip().endswith("the metrics"): + target["description"] = desc.rstrip() + metrics_desc_suffix + fixes.append(f"{ep_path}: completed truncated '{param['name']}' description") + + # 13. sandboxId → sandboxID casing in 502 error schema + # The 502 response defined on /health uses "sandboxId" (lowercase d) + health_path = paths.get("/health", {}) + health_get = health_path.get("get") + if health_get: + for status_code, resp in health_get.get("responses", {}).items(): + if not isinstance(resp, dict): + continue + for ct, media in resp.get("content", {}).items(): + schema = media.get("schema", {}) + props = schema.get("properties", {}) + if "sandboxId" in props and "sandboxID" not in props: + props["sandboxID"] = props.pop("sandboxId") + req = schema.get("required", []) + for i, r in enumerate(req): + if r == "sandboxId": + req[i] = "sandboxID" + fixes.append("502 error schema: sandboxId → sandboxID") + + # 14. /health missing security: [] and tags + if health_get: + if "security" not in health_get: + health_get["security"] = [] + fixes.append("/health: added security: [] (explicitly no auth)") + if "tags" not in health_get: + health_get["tags"] = ["health"] + fixes.append("/health: added 'health' tag") + + # 15. 
/files responses: YAML anchor overlay hides actual response schemas + # Remove the overlaid empty content block so $ref responses are used + for files_ep in ("/files",): + fpath = paths.get(files_ep, {}) + for method in ("get", "post"): + op = fpath.get(method) + if not op: + continue + responses = op.get("responses", {}) + for status_code, resp in responses.items(): + if not isinstance(resp, dict): + continue + # If the response has both $ref and content with an empty schema, + # the empty content overlay was from the YAML anchor bug — remove it + if "$ref" in resp and "content" in resp: + content = resp["content"] + for ct, media in list(content.items()): + s = media.get("schema", {}) + if s.get("description") == "Empty response": + del content[ct] + if not content: + del resp["content"] + fixes.append(f"{files_ep} {method.upper()}: removed anchor-overlaid empty content") + + # 16. Missing type: object on schemas that have properties + obj_fixed = 0 + for schema_name, schema in schemas.items(): + if "properties" in schema and "type" not in schema and "allOf" not in schema and "oneOf" not in schema: + schema["type"] = "object" + obj_fixed += 1 + if obj_fixed: + fixes.append(f"Added type: object to {obj_fixed} schemas") + + # 17. 
end parameter nesting: description inside schema instead of sibling + for ep_path in paths: + for method in ("get", "post"): + op = (paths[ep_path] or {}).get(method) + if not op: + continue + for param in op.get("parameters", []): + if not isinstance(param, dict) or param.get("name") != "end": + continue + schema = param.get("schema", {}) + if "description" in schema and "description" not in param: + param["description"] = schema.pop("description") + fixes.append(f"{ep_path}: moved 'end' description out of schema") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: @@ -957,6 +1093,9 @@ def fill_empty_responses(spec: dict[str, Any]) -> None: if str(status) == "204": resp.pop("content", None) continue + # Skip responses that use $ref (content comes from the referenced response) + if "$ref" in resp: + continue if str(status).startswith("2") and "content" not in resp: resp["content"] = EMPTY_RESPONSE_CONTENT filled += 1 From b2eb09ec5d66811efed8bac68d98d04aeb38e750 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 02:55:13 +0100 Subject: [PATCH 14/37] Fix schema and consistency issues from second SDK testing round MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Schema fixes: - EntryInfo.type: add 'directory' to enum (was file-only) - SandboxMetadata, EnvVars: add type: object (had only additionalProperties) - TemplateLegacy: add missing 'names' and 'buildStatus' fields - connect-protocol-version: remove redundant enum (const suffices) - filesystem.EntryInfo.size: document int/string union type (int64) Response fixes: - PATCH /templates/{templateID}: return TemplateUpdateResponse (was empty) - POST /sandboxes/{sandboxID}/refreshes: add missing 500 response - GET /health 502: content-type → application/json (was connect+json) Also fix fill_empty_responses to skip $ref responses. 
--- openapi-public.yml | 29 +++++++--- scripts/generate_openapi_reference.py | 80 +++++++++++++++++++++++++++ 2 files changed, 102 insertions(+), 7 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index c35621f1..fdecf2b9 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -18,7 +18,7 @@ paths: '502': &id001 description: Sandbox not found content: - application/connect+json: + application/json: schema: type: object required: @@ -1261,6 +1261,8 @@ paths: $ref: '#/components/responses/401' '404': $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' operationId: postSandboxeRefreshes servers: - *id006 @@ -1575,11 +1577,10 @@ paths: responses: '200': description: The template was updated successfully - content: &id007 + content: application/json: schema: - type: object - description: Empty response + $ref: '#/components/schemas/TemplateUpdateResponse' '400': $ref: '#/components/responses/400' '401': @@ -1603,7 +1604,11 @@ paths: responses: '202': description: The build has started - content: *id007 + content: &id007 + application/json: + schema: + type: object + description: Empty response '401': $ref: '#/components/responses/401' '500': @@ -2241,11 +2246,13 @@ components: description: Type of the file enum: - file + - directory type: object EnvVars: additionalProperties: type: string description: Environment variables for the sandbox + type: object Metrics: type: object description: Resource usage metrics @@ -2282,8 +2289,6 @@ components: connect-protocol-version: type: number title: Connect-Protocol-Version - enum: - - 1 description: Define the version of the Connect protocol const: 1 connect-timeout-header: @@ -2327,6 +2332,8 @@ components: - string title: size format: int64 + description: File size in bytes. Encoded as string for values exceeding + JSON number precision (int64). 
mode: type: integer title: mode @@ -3039,6 +3046,7 @@ components: additionalProperties: type: string description: Metadata of the sandbox + type: object SandboxState: type: string description: State of the sandbox @@ -3655,6 +3663,13 @@ components: description: Number of times the template was built envdVersion: $ref: '#/components/schemas/EnvdVersion' + names: + type: array + description: Names of the template (namespace/alias format when namespaced) + items: + type: string + buildStatus: + $ref: '#/components/schemas/TemplateBuildStatus' type: object TemplateBuild: required: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 0f72bffa..5a258f85 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -892,6 +892,86 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: param["description"] = schema.pop("description") fixes.append(f"{ep_path}: moved 'end' description out of schema") + # 18. EntryInfo.type enum incomplete — missing "directory" + entry_info = schemas.get("EntryInfo") + if entry_info: + type_prop = entry_info.get("properties", {}).get("type") + if type_prop and type_prop.get("enum") == ["file"]: + type_prop["enum"] = ["file", "directory"] + fixes.append("EntryInfo.type: added 'directory' to enum") + + # 19. SandboxMetadata and EnvVars lack type: object + for name in ("SandboxMetadata", "EnvVars"): + schema = schemas.get(name, {}) + if "additionalProperties" in schema and "type" not in schema: + schema["type"] = "object" + fixes.append(f"{name}: added type: object") + + # 20. 
TemplateLegacy missing 'names' and 'buildStatus' fields + tpl_legacy = schemas.get("TemplateLegacy") + if tpl_legacy and "properties" in tpl_legacy: + props = tpl_legacy["properties"] + if "names" not in props: + props["names"] = { + "type": "array", + "description": "Names of the template (namespace/alias format when namespaced)", + "items": {"type": "string"}, + } + fixes.append("TemplateLegacy: added 'names' property") + if "buildStatus" not in props: + props["buildStatus"] = {"$ref": "#/components/schemas/TemplateBuildStatus"} + fixes.append("TemplateLegacy: added 'buildStatus' property") + + # 21. connect-protocol-version: redundant enum + const + cpv = schemas.get("connect-protocol-version") + if cpv and "enum" in cpv and "const" in cpv: + del cpv["enum"] + fixes.append("connect-protocol-version: removed redundant enum (const is sufficient)") + + # 22. filesystem.EntryInfo.size union type undocumented + fs_entry = schemas.get("filesystem.EntryInfo") + if fs_entry and "properties" in fs_entry: + size_prop = fs_entry["properties"].get("size") + if size_prop and isinstance(size_prop.get("type"), list): + size_prop["description"] = ( + "File size in bytes. Encoded as string for values exceeding " + "JSON number precision (int64)." + ) + fixes.append("filesystem.EntryInfo.size: documented integer/string union type") + + # 23. GET /health 502 uses application/connect+json — change to application/json + if health_get: + for status_code, resp in health_get.get("responses", {}).items(): + if not isinstance(resp, dict): + continue + content = resp.get("content", {}) + if "application/connect+json" in content and "application/json" not in content: + content["application/json"] = content.pop("application/connect+json") + fixes.append(f"/health {status_code}: content-type → application/json") + + # 24. 
PATCH /templates/{templateID} (deprecated) returns empty object — + # use TemplateUpdateResponse like v2 + patch_v1_path = paths.get("/templates/{templateID}", {}) + patch_v1 = patch_v1_path.get("patch") + if patch_v1: + resp_200 = patch_v1.get("responses", {}).get("200", {}) + # Replace the entire content dict (don't modify shared YAML anchor object) + resp_200["content"] = { + "application/json": { + "schema": {"$ref": "#/components/schemas/TemplateUpdateResponse"} + } + } + fixes.append("PATCH /templates/{templateID}: response → TemplateUpdateResponse") + + # 25. POST /sandboxes/{sandboxID}/refreshes missing 500 response + refreshes_path = paths.get("/sandboxes/{sandboxID}/refreshes", {}) + refreshes_post = refreshes_path.get("post") + if refreshes_post: + responses = refreshes_post.get("responses", {}) + if "500" not in responses: + responses["500"] = {"$ref": "#/components/responses/500"} + fixes.append("/sandboxes/{sandboxID}/refreshes: added 500 response") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: From 793ac50dc61c0d6b62f26467a308cf7167612e70 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 14:57:04 +0100 Subject: [PATCH 15/37] Fix duplicate and misspelled operationIds MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Rewrite operationId generator with proper singularization (sandboxes→sandbox, not sandboxe) - Use 'list' prefix for collection GETs (listSandboxes, listTemplates) - Include version suffix for v2/v3 variants (postTemplatesV3) - Singularize parent resource when followed by path param (GET /sandboxes/{id}/logs → getSandboxLogs) - Dedup check ensures all 52 platform operationIds are unique --- openapi-public.yml | 66 +++++++++++----------- scripts/generate_openapi_reference.py | 79 ++++++++++++++++++++------- 2 files changed, 93 insertions(+), 52 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index fdecf2b9..8f45fc77 100644 --- 
a/openapi-public.yml +++ b/openapi-public.yml @@ -728,7 +728,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getTeams + operationId: listTeams servers: - *id006 /teams/{teamID}/metrics: @@ -825,7 +825,7 @@ paths: $ref: '#/components/responses/403' '500': $ref: '#/components/responses/500' - operationId: getTeamMetricMax + operationId: getTeamMetricsMax servers: - *id006 /sandboxes: @@ -859,7 +859,7 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: getSandboxes + operationId: listSandboxes post: description: Create a sandbox from the template tags: @@ -931,7 +931,7 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: getSandboxes + operationId: listSandboxesV2 servers: - *id006 /sandboxes/metrics: @@ -966,7 +966,7 @@ paths: $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: getSandboxeMetrics + operationId: listSandboxesMetrics servers: - *id006 /sandboxes/{sandboxID}/logs: @@ -1007,7 +1007,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getSandboxeLogs + operationId: getSandboxLogs servers: - *id006 /sandboxes/{sandboxID}: @@ -1032,7 +1032,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getSandboxes + operationId: getSandbox delete: description: Kill a sandbox tags: @@ -1050,7 +1050,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: deleteSandboxes + operationId: deleteSandbox servers: - *id006 /sandboxes/{sandboxID}/metrics: @@ -1095,7 +1095,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: getSandboxeMetrics + operationId: getSandboxMetrics servers: - *id006 /sandboxes/{sandboxID}/pause: @@ -1118,7 +1118,7 @@ paths: $ref: '#/components/responses/401' 
'500': $ref: '#/components/responses/500' - operationId: postSandboxePause + operationId: postSandboxPause servers: - *id006 /sandboxes/{sandboxID}/resume: @@ -1152,7 +1152,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postSandboxeResume + operationId: postSandboxResume servers: - *id006 /sandboxes/{sandboxID}/connect: @@ -1192,7 +1192,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: postSandboxeConnect + operationId: postSandboxConnect servers: - *id006 /sandboxes/{sandboxID}/timeout: @@ -1230,7 +1230,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: postSandboxeTimeout + operationId: postSandboxTimeout servers: - *id006 /sandboxes/{sandboxID}/refreshes: @@ -1263,7 +1263,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: postSandboxeRefreshes + operationId: postSandboxRefreshes servers: - *id006 /sandboxes/{sandboxID}/snapshots: @@ -1304,7 +1304,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: postSandboxeSnapshots + operationId: postSandboxSnapshots servers: - *id006 /snapshots: @@ -1336,7 +1336,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getSnapshots + operationId: listSnapshots servers: - *id006 /v3/templates: @@ -1365,7 +1365,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postTemplates + operationId: postTemplatesV3 servers: - *id006 /v2/templates: @@ -1395,7 +1395,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postTemplates + operationId: postTemplatesV2 servers: - *id006 /templates/{templateID}/files/{hash}: @@ -1429,7 +1429,7 @@ paths: application/json: schema: $ref: '#/components/schemas/TemplateBuildFileUpload' - 
operationId: getTemplateFiles + operationId: getTemplateFile servers: - *id006 /templates: @@ -1461,7 +1461,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getTemplates + operationId: listTemplates post: description: Create a new template deprecated: true @@ -1513,7 +1513,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getTemplates + operationId: getTemplate post: description: Rebuild an template deprecated: true @@ -1540,7 +1540,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postTemplates + operationId: postTemplate delete: description: Delete a template tags: @@ -1557,7 +1557,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: deleteTemplates + operationId: deleteTemplate patch: description: Update template deprecated: true @@ -1587,7 +1587,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: patchTemplates + operationId: patchTemplate servers: - *id006 /templates/{templateID}/builds/{buildID}: @@ -1613,7 +1613,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postTemplateBuilds + operationId: postTemplateBuild servers: - *id006 /v2/templates/{templateID}/builds/{buildID}: @@ -1640,7 +1640,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postTemplateBuilds + operationId: postTemplateBuildV2 servers: - *id006 /v2/templates/{templateID}: @@ -1672,7 +1672,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: patchTemplates + operationId: patchTemplateV2 servers: - *id006 /templates/{templateID}/builds/{buildID}/status: @@ -1808,7 +1808,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: 
postTemplateTags + operationId: postTemplatesTags delete: description: Delete multiple tags from templates tags: @@ -1832,7 +1832,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: deleteTemplateTags + operationId: deleteTemplatesTags servers: - *id006 /templates/{templateID}/tags: @@ -1893,7 +1893,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: getTemplateAliases + operationId: getTemplatesAlias servers: - *id006 /volumes: @@ -1917,7 +1917,7 @@ paths: $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: getVolumes + operationId: listVolumes post: description: Create a new team volume tags: @@ -1970,7 +1970,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: getVolumes + operationId: getVolume delete: description: Delete a team volume tags: @@ -1989,7 +1989,7 @@ paths: $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: deleteVolumes + operationId: deleteVolume servers: - *id006 components: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 5a258f85..c517399a 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -757,7 +757,23 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: fixes.append("/templates/{templateID}/files/{hash}: changed 201 → 200 response") # 10. 
Generate operationId for platform endpoints that lack one + def _singularize(word: str) -> str: + """Simple singularization for common API resource names.""" + irregulars = {"aliases": "alias", "statuses": "status", "indices": "index"} + if word in irregulars: + return irregulars[word] + if word.endswith("sses"): + return word # "addresses" etc — skip + if word.endswith("ies"): + return word[:-3] + "y" + if word.endswith("ses") or word.endswith("xes") or word.endswith("zes"): + return word[:-2] + if word.endswith("s") and not word.endswith("ss"): + return word[:-1] + return word + op_id_count = 0 + seen_ids: dict[str, str] = {} # operationId → path (for dedup) for ep_path, path_item in paths.items(): # Skip envd endpoints (already have operationIds) if "/" in ep_path.lstrip("/") and "." in ep_path.split("/")[1]: @@ -767,28 +783,53 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: if not op or op.get("operationId"): continue # Build operationId from method + path segments - # e.g. GET /templates/{templateID}/builds/{buildID}/status → getTemplateBuildStatus - segments = [] - for seg in ep_path.strip("/").split("/"): - if seg.startswith("{") and seg.endswith("}"): - continue # skip path params - # Strip version prefixes + # Include path params to distinguish e.g. /sandboxes vs /sandboxes/{sandboxID} + # e.g. GET /sandboxes/{sandboxID}/logs → getSandboxLogs + # e.g. GET /v2/sandboxes → listSandboxesV2 + raw_segments = ep_path.strip("/").split("/") + version_suffix = "" + parts = [] + i = 0 + while i < len(raw_segments): + seg = raw_segments[i] if seg in ("v2", "v3"): + version_suffix = seg.upper() + i += 1 continue - segments.append(seg) - # Singularize resource names for sub-resources - # e.g. 
/sandboxes/{id}/logs → getSandboxLogs - parts = [] - for i, seg in enumerate(segments): - if i < len(segments) - 1: - # Sub-resource parent: singularize - s = seg.rstrip("s") if seg.endswith("es") and len(seg) > 3 else ( - seg[:-1] if seg.endswith("s") and not seg.endswith("ss") else seg) - parts.append(s) - else: - parts.append(seg) + if seg.startswith("{") and seg.endswith("}"): + # Path param — singularize the previous part if it was a collection + if parts: + parts[-1] = _singularize(parts[-1]) + i += 1 + continue + parts.append(seg) + i += 1 + + # For top-level list endpoints (GET /sandboxes, GET /templates), + # use "list" prefix instead of "get" to distinguish from single-resource GETs + prefix = method + if method == "get" and parts and not any( + s.startswith("{") for s in raw_segments[1:] + ): + # No path params → it's a list/collection endpoint + # But only if the last segment is plural (a collection name) + last = parts[-1] if parts else "" + if last.endswith("s") and last != "status": + prefix = "list" + name = "".join(p.capitalize() for p in parts) - op["operationId"] = f"{method}{name}" + op_id = f"{prefix}{name}{version_suffix}" + + # Dedup: if collision, append a disambiguator + if op_id in seen_ids: + # Try adding "ById" for single-resource variants + if any(s.startswith("{") for s in raw_segments): + op_id = f"{method}{name}ById{version_suffix}" + if op_id in seen_ids: + op_id = f"{method}{name}{version_suffix}_{len(seen_ids)}" + + seen_ids[op_id] = ep_path + op["operationId"] = op_id op_id_count += 1 if op_id_count: fixes.append(f"Generated operationId for {op_id_count} platform endpoints") From 442ff73adb3a026dbf43defef11cc8c22ec662c7 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 15:17:48 +0100 Subject: [PATCH 16/37] Exclude snapshots/volumes endpoints, merge auth tests into Teams phase --- openapi-public.yml | 207 -------------------------- scripts/generate_openapi_reference.py | 7 +- scripts/validate_api_reference.py | 15 
+- 3 files changed, 11 insertions(+), 218 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 8f45fc77..ccae284c 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -1266,79 +1266,6 @@ paths: operationId: postSandboxRefreshes servers: - *id006 - /sandboxes/{sandboxID}/snapshots: - post: - description: Create a persistent snapshot from the sandbox's current state. - Snapshots can be used to create new sandboxes and persist beyond the original - sandbox's lifetime. - tags: - - sandboxes - security: - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/sandboxID' - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - name: - type: string - description: Optional name for the snapshot template. If a snapshot - template with this name already exists, a new build will be assigned - to the existing template instead of creating a new one. - responses: - '201': - description: Snapshot created successfully - content: - application/json: - schema: - $ref: '#/components/schemas/SnapshotInfo' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: postSandboxSnapshots - servers: - - *id006 - /snapshots: - get: - description: List all snapshots for the team - tags: - - snapshots - security: - - ApiKeyAuth: [] - parameters: - - name: sandboxID - in: query - required: false - schema: - type: string - description: Filter snapshots by source sandbox ID - - $ref: '#/components/parameters/paginationLimit' - - $ref: '#/components/parameters/paginationNextToken' - responses: - '200': - description: Successfully returned snapshots - content: - application/json: - schema: - type: array - items: - $ref: '#/components/schemas/SnapshotInfo' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: listSnapshots - 
servers: - - *id006 /v3/templates: post: description: Create a new template @@ -1896,102 +1823,6 @@ paths: operationId: getTemplatesAlias servers: - *id006 - /volumes: - get: - description: List all team volumes - tags: - - volumes - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] - responses: - '200': - description: Successfully listed all team volumes - content: - application/json: - schema: - type: array - items: - $ref: '#/components/schemas/Volume' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: listVolumes - post: - description: Create a new team volume - tags: - - volumes - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/NewVolume' - responses: - '201': - description: Successfully created a new team volume - content: - application/json: - schema: - $ref: '#/components/schemas/Volume' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postVolumes - servers: - - *id006 - /volumes/{volumeID}: - get: - description: Get team volume info - tags: - - volumes - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/volumeID' - responses: - '200': - description: Successfully retrieved a team volume - content: - application/json: - schema: - $ref: '#/components/schemas/Volume' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: getVolume - delete: - description: Delete a team volume - tags: - - volumes - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/volumeID' - responses: - '204': - description: Successfully deleted a team volume - '401': - $ref: '#/components/responses/401' - '404': - $ref: 
'#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: deleteVolume - servers: - - *id006 components: securitySchemes: AccessTokenAuth: @@ -3053,23 +2884,6 @@ components: enum: - running - paused - SnapshotInfo: - type: object - required: - - snapshotID - - names - properties: - snapshotID: - type: string - description: Identifier of the snapshot template including the tag. Uses - namespace/alias when a name was provided (e.g. team-slug/my-snapshot:default), - otherwise falls back to the raw template ID (e.g. abc123:default). - names: - type: array - items: - type: string - description: Full names of the snapshot template including team namespace - and tag (e.g. team-slug/my-snapshot:v2) Mcp: type: object description: MCP configuration for the sandbox @@ -4133,27 +3947,6 @@ components: items: type: string type: object - Volume: - type: object - properties: - volumeID: - type: string - description: ID of the volume - name: - type: string - description: Name of the volume - required: - - volumeID - - name - NewVolume: - type: object - properties: - name: - type: string - description: Name of the volume - pattern: ^[a-zA-Z0-9_-]+$ - required: - - name tags: - name: files - name: filesystem.Filesystem diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index c517399a..b93b857e 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1056,8 +1056,9 @@ def filter_paths(spec: dict[str, Any]) -> None: - Removes Supabase and AdminToken securityScheme definitions """ # Remove excluded paths - excluded_prefixes = ("/access-tokens", "/api-keys") - excluded_exact = {"/v2/sandboxes/{sandboxID}/logs", "/init"} + excluded_prefixes = ("/access-tokens", "/api-keys", "/volumes", "/snapshots") + excluded_exact = {"/v2/sandboxes/{sandboxID}/logs", "/init", + "/sandboxes/{sandboxID}/snapshots"} to_remove = [ p for p in spec["paths"] if p.startswith(excluded_prefixes) or p 
in excluded_exact @@ -1071,7 +1072,7 @@ def filter_paths(spec: dict[str, Any]) -> None: for path in to_remove: del spec["paths"][path] if to_remove: - print(f"==> Removed {len(to_remove)} paths (volumes + admin)") + print(f"==> Removed {len(to_remove)} paths (volumes, snapshots, admin, internal)") # Strip supabase security entries from all operations for path_item in spec["paths"].values(): diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index cc3db4e3..d0bbda99 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -844,12 +844,15 @@ def _collect_refs(node, refs: set): def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict, access_token: str | None = None) -> list[EndpointResult]: - """Phase 1: Platform — Teams.""" + """Phase 1: Platform — Teams (auth checks + teams read).""" results = [] h = api_key_hdr(api_key) + # Auth tests: 401 for all endpoints without API key + results.extend(run_auth_tests(api_key)) + # GET /teams (requires AccessTokenAuth — Bearer token, not ApiKeyAuth) - print("\n Phase 1: Platform — Teams") + print("\n Teams") print(" GET /teams") ep = EndpointResult("GET", "/teams", surface="platform") if access_token: @@ -2139,7 +2142,7 @@ def run_auth_tests(api_key: str) -> list[EndpointResult]: """Test 401 for all control plane endpoints without auth.""" results = [] - print("\n Auth Tests: 401 for control plane without API key") + print("\n 401 checks (no API key)") endpoints = [ ("GET", "/sandboxes", None), @@ -2428,11 +2431,7 @@ def should_run(phase: int) -> bool: return phase_filter is None or phase_filter == phase try: - # Auth tests (always run) - if should_run(0): - all_results.extend(run_auth_tests(api_key)) - - # Phase 1: Teams + # Phase 1: Teams (includes 401 auth checks) if should_run(1): all_results.extend(run_phase_1_teams(api_key, team_id, spec, access_token=access_token)) From 8dc0e71432bac37c2b240d1607ccfdecdf7bc1c1 Mon Sep 17 00:00:00 2001 From: Tomas 
Beran Date: Wed, 25 Feb 2026 15:39:04 +0100 Subject: [PATCH 17/37] Rename and reorder API reference tags for documentation sidebar --- openapi-public.yml | 125 +++++++++++++------------- scripts/generate_openapi_reference.py | 30 +++++++ scripts/validate_api_reference.py | 2 +- 3 files changed, 92 insertions(+), 65 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index ccae284c..58fb8197 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -41,7 +41,7 @@ paths: operationId: getHealth security: [] tags: - - health + - Others servers: - &id002 url: https://{port}-{sandboxID}.e2b.app @@ -90,7 +90,7 @@ paths: get: summary: Download a file tags: - - files + - Others security: - *id003 parameters: @@ -115,7 +115,7 @@ paths: summary: Upload a file and ensure the parent directories exist. If the file exists, it will be overwritten. tags: - - files + - Others security: - *id003 parameters: @@ -143,7 +143,7 @@ paths: /filesystem.Filesystem/CreateWatcher: post: tags: - - filesystem.Filesystem + - Filesystem summary: CreateWatcher description: Non-streaming versions of WatchDir operationId: filesystem.Filesystem.CreateWatcher @@ -178,7 +178,7 @@ paths: /filesystem.Filesystem/GetWatcherEvents: post: tags: - - filesystem.Filesystem + - Filesystem summary: GetWatcherEvents operationId: filesystem.Filesystem.GetWatcherEvents parameters: @@ -212,7 +212,7 @@ paths: /filesystem.Filesystem/ListDir: post: tags: - - filesystem.Filesystem + - Filesystem summary: ListDir operationId: filesystem.Filesystem.ListDir parameters: @@ -246,7 +246,7 @@ paths: /filesystem.Filesystem/MakeDir: post: tags: - - filesystem.Filesystem + - Filesystem summary: MakeDir operationId: filesystem.Filesystem.MakeDir parameters: @@ -280,7 +280,7 @@ paths: /filesystem.Filesystem/Move: post: tags: - - filesystem.Filesystem + - Filesystem summary: Move operationId: filesystem.Filesystem.Move parameters: @@ -314,7 +314,7 @@ paths: /filesystem.Filesystem/Remove: post: tags: - - 
filesystem.Filesystem + - Filesystem summary: Remove operationId: filesystem.Filesystem.Remove parameters: @@ -348,7 +348,7 @@ paths: /filesystem.Filesystem/RemoveWatcher: post: tags: - - filesystem.Filesystem + - Filesystem summary: RemoveWatcher operationId: filesystem.Filesystem.RemoveWatcher parameters: @@ -382,7 +382,7 @@ paths: /filesystem.Filesystem/Stat: post: tags: - - filesystem.Filesystem + - Filesystem summary: Stat operationId: filesystem.Filesystem.Stat parameters: @@ -416,7 +416,7 @@ paths: /filesystem.Filesystem/WatchDir: post: tags: - - filesystem.Filesystem + - Filesystem summary: WatchDir description: Server-streaming RPC. Use the Connect protocol with streaming support. operationId: filesystem.Filesystem.WatchDir @@ -453,7 +453,7 @@ paths: /process.Process/CloseStdin: post: tags: - - process.Process + - Process summary: CloseStdin description: "Close stdin to signal EOF to the process.\n Only works for non-PTY\ \ processes. For PTY, send Ctrl+D (0x04) instead." @@ -489,7 +489,7 @@ paths: /process.Process/Connect: post: tags: - - process.Process + - Process summary: Connect description: Server-streaming RPC. Use the Connect protocol with streaming support. operationId: process.Process.Connect @@ -517,7 +517,7 @@ paths: /process.Process/List: post: tags: - - process.Process + - Process summary: List operationId: process.Process.List parameters: @@ -551,7 +551,7 @@ paths: /process.Process/SendInput: post: tags: - - process.Process + - Process summary: SendInput operationId: process.Process.SendInput parameters: @@ -585,7 +585,7 @@ paths: /process.Process/SendSignal: post: tags: - - process.Process + - Process summary: SendSignal operationId: process.Process.SendSignal parameters: @@ -619,7 +619,7 @@ paths: /process.Process/Start: post: tags: - - process.Process + - Process summary: Start description: Server-streaming RPC. Use the Connect protocol with streaming support. 
operationId: process.Process.Start @@ -647,7 +647,7 @@ paths: /process.Process/StreamInput: post: tags: - - process.Process + - Process summary: StreamInput description: Client-streaming RPC. Client input stream ensures ordering of messages. Use the Connect protocol with streaming support. @@ -676,7 +676,7 @@ paths: /process.Process/Update: post: tags: - - process.Process + - Process summary: Update operationId: process.Process.Update parameters: @@ -711,7 +711,7 @@ paths: get: description: List all teams tags: - - auth + - Teams security: - AccessTokenAuth: [] responses: @@ -735,7 +735,7 @@ paths: get: description: Get metrics for the team tags: - - auth + - Teams security: - ApiKeyAuth: [] parameters: @@ -780,7 +780,7 @@ paths: get: description: Get the maximum metrics for the team in the given interval tags: - - auth + - Teams security: - ApiKeyAuth: [] parameters: @@ -832,7 +832,7 @@ paths: get: description: List all running sandboxes tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -863,7 +863,7 @@ paths: post: description: Create a sandbox from the template tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] requestBody: @@ -892,7 +892,7 @@ paths: get: description: List all sandboxes tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -938,7 +938,7 @@ paths: get: description: List metrics for given sandboxes tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -974,7 +974,7 @@ paths: description: Get sandbox logs. 
deprecated: true tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1014,7 +1014,7 @@ paths: get: description: Get a sandbox by id tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1036,7 +1036,7 @@ paths: delete: description: Kill a sandbox tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1057,7 +1057,7 @@ paths: get: description: Get sandbox metrics tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1102,7 +1102,7 @@ paths: post: description: Pause the sandbox tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1126,7 +1126,7 @@ paths: deprecated: true description: Resume the sandbox tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1160,7 +1160,7 @@ paths: description: Returns sandbox details. If the sandbox is paused, it will be resumed. TTL is only extended. tags: - - sandboxes + - Sandboxes security: - ApiKeyAuth: [] parameters: @@ -1204,7 +1204,7 @@ paths: security: - ApiKeyAuth: [] tags: - - sandboxes + - Sandboxes requestBody: content: application/json: @@ -1239,7 +1239,7 @@ paths: security: - ApiKeyAuth: [] tags: - - sandboxes + - Sandboxes requestBody: content: application/json: @@ -1270,7 +1270,7 @@ paths: post: description: Create a new template tags: - - templates + - Templates security: - ApiKeyAuth: [] requestBody: @@ -1300,7 +1300,7 @@ paths: description: Create a new template deprecated: true tags: - - templates + - Templates security: - ApiKeyAuth: [] requestBody: @@ -1329,7 +1329,7 @@ paths: get: description: Get an upload link for a tar file containing build layer files tags: - - templates + - Templates security: - AccessTokenAuth: [] - ApiKeyAuth: [] @@ -1363,7 +1363,7 @@ paths: get: description: List all templates tags: - - templates + - Templates security: - ApiKeyAuth: [] - AccessTokenAuth: [] @@ -1393,7 +1393,7 @@ paths: description: Create a new template deprecated: true tags: - - 
templates + - Templates security: - AccessTokenAuth: [] requestBody: @@ -1422,7 +1422,7 @@ paths: get: description: List all builds for a template tags: - - templates + - Templates security: - ApiKeyAuth: [] parameters: @@ -1445,7 +1445,7 @@ paths: description: Rebuild an template deprecated: true tags: - - templates + - Templates security: - AccessTokenAuth: [] parameters: @@ -1471,7 +1471,7 @@ paths: delete: description: Delete a template tags: - - templates + - Templates security: - ApiKeyAuth: [] - AccessTokenAuth: [] @@ -1489,7 +1489,7 @@ paths: description: Update template deprecated: true tags: - - templates + - Templates security: - ApiKeyAuth: [] - AccessTokenAuth: [] @@ -1522,7 +1522,7 @@ paths: description: Start the build deprecated: true tags: - - templates + - Templates security: - AccessTokenAuth: [] parameters: @@ -1547,7 +1547,7 @@ paths: post: description: Start the build tags: - - templates + - Templates security: - ApiKeyAuth: [] parameters: @@ -1574,7 +1574,7 @@ paths: patch: description: Update template tags: - - templates + - Templates security: - ApiKeyAuth: [] - AccessTokenAuth: [] @@ -1606,7 +1606,7 @@ paths: get: description: Get template build info tags: - - templates + - Templates security: - AccessTokenAuth: [] - ApiKeyAuth: [] @@ -1655,7 +1655,7 @@ paths: get: description: Get template build logs tags: - - templates + - Templates security: - AccessTokenAuth: [] - ApiKeyAuth: [] @@ -1711,7 +1711,7 @@ paths: post: description: Assign tag(s) to a template build tags: - - tags + - Tags security: - ApiKeyAuth: [] requestBody: @@ -1739,7 +1739,7 @@ paths: delete: description: Delete multiple tags from templates tags: - - tags + - Tags security: - ApiKeyAuth: [] requestBody: @@ -1766,7 +1766,7 @@ paths: get: description: List all tags for a template tags: - - tags + - Tags security: - ApiKeyAuth: [] parameters: @@ -1795,7 +1795,7 @@ paths: get: description: Check if template with given alias exists tags: - - templates + - Templates security: 
- ApiKeyAuth: [] parameters: @@ -3948,14 +3948,11 @@ components: type: string type: object tags: -- name: files -- name: filesystem.Filesystem -- name: process.Process -- name: templates -- name: sandboxes -- name: auth -- name: access-tokens -- name: api-keys -- name: tags -- name: volumes +- name: Sandboxes +- name: Templates +- name: Filesystem +- name: Process +- name: Tags +- name: Teams +- name: Others security: [] diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index b93b857e..32549133 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1227,6 +1227,33 @@ def fill_empty_responses(spec: dict[str, Any]) -> None: print(f"==> Removed {stripped} default error responses") +def rename_and_reorder_tags(spec: dict[str, Any]) -> None: + """Rename tags and reorder them for the documentation sidebar.""" + TAG_RENAME = { + "sandboxes": "Sandboxes", + "templates": "Templates", + "filesystem.Filesystem": "Filesystem", + "process.Process": "Process", + "tags": "Tags", + "auth": "Teams", + "health": "Others", + "files": "Others", + } + TAG_ORDER = ["Sandboxes", "Templates", "Filesystem", "Process", "Tags", "Teams", "Others"] + + # Rename tags on all operations + for path_item in spec.get("paths", {}).values(): + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op or "tags" not in op: + continue + op["tags"] = [TAG_RENAME.get(t, t) for t in op["tags"]] + + # Rebuild the top-level tags list in the desired order + spec["tags"] = [{"name": t} for t in TAG_ORDER] + print(f"==> Renamed and reordered {len(TAG_ORDER)} tags") + + # --------------------------------------------------------------------------- # Entrypoint # --------------------------------------------------------------------------- @@ -1329,6 +1356,9 @@ def main() -> None: # Clean up unreferenced schemas left over from filtered paths remove_orphaned_schemas(merged) + 
# Rename and reorder tags for documentation sidebar + rename_and_reorder_tags(merged) + # Write output with open(output_path, "w") as f: yaml.dump(merged, f, default_flow_style=False, sort_keys=False, allow_unicode=True) diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index d0bbda99..f130988a 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -807,7 +807,7 @@ def analyze_spec(spec: dict) -> list[SpecIssue]: for path_str, methods in paths.items(): servers = methods.get("servers", []) # Check if any operation on this path is a sandbox endpoint - sandbox_tags = {"files", "filesystem.Filesystem", "process.Process"} + sandbox_tags = {"Others", "Filesystem", "Process"} is_sandbox_path = False for method, op in methods.items(): if not isinstance(op, dict): From b1e9f8190c545df9c8018b9baceb9d7b27c33d2f Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 15:41:30 +0100 Subject: [PATCH 18/37] Reorder paths by tag to match desired Mintlify sidebar order --- openapi-public.yml | 2536 ++++++++++++------------- scripts/generate_openapi_reference.py | 14 + 2 files changed, 1282 insertions(+), 1268 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 58fb8197..7beee69e 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -5,1708 +5,1520 @@ info: description: Complete E2B developer API. Platform endpoints are served on api.e2b.app. Sandbox endpoints (envd) are served on {port}-{sandboxID}.e2b.app. 
servers: -- &id006 +- &id001 url: https://api.e2b.app description: E2B Platform API paths: - /health: + /sandboxes: get: - summary: Check the health of the service - responses: - '204': - description: The service is healthy - '502': &id001 - description: Sandbox not found - content: - application/json: - schema: - type: object - required: - - sandboxID - - message - - code - properties: - message: - type: string - description: Error message - example: The sandbox was not found - code: - type: integer - description: Error code - example: 502 - sandboxID: - type: string - description: Identifier of the sandbox - example: i1234abcd5678efgh90jk - operationId: getHealth - security: [] + description: List all running sandboxes tags: - - Others - servers: - - &id002 - url: https://{port}-{sandboxID}.e2b.app - description: Sandbox API (envd) — runs inside each sandbox - variables: - port: - default: '49983' - description: Port number - sandboxID: - default: $SANDBOX_ID - description: Sandbox identifier - /metrics: - get: - summary: Get the stats of the service - security: - - &id003 - SandboxAccessTokenAuth: [] - responses: - '200': - description: The resource usage metrics of the service - content: - application/json: - schema: - $ref: '#/components/schemas/Metrics' - '502': *id001 - operationId: getMetrics - servers: - - *id002 - /envs: - get: - summary: Get the environment variables + - Sandboxes security: - - *id003 + - ApiKeyAuth: [] + parameters: + - name: metadata + in: query + description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). + Each key and values must be URL encoded. 
+ required: false + schema: + type: string responses: '200': - description: Environment variables + description: Successfully returned all running sandboxes content: application/json: schema: - $ref: '#/components/schemas/EnvVars' - '502': *id001 - operationId: getEnvVars - servers: - - *id002 - /files: - get: - summary: Download a file - tags: - - Others - security: - - *id003 - parameters: - - $ref: '#/components/parameters/FilePath' - - $ref: '#/components/parameters/User' - - $ref: '#/components/parameters/Signature' - - $ref: '#/components/parameters/SignatureExpiration' - responses: - '200': - $ref: '#/components/responses/DownloadSuccess' + type: array + items: + allOf: + - $ref: '#/components/schemas/ListedSandbox' '401': - $ref: '#/components/responses/InvalidUser' - '400': - $ref: '#/components/responses/InvalidPath' - '404': - $ref: '#/components/responses/FileNotFound' - '500': - $ref: '#/components/responses/InternalServerError' - '502': *id001 - operationId: downloadFile - post: - summary: Upload a file and ensure the parent directories exist. If the file - exists, it will be overwritten. 
- tags: - - Others - security: - - *id003 - parameters: - - $ref: '#/components/parameters/FilePath' - - $ref: '#/components/parameters/User' - - $ref: '#/components/parameters/Signature' - - $ref: '#/components/parameters/SignatureExpiration' - requestBody: - $ref: '#/components/requestBodies/File' - responses: - '200': - $ref: '#/components/responses/UploadSuccess' + $ref: '#/components/responses/401' '400': - $ref: '#/components/responses/InvalidPath' - '401': - $ref: '#/components/responses/InvalidUser' + $ref: '#/components/responses/400' '500': - $ref: '#/components/responses/InternalServerError' - '507': - $ref: '#/components/responses/NotEnoughDiskSpace' - '502': *id001 - operationId: uploadFile - servers: - - *id002 - /filesystem.Filesystem/CreateWatcher: - post: - tags: - - Filesystem - summary: CreateWatcher - description: Non-streaming versions of WatchDir - operationId: filesystem.Filesystem.CreateWatcher - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.CreateWatcherRequest' - required: true - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.CreateWatcherResponse' - '502': *id001 - security: - - *id003 - servers: - - *id002 - /filesystem.Filesystem/GetWatcherEvents: + $ref: '#/components/responses/500' + operationId: listSandboxes post: + description: Create a sandbox from the template tags: - - Filesystem - summary: GetWatcherEvents - operationId: filesystem.Filesystem.GetWatcherEvents - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - 
$ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.GetWatcherEventsRequest' - required: true - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.GetWatcherEventsResponse' - '502': *id001 + - Sandboxes security: - - *id003 - servers: - - *id002 - /filesystem.Filesystem/ListDir: - post: - tags: - - Filesystem - summary: ListDir - operationId: filesystem.Filesystem.ListDir - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + - ApiKeyAuth: [] requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.ListDirRequest' - required: true - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.ListDirResponse' - '502': *id001 - security: - - *id003 - servers: - - *id002 - /filesystem.Filesystem/MakeDir: - post: - tags: - - Filesystem - summary: MakeDir - operationId: filesystem.Filesystem.MakeDir - parameters: - - name: Connect-Protocol-Version - in: header required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: content: application/json: schema: - $ref: '#/components/schemas/filesystem.MakeDirRequest' - required: true + $ref: '#/components/schemas/NewSandbox' responses: - '200': - description: Success + '201': + description: The sandbox was created successfully content: application/json: schema: - $ref: '#/components/schemas/filesystem.MakeDirResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/Sandbox' + '401': + $ref: 
'#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxes servers: - - *id002 - /filesystem.Filesystem/Move: - post: + - *id001 + /v2/sandboxes: + get: + description: List all sandboxes tags: - - Filesystem - summary: Move - operationId: filesystem.Filesystem.Move - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.MoveRequest' - required: true - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.MoveResponse' - '502': *id001 + - Sandboxes security: - - *id003 - servers: - - *id002 - /filesystem.Filesystem/Remove: - post: - tags: - - Filesystem - summary: Remove - operationId: filesystem.Filesystem.Remove + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header - required: true + - name: metadata + in: query + description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). + Each key and value must be URL encoded. 
+ required: false schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header + type: string + - name: state + in: query + description: Filter sandboxes by one or more states + required: false schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.RemoveRequest' - required: true + type: array + items: + $ref: '#/components/schemas/SandboxState' + style: form + explode: false + - $ref: '#/components/parameters/paginationNextToken' + - $ref: '#/components/parameters/paginationLimit' responses: '200': - description: Success + description: Successfully returned all running sandboxes content: application/json: schema: - $ref: '#/components/schemas/filesystem.RemoveResponse' - '502': *id001 - security: - - *id003 + type: array + items: + allOf: + - $ref: '#/components/schemas/ListedSandbox' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + operationId: listSandboxesV2 servers: - - *id002 - /filesystem.Filesystem/RemoveWatcher: - post: + - *id001 + /sandboxes/metrics: + get: + description: List metrics for given sandboxes tags: - - Filesystem - summary: RemoveWatcher - operationId: filesystem.Filesystem.RemoveWatcher + - Sandboxes + security: + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header + - name: sandbox_ids + in: query required: true + description: Comma-separated list of sandbox IDs to get metrics for + explode: false schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.RemoveWatcherRequest' - required: true + type: array + items: + type: string + maxItems: 100 + uniqueItems: true responses: 
'200': - description: Success + description: Successfully returned all running sandboxes with metrics content: application/json: schema: - $ref: '#/components/schemas/filesystem.RemoveWatcherResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/SandboxesWithMetrics' + '401': + $ref: '#/components/responses/401' + '400': + $ref: '#/components/responses/400' + '500': + $ref: '#/components/responses/500' + operationId: listSandboxesMetrics servers: - - *id002 - /filesystem.Filesystem/Stat: - post: + - *id001 + /sandboxes/{sandboxID}/logs: + get: + description: Get sandbox logs. + deprecated: true tags: - - Filesystem - summary: Stat - operationId: filesystem.Filesystem.Stat + - Sandboxes + security: + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header - required: true + - $ref: '#/components/parameters/sandboxID' + - in: query + name: start schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header + type: integer + format: int64 + minimum: 0 + description: Starting timestamp of the logs that should be returned in milliseconds + - in: query + name: limit schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/filesystem.StatRequest' - required: true + default: 1000 + format: int32 + minimum: 0 + type: integer + description: Maximum number of logs that should be returned responses: '200': - description: Success + description: Successfully returned the sandbox logs content: application/json: schema: - $ref: '#/components/schemas/filesystem.StatResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/SandboxLogs' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: getSandboxLogs servers: - - *id002 - /filesystem.Filesystem/WatchDir: - post: + - *id001 + 
/sandboxes/{sandboxID}: + get: + description: Get a sandbox by id tags: - - Filesystem - summary: WatchDir - description: Server-streaming RPC. Use the Connect protocol with streaming support. - operationId: filesystem.Filesystem.WatchDir - requestBody: - content: - application/connect+json: - schema: - $ref: '#/components/schemas/filesystem.WatchDirRequest' - required: true + - Sandboxes + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/sandboxID' responses: '200': - description: Stream of WatchDirResponse events + description: Successfully returned the sandbox content: - application/connect+json: + application/json: schema: - $ref: '#/components/schemas/filesystem.WatchDirResponse' - '502': *id001 + $ref: '#/components/schemas/SandboxDetail' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: getSandbox + delete: + description: Kill a sandbox + tags: + - Sandboxes security: - - *id003 + - ApiKeyAuth: [] parameters: - - &id004 - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - &id005 - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: The sandbox was killed successfully + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: deleteSandbox servers: - - *id002 - /process.Process/CloseStdin: - post: + - *id001 + /sandboxes/{sandboxID}/metrics: + get: + description: Get sandbox metrics tags: - - Process - summary: CloseStdin - description: "Close stdin to signal EOF to the process.\n Only works for non-PTY\ - \ processes. For PTY, send Ctrl+D (0x04) instead." 
- operationId: process.Process.CloseStdin + - Sandboxes + security: + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header - required: true + - $ref: '#/components/parameters/sandboxID' + - in: query + name: start schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in seconds, for + which the metrics are returned. + - in: query + name: end schema: - $ref: '#/components/schemas/connect-timeout-header' - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/process.CloseStdinRequest' - required: true + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. responses: '200': - description: Success + description: Successfully returned the sandbox metrics content: application/json: schema: - $ref: '#/components/schemas/process.CloseStdinResponse' - '502': *id001 - security: - - *id003 - servers: - - *id002 - /process.Process/Connect: - post: - tags: - - Process - summary: Connect - description: Server-streaming RPC. Use the Connect protocol with streaming support. 
- operationId: process.Process.Connect - requestBody: - content: - application/connect+json: - schema: - $ref: '#/components/schemas/process.ConnectRequest' - required: true - responses: - '200': - description: Stream of ConnectResponse events - content: - application/connect+json: - schema: - $ref: '#/components/schemas/process.ConnectResponse' - '502': *id001 + type: array + items: + $ref: '#/components/schemas/SandboxMetric' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + operationId: getSandboxMetrics + servers: + - *id001 + /sandboxes/{sandboxID}/pause: + post: + description: Pause the sandbox + tags: + - Sandboxes security: - - *id003 + - ApiKeyAuth: [] parameters: - - *id004 - - *id005 + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: The sandbox was paused successfully and can be resumed + '409': + $ref: '#/components/responses/409' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxPause servers: - - *id002 - /process.Process/List: + - *id001 + /sandboxes/{sandboxID}/resume: post: + deprecated: true + description: Resume the sandbox tags: - - Process - summary: List - operationId: process.Process.List + - Sandboxes + security: + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + - $ref: '#/components/parameters/sandboxID' requestBody: + required: true content: application/json: schema: - $ref: '#/components/schemas/process.ListRequest' - required: true + $ref: '#/components/schemas/ResumedSandbox' responses: - '200': - description: Success + '201': + description: 
The sandbox was resumed successfully content: application/json: schema: - $ref: '#/components/schemas/process.ListResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/Sandbox' + '409': + $ref: '#/components/responses/409' + '404': + $ref: '#/components/responses/404' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxResume servers: - - *id002 - /process.Process/SendInput: + - *id001 + /sandboxes/{sandboxID}/connect: post: + description: Returns sandbox details. If the sandbox is paused, it will be resumed. + TTL is only extended. tags: - - Process - summary: SendInput - operationId: process.Process.SendInput + - Sandboxes + security: + - ApiKeyAuth: [] parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + - $ref: '#/components/parameters/sandboxID' requestBody: + required: true content: application/json: schema: - $ref: '#/components/schemas/process.SendInputRequest' - required: true + $ref: '#/components/schemas/ConnectSandbox' responses: '200': - description: Success + description: The sandbox was already running content: application/json: schema: - $ref: '#/components/schemas/process.SendInputResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/Sandbox' + '201': + description: The sandbox was resumed successfully + content: + application/json: + schema: + $ref: '#/components/schemas/Sandbox' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxConnect servers: - - *id002 - /process.Process/SendSignal: + - *id001 + /sandboxes/{sandboxID}/timeout: post: + description: Set the timeout for the 
sandbox. The sandbox will expire x seconds + from the time of the request. Calling this method multiple times overwrites + the TTL, each time using the current timestamp as the starting point to measure + the timeout duration. + security: + - ApiKeyAuth: [] tags: - - Process - summary: SendSignal - operationId: process.Process.SendSignal - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + - Sandboxes requestBody: content: application/json: schema: - $ref: '#/components/schemas/process.SendSignalRequest' - required: true + type: object + required: + - timeout + properties: + timeout: + description: Timeout in seconds from the current time after which + the sandbox should expire + type: integer + format: int32 + minimum: 0 + parameters: + - $ref: '#/components/parameters/sandboxID' responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/process.SendSignalResponse' - '502': *id001 - security: - - *id003 + '204': + description: Successfully set the sandbox timeout + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxTimeout servers: - - *id002 - /process.Process/Start: + - *id001 + /sandboxes/{sandboxID}/refreshes: post: + description: Refresh the sandbox extending its time to live + security: + - ApiKeyAuth: [] tags: - - Process - summary: Start - description: Server-streaming RPC. Use the Connect protocol with streaming support. 
- operationId: process.Process.Start + - Sandboxes requestBody: content: - application/connect+json: + application/json: schema: - $ref: '#/components/schemas/process.StartRequest' - required: true - responses: - '200': - description: Stream of StartResponse events - content: - application/connect+json: - schema: - $ref: '#/components/schemas/process.StartResponse' - '502': *id001 - security: - - *id003 + type: object + properties: + duration: + description: Duration for which the sandbox should be kept alive + in seconds + type: integer + maximum: 3600 + minimum: 0 parameters: - - *id004 - - *id005 + - $ref: '#/components/parameters/sandboxID' + responses: + '204': + description: Successfully refreshed the sandbox + '401': + $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' + '500': + $ref: '#/components/responses/500' + operationId: postSandboxRefreshes servers: - - *id002 - /process.Process/StreamInput: + - *id001 + /v3/templates: post: + description: Create a new template tags: - - Process - summary: StreamInput - description: Client-streaming RPC. Client input stream ensures ordering of messages. - Use the Connect protocol with streaming support. 
- operationId: process.Process.StreamInput + - Templates + security: + - ApiKeyAuth: [] requestBody: + required: true content: - application/connect+json: + application/json: schema: - $ref: '#/components/schemas/process.StreamInputRequest' - required: true + $ref: '#/components/schemas/TemplateBuildRequestV3' responses: - '200': - description: Stream of StreamInputResponse events + '202': + description: The build was requested successfully content: - application/connect+json: + application/json: schema: - $ref: '#/components/schemas/process.StreamInputResponse' - '502': *id001 - security: - - *id003 - parameters: - - *id004 - - *id005 - servers: - - *id002 - /process.Process/Update: - post: - tags: - - Process - summary: Update - operationId: process.Process.Update - parameters: - - name: Connect-Protocol-Version - in: header - required: true - schema: - $ref: '#/components/schemas/connect-protocol-version' - - name: Connect-Timeout-Ms - in: header - schema: - $ref: '#/components/schemas/connect-timeout-header' + $ref: '#/components/schemas/TemplateRequestResponseV3' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: postTemplatesV3 + servers: + - *id001 + /v2/templates: + post: + description: Create a new template + deprecated: true + tags: + - Templates + security: + - ApiKeyAuth: [] requestBody: + required: true content: application/json: schema: - $ref: '#/components/schemas/process.UpdateRequest' - required: true + $ref: '#/components/schemas/TemplateBuildRequestV2' responses: - '200': - description: Success + '202': + description: The build was requested successfully content: application/json: schema: - $ref: '#/components/schemas/process.UpdateResponse' - '502': *id001 - security: - - *id003 + $ref: '#/components/schemas/TemplateLegacy' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: 
'#/components/responses/500' + operationId: postTemplatesV2 servers: - - *id002 - /teams: + - *id001 + /templates/{templateID}/files/{hash}: get: - description: List all teams + description: Get an upload link for a tar file containing build layer files tags: - - Teams + - Templates security: - AccessTokenAuth: [] + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - in: path + name: hash + required: true + schema: + type: string + description: Hash of the files responses: - '200': - description: Successfully returned all teams - content: - application/json: - schema: - type: array - items: - allOf: - - $ref: '#/components/schemas/Team' + '400': + $ref: '#/components/responses/400' '401': $ref: '#/components/responses/401' + '404': + $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: listTeams + '200': + description: Upload link for the tar file containing build layer files + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildFileUpload' + operationId: getTemplateFile servers: - - *id006 - /teams/{teamID}/metrics: + - *id001 + /templates: get: - description: Get metrics for the team + description: List all templates tags: - - Teams + - Templates security: - ApiKeyAuth: [] + - AccessTokenAuth: [] parameters: - - $ref: '#/components/parameters/teamID' - - in: query - name: start - schema: - type: integer - format: int64 - minimum: 0 - description: Unix timestamp for the start of the interval, in seconds, for - which the metrics are returned. - in: query - name: end + required: false + name: teamID schema: - type: integer - format: int64 - minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for which - the metrics are returned. 
+ type: string + description: Identifier of the team responses: '200': - description: Successfully returned the team metrics + description: Successfully returned all templates content: application/json: schema: type: array items: - $ref: '#/components/schemas/TeamMetric' - '400': - $ref: '#/components/responses/400' + allOf: + - $ref: '#/components/schemas/Template' '401': $ref: '#/components/responses/401' - '403': - $ref: '#/components/responses/403' '500': $ref: '#/components/responses/500' - operationId: getTeamMetrics - servers: - - *id006 - /teams/{teamID}/metrics/max: - get: - description: Get the maximum metrics for the team in the given interval + operationId: listTemplates + post: + description: Create a new template + deprecated: true tags: - - Teams + - Templates security: - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/teamID' - - in: query - name: start - schema: - type: integer - format: int64 - minimum: 0 - description: Unix timestamp for the start of the interval, in seconds, for - which the metrics are returned. - - in: query - name: end - schema: - type: integer - format: int64 - minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for which - the metrics are returned. 
- - in: query - name: metric + - AccessTokenAuth: [] + requestBody: required: true - schema: - type: string - enum: - - concurrent_sandboxes - - sandbox_start_rate - description: Metric to retrieve the maximum value for + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequest' responses: - '200': - description: Successfully returned the team metrics + '202': + description: The build was accepted content: application/json: schema: - $ref: '#/components/schemas/MaxTeamMetric' + $ref: '#/components/schemas/TemplateLegacy' '400': $ref: '#/components/responses/400' '401': $ref: '#/components/responses/401' - '403': - $ref: '#/components/responses/403' '500': $ref: '#/components/responses/500' - operationId: getTeamMetricsMax + operationId: postTemplates servers: - - *id006 - /sandboxes: + - *id001 + /templates/{templateID}: get: - description: List all running sandboxes + description: List all builds for a template tags: - - Sandboxes + - Templates security: - ApiKeyAuth: [] parameters: - - name: metadata - in: query - description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). - Each key and values must be URL encoded. 
- required: false - schema: - type: string + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/paginationNextToken' + - $ref: '#/components/parameters/paginationLimit' responses: '200': - description: Successfully returned all running sandboxes + description: Successfully returned the template with its builds content: application/json: schema: - type: array - items: - allOf: - - $ref: '#/components/schemas/ListedSandbox' + $ref: '#/components/schemas/TemplateWithBuilds' '401': $ref: '#/components/responses/401' - '400': - $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: listSandboxes + operationId: getTemplate post: - description: Create a sandbox from the template + description: Rebuild a template + deprecated: true tags: - - Sandboxes + - Templates + security: + - AccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildRequest' + responses: + '202': + description: The build was accepted + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateLegacy' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: postTemplate + delete: + description: Delete a template + tags: + - Templates + security: + - ApiKeyAuth: [] + - AccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + responses: + '204': + description: The template was deleted successfully + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: deleteTemplate + patch: + description: Update template + deprecated: true + tags: + - Templates security: - ApiKeyAuth: [] + - AccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' requestBody: required: true content: application/json: schema: - $ref: '#/components/schemas/NewSandbox' 
+ $ref: '#/components/schemas/TemplateUpdateRequest' + responses: + '200': + description: The template was updated successfully + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateUpdateResponse' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: patchTemplate + servers: + - *id001 + /templates/{templateID}/builds/{buildID}: + post: + description: Start the build + deprecated: true + tags: + - Templates + security: + - AccessTokenAuth: [] + parameters: + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' responses: - '201': - description: The sandbox was created successfully - content: + '202': + description: The build has started + content: &id002 application/json: schema: - $ref: '#/components/schemas/Sandbox' + type: object + description: Empty response '401': $ref: '#/components/responses/401' - '400': - $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: postSandboxes + operationId: postTemplateBuild servers: - - *id006 - /v2/sandboxes: - get: - description: List all sandboxes + - *id001 + /v2/templates/{templateID}/builds/{buildID}: + post: + description: Start the build tags: - - Sandboxes + - Templates security: - ApiKeyAuth: [] parameters: - - name: metadata - in: query - description: Metadata query used to filter the sandboxes (e.g. "user=abc&app=prod"). - Each key and values must be URL encoded. 
- required: false - schema: - type: string - - name: state - in: query - description: Filter sandboxes by one or more states - required: false - schema: - type: array - items: - $ref: '#/components/schemas/SandboxState' - style: form - explode: false - - $ref: '#/components/parameters/paginationNextToken' - - $ref: '#/components/parameters/paginationLimit' + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateBuildStartV2' responses: - '200': - description: Successfully returned all running sandboxes - content: - application/json: - schema: - type: array - items: - allOf: - - $ref: '#/components/schemas/ListedSandbox' + '202': + description: The build has started + content: *id002 '401': $ref: '#/components/responses/401' - '400': - $ref: '#/components/responses/400' '500': $ref: '#/components/responses/500' - operationId: listSandboxesV2 + operationId: postTemplateBuildV2 servers: - - *id006 - /sandboxes/metrics: - get: - description: List metrics for given sandboxes + - *id001 + /v2/templates/{templateID}: + patch: + description: Update template tags: - - Sandboxes + - Templates security: - ApiKeyAuth: [] + - AccessTokenAuth: [] parameters: - - name: sandbox_ids - in: query + - $ref: '#/components/parameters/templateID' + requestBody: required: true - description: Comma-separated list of sandbox IDs to get metrics for - explode: false - schema: - type: array - items: - type: string - maxItems: 100 - uniqueItems: true + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateUpdateRequest' responses: '200': - description: Successfully returned all running sandboxes with metrics + description: The template was updated successfully content: application/json: schema: - $ref: '#/components/schemas/SandboxesWithMetrics' - '401': - $ref: '#/components/responses/401' + $ref: 
'#/components/schemas/TemplateUpdateResponse' '400': $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: listSandboxesMetrics + operationId: patchTemplateV2 servers: - - *id006 - /sandboxes/{sandboxID}/logs: + - *id001 + /templates/{templateID}/builds/{buildID}/status: get: - description: Get sandbox logs. - deprecated: true + description: Get template build info tags: - - Sandboxes + - Templates security: + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - - $ref: '#/components/parameters/sandboxID' + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' - in: query - name: start + name: logsOffset schema: + default: 0 type: integer - format: int64 + format: int32 minimum: 0 - description: Starting timestamp of the logs that should be returned in milliseconds + description: Index of the starting build log that should be returned with + the template - in: query name: limit schema: - default: 1000 + default: 100 + type: integer format: int32 minimum: 0 - type: integer + maximum: 100 description: Maximum number of logs that should be returned + - in: query + name: level + schema: + $ref: '#/components/schemas/LogLevel' responses: '200': - description: Successfully returned the sandbox logs - content: - application/json: - schema: - $ref: '#/components/schemas/SandboxLogs' - '404': - $ref: '#/components/responses/404' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: getSandboxLogs - servers: - - *id006 - /sandboxes/{sandboxID}: - get: - description: Get a sandbox by id - tags: - - Sandboxes - security: - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/sandboxID' - responses: - '200': - description: Successfully returned the sandbox + description: Successfully returned the template content: application/json: schema: - $ref: '#/components/schemas/SandboxDetail' - '404': - 
$ref: '#/components/responses/404' + $ref: '#/components/schemas/TemplateBuildInfo' '401': $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: getSandbox - delete: - description: Kill a sandbox - tags: - - Sandboxes - security: - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/sandboxID' - responses: - '204': - description: The sandbox was killed successfully '404': $ref: '#/components/responses/404' - '401': - $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: deleteSandbox + operationId: getTemplateBuildStatus servers: - - *id006 - /sandboxes/{sandboxID}/metrics: + - *id001 + /templates/{templateID}/builds/{buildID}/logs: get: - description: Get sandbox metrics + description: Get template build logs tags: - - Sandboxes + - Templates security: + - AccessTokenAuth: [] - ApiKeyAuth: [] parameters: - - $ref: '#/components/parameters/sandboxID' + - $ref: '#/components/parameters/templateID' + - $ref: '#/components/parameters/buildID' - in: query - name: start + name: cursor schema: type: integer format: int64 minimum: 0 - description: Unix timestamp for the start of the interval, in seconds, for - which the metrics are returned. + description: Starting timestamp of the logs that should be returned in milliseconds - in: query - name: end + name: limit schema: + default: 100 type: integer - format: int64 + format: int32 minimum: 0 - description: Unix timestamp for the end of the interval, in seconds, for which - the metrics are returned. 
+ maximum: 100 + description: Maximum number of logs that should be returned + - in: query + name: direction + schema: + $ref: '#/components/schemas/LogsDirection' + - in: query + name: level + schema: + $ref: '#/components/schemas/LogLevel' + - in: query + name: source + schema: + $ref: '#/components/schemas/LogsSource' + description: Source of the logs that should be returned from responses: '200': - description: Successfully returned the sandbox metrics + description: Successfully returned the template build logs content: application/json: schema: - type: array - items: - $ref: '#/components/schemas/SandboxMetric' - '400': - $ref: '#/components/responses/400' + $ref: '#/components/schemas/TemplateBuildLogsResponse' '401': $ref: '#/components/responses/401' '404': $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: getSandboxMetrics + operationId: getTemplateBuildLogs servers: - - *id006 - /sandboxes/{sandboxID}/pause: - post: - description: Pause the sandbox + - *id001 + /templates/aliases/{alias}: + get: + description: Check if template with given alias exists tags: - - Sandboxes + - Templates security: - ApiKeyAuth: [] parameters: - - $ref: '#/components/parameters/sandboxID' + - name: alias + in: path + required: true + schema: + type: string + description: Template alias responses: - '204': - description: The sandbox was paused successfully and can be resumed - '409': - $ref: '#/components/responses/409' + '200': + description: Successfully queried template by alias + content: + application/json: + schema: + $ref: '#/components/schemas/TemplateAliasResponse' + '400': + $ref: '#/components/responses/400' + '403': + $ref: '#/components/responses/403' '404': $ref: '#/components/responses/404' - '401': - $ref: '#/components/responses/401' '500': $ref: '#/components/responses/500' - operationId: postSandboxPause + operationId: getTemplatesAlias servers: - - *id006 - /sandboxes/{sandboxID}/resume: + - *id001 + 
/filesystem.Filesystem/CreateWatcher: post: - deprecated: true - description: Resume the sandbox tags: - - Sandboxes - security: - - ApiKeyAuth: [] + - Filesystem + summary: CreateWatcher + description: Non-streaming versions of WatchDir + operationId: filesystem.Filesystem.CreateWatcher parameters: - - $ref: '#/components/parameters/sandboxID' - requestBody: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/ResumedSandbox' + $ref: '#/components/schemas/filesystem.CreateWatcherRequest' + required: true responses: - '201': - description: The sandbox was resumed successfully + '200': + description: Success content: application/json: schema: - $ref: '#/components/schemas/Sandbox' - '409': - $ref: '#/components/responses/409' - '404': - $ref: '#/components/responses/404' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postSandboxResume + $ref: '#/components/schemas/filesystem.CreateWatcherResponse' + '502': &id003 + description: Sandbox not found + content: + application/json: + schema: + type: object + required: + - sandboxID + - message + - code + properties: + message: + type: string + description: Error message + example: The sandbox was not found + code: + type: integer + description: Error code + example: 502 + sandboxID: + type: string + description: Identifier of the sandbox + example: i1234abcd5678efgh90jk + security: + - &id004 + SandboxAccessTokenAuth: [] servers: - - *id006 - /sandboxes/{sandboxID}/connect: + - &id005 + url: https://{port}-{sandboxID}.e2b.app + description: Sandbox API (envd) — runs inside each sandbox + variables: + port: + default: '49983' + description: Port number + sandboxID: + default: $SANDBOX_ID + description: 
Sandbox identifier + /filesystem.Filesystem/GetWatcherEvents: post: - description: Returns sandbox details. If the sandbox is paused, it will be resumed. - TTL is only extended. tags: - - Sandboxes - security: - - ApiKeyAuth: [] + - Filesystem + summary: GetWatcherEvents + operationId: filesystem.Filesystem.GetWatcherEvents parameters: - - $ref: '#/components/parameters/sandboxID' - requestBody: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/ConnectSandbox' + $ref: '#/components/schemas/filesystem.GetWatcherEventsRequest' + required: true responses: '200': - description: The sandbox was already running - content: - application/json: - schema: - $ref: '#/components/schemas/Sandbox' - '201': - description: The sandbox was resumed successfully + description: Success content: application/json: schema: - $ref: '#/components/schemas/Sandbox' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: postSandboxConnect + $ref: '#/components/schemas/filesystem.GetWatcherEventsResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /sandboxes/{sandboxID}/timeout: + - *id005 + /filesystem.Filesystem/ListDir: post: - description: Set the timeout for the sandbox. The sandbox will expire x seconds - from the time of the request. Calling this method multiple times overwrites - the TTL, each time using the current timestamp as the starting point to measure - the timeout duration. 
- security: - - ApiKeyAuth: [] tags: - - Sandboxes + - Filesystem + summary: ListDir + operationId: filesystem.Filesystem.ListDir + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' requestBody: content: application/json: schema: - type: object - required: - - timeout - properties: - timeout: - description: Timeout in seconds from the current time after which - the sandbox should expire - type: integer - format: int32 - minimum: 0 - parameters: - - $ref: '#/components/parameters/sandboxID' + $ref: '#/components/schemas/filesystem.ListDirRequest' + required: true responses: - '204': - description: Successfully set the sandbox timeout - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: postSandboxTimeout + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.ListDirResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /sandboxes/{sandboxID}/refreshes: + - *id005 + /filesystem.Filesystem/MakeDir: post: - description: Refresh the sandbox extending its time to live - security: - - ApiKeyAuth: [] tags: - - Sandboxes + - Filesystem + summary: MakeDir + operationId: filesystem.Filesystem.MakeDir + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' requestBody: content: application/json: schema: - type: object - properties: - duration: - description: Duration for which the sandbox should be kept alive - in seconds - type: integer - maximum: 3600 - minimum: 0 - parameters: - - $ref: 
'#/components/parameters/sandboxID' + $ref: '#/components/schemas/filesystem.MakeDirRequest' + required: true responses: - '204': - description: Successfully refreshed the sandbox - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: postSandboxRefreshes + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MakeDirResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /v3/templates: + - *id005 + /filesystem.Filesystem/Move: post: - description: Create a new template tags: - - Templates - security: - - ApiKeyAuth: [] + - Filesystem + summary: Move + operationId: filesystem.Filesystem.Move + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MoveRequest' + required: true + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.MoveResponse' + '502': *id003 + security: + - *id004 + servers: + - *id005 + /filesystem.Filesystem/Remove: + post: + tags: + - Filesystem + summary: Remove + operationId: filesystem.Filesystem.Remove + parameters: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/TemplateBuildRequestV3' + $ref: '#/components/schemas/filesystem.RemoveRequest' + required: true responses: - '202': - description: The build was requested successfully + '200': + description: 
Success content: application/json: schema: - $ref: '#/components/schemas/TemplateRequestResponseV3' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplatesV3 + $ref: '#/components/schemas/filesystem.RemoveResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /v2/templates: + - *id005 + /filesystem.Filesystem/RemoveWatcher: post: - description: Create a new template - deprecated: true tags: - - Templates - security: - - ApiKeyAuth: [] - requestBody: + - Filesystem + summary: RemoveWatcher + operationId: filesystem.Filesystem.RemoveWatcher + parameters: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/TemplateBuildRequestV2' + $ref: '#/components/schemas/filesystem.RemoveWatcherRequest' + required: true responses: - '202': - description: The build was requested successfully + '200': + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateLegacy' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplatesV2 + $ref: '#/components/schemas/filesystem.RemoveWatcherResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /templates/{templateID}/files/{hash}: - get: - description: Get an upload link for a tar file containing build layer files + - *id005 + /filesystem.Filesystem/Stat: + post: tags: - - Templates - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] + - Filesystem + summary: Stat + operationId: filesystem.Filesystem.Stat parameters: - - $ref: '#/components/parameters/templateID' - - in: path - name: hash + - name: 
Connect-Protocol-Version + in: header required: true schema: - type: string - description: Hash of the files + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/filesystem.StatRequest' + required: true responses: - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' '200': - description: Upload link for the tar file containing build layer files + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateBuildFileUpload' - operationId: getTemplateFile + $ref: '#/components/schemas/filesystem.StatResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 - /templates: - get: - description: List all templates + - *id005 + /filesystem.Filesystem/WatchDir: + post: tags: - - Templates + - Filesystem + summary: WatchDir + description: Server-streaming RPC. Use the Connect protocol with streaming support. 
+ operationId: filesystem.Filesystem.WatchDir + requestBody: + content: + application/connect+json: + schema: + $ref: '#/components/schemas/filesystem.WatchDirRequest' + required: true + responses: + '200': + description: Stream of WatchDirResponse events + content: + application/connect+json: + schema: + $ref: '#/components/schemas/filesystem.WatchDirResponse' + '502': *id003 security: - - ApiKeyAuth: [] - - AccessTokenAuth: [] + - *id004 parameters: - - in: query - required: false - name: teamID + - &id006 + name: Connect-Protocol-Version + in: header + required: true schema: - type: string - description: Identifier of the team + $ref: '#/components/schemas/connect-protocol-version' + - &id007 + name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + servers: + - *id005 + /process.Process/CloseStdin: + post: + tags: + - Process + summary: CloseStdin + description: "Close stdin to signal EOF to the process.\n Only works for non-PTY\ + \ processes. For PTY, send Ctrl+D (0x04) instead." 
+ operationId: process.Process.CloseStdin + parameters: + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.CloseStdinRequest' + required: true responses: '200': - description: Successfully returned all templates + description: Success content: application/json: schema: - type: array - items: - allOf: - - $ref: '#/components/schemas/Template' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: listTemplates + $ref: '#/components/schemas/process.CloseStdinResponse' + '502': *id003 + security: + - *id004 + servers: + - *id005 + /process.Process/Connect: post: - description: Create a new template - deprecated: true tags: - - Templates - security: - - AccessTokenAuth: [] + - Process + summary: Connect + description: Server-streaming RPC. Use the Connect protocol with streaming support. 
+ operationId: process.Process.Connect requestBody: - required: true content: - application/json: + application/connect+json: schema: - $ref: '#/components/schemas/TemplateBuildRequest' + $ref: '#/components/schemas/process.ConnectRequest' + required: true responses: - '202': - description: The build was accepted + '200': + description: Stream of ConnectResponse events content: - application/json: + application/connect+json: schema: - $ref: '#/components/schemas/TemplateLegacy' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplates + $ref: '#/components/schemas/process.ConnectResponse' + '502': *id003 + security: + - *id004 + parameters: + - *id006 + - *id007 servers: - - *id006 - /templates/{templateID}: - get: - description: List all builds for a template + - *id005 + /process.Process/List: + post: tags: - - Templates - security: - - ApiKeyAuth: [] + - Process + summary: List + operationId: process.Process.List parameters: - - $ref: '#/components/parameters/templateID' - - $ref: '#/components/parameters/paginationNextToken' - - $ref: '#/components/parameters/paginationLimit' + - name: Connect-Protocol-Version + in: header + required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.ListRequest' + required: true responses: '200': - description: Successfully returned the template with its builds + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateWithBuilds' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: getTemplate + $ref: '#/components/schemas/process.ListResponse' + '502': *id003 + security: + - *id004 + servers: + - *id005 + 
/process.Process/SendInput: post: - description: Rebuild an template - deprecated: true tags: - - Templates - security: - - AccessTokenAuth: [] + - Process + summary: SendInput + operationId: process.Process.SendInput parameters: - - $ref: '#/components/parameters/templateID' - requestBody: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: '#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/TemplateBuildRequest' + $ref: '#/components/schemas/process.SendInputRequest' + required: true responses: - '202': - description: The build was accepted + '200': + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateLegacy' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplate - delete: - description: Delete a template - tags: - - Templates + $ref: '#/components/schemas/process.SendInputResponse' + '502': *id003 security: - - ApiKeyAuth: [] - - AccessTokenAuth: [] - parameters: - - $ref: '#/components/parameters/templateID' - responses: - '204': - description: The template was deleted successfully - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: deleteTemplate - patch: - description: Update template - deprecated: true + - *id004 + servers: + - *id005 + /process.Process/SendSignal: + post: tags: - - Templates - security: - - ApiKeyAuth: [] - - AccessTokenAuth: [] + - Process + summary: SendSignal + operationId: process.Process.SendSignal parameters: - - $ref: '#/components/parameters/templateID' - requestBody: + - name: Connect-Protocol-Version + in: header required: true + schema: + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header + schema: + $ref: 
'#/components/schemas/connect-timeout-header' + requestBody: content: application/json: schema: - $ref: '#/components/schemas/TemplateUpdateRequest' + $ref: '#/components/schemas/process.SendSignalRequest' + required: true responses: '200': - description: The template was updated successfully + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateUpdateResponse' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: patchTemplate - servers: - - *id006 - /templates/{templateID}/builds/{buildID}: - post: - description: Start the build - deprecated: true - tags: - - Templates + $ref: '#/components/schemas/process.SendSignalResponse' + '502': *id003 security: - - AccessTokenAuth: [] - parameters: - - $ref: '#/components/parameters/templateID' - - $ref: '#/components/parameters/buildID' - responses: - '202': - description: The build has started - content: &id007 - application/json: - schema: - type: object - description: Empty response - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplateBuild + - *id004 servers: - - *id006 - /v2/templates/{templateID}/builds/{buildID}: + - *id005 + /process.Process/Start: post: - description: Start the build tags: - - Templates - security: - - ApiKeyAuth: [] - parameters: - - $ref: '#/components/parameters/templateID' - - $ref: '#/components/parameters/buildID' + - Process + summary: Start + description: Server-streaming RPC. Use the Connect protocol with streaming support. 
+ operationId: process.Process.Start requestBody: - required: true content: - application/json: + application/connect+json: schema: - $ref: '#/components/schemas/TemplateBuildStartV2' + $ref: '#/components/schemas/process.StartRequest' + required: true responses: - '202': - description: The build has started - content: *id007 - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: postTemplateBuildV2 - servers: - - *id006 - /v2/templates/{templateID}: - patch: - description: Update template - tags: - - Templates + '200': + description: Stream of StartResponse events + content: + application/connect+json: + schema: + $ref: '#/components/schemas/process.StartResponse' + '502': *id003 security: - - ApiKeyAuth: [] - - AccessTokenAuth: [] + - *id004 parameters: - - $ref: '#/components/parameters/templateID' + - *id006 + - *id007 + servers: + - *id005 + /process.Process/StreamInput: + post: + tags: + - Process + summary: StreamInput + description: Client-streaming RPC. Client input stream ensures ordering of messages. + Use the Connect protocol with streaming support. 
+ operationId: process.Process.StreamInput requestBody: - required: true content: - application/json: + application/connect+json: schema: - $ref: '#/components/schemas/TemplateUpdateRequest' + $ref: '#/components/schemas/process.StreamInputRequest' + required: true responses: '200': - description: The template was updated successfully + description: Stream of StreamInputResponse events content: - application/json: + application/connect+json: schema: - $ref: '#/components/schemas/TemplateUpdateResponse' - '400': - $ref: '#/components/responses/400' - '401': - $ref: '#/components/responses/401' - '500': - $ref: '#/components/responses/500' - operationId: patchTemplateV2 - servers: - - *id006 - /templates/{templateID}/builds/{buildID}/status: - get: - description: Get template build info - tags: - - Templates + $ref: '#/components/schemas/process.StreamInputResponse' + '502': *id003 security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] + - *id004 parameters: - - $ref: '#/components/parameters/templateID' - - $ref: '#/components/parameters/buildID' - - in: query - name: logsOffset - schema: - default: 0 - type: integer - format: int32 - minimum: 0 - description: Index of the starting build log that should be returned with - the template - - in: query - name: limit - schema: - default: 100 - type: integer - format: int32 - minimum: 0 - maximum: 100 - description: Maximum number of logs that should be returned - - in: query - name: level - schema: - $ref: '#/components/schemas/LogLevel' - responses: - '200': - description: Successfully returned the template - content: - application/json: - schema: - $ref: '#/components/schemas/TemplateBuildInfo' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: getTemplateBuildStatus + - *id006 + - *id007 servers: - - *id006 - /templates/{templateID}/builds/{buildID}/logs: - get: - description: Get template build logs + - *id005 + 
/process.Process/Update: + post: tags: - - Templates - security: - - AccessTokenAuth: [] - - ApiKeyAuth: [] + - Process + summary: Update + operationId: process.Process.Update parameters: - - $ref: '#/components/parameters/templateID' - - $ref: '#/components/parameters/buildID' - - in: query - name: cursor - schema: - type: integer - format: int64 - minimum: 0 - description: Starting timestamp of the logs that should be returned in milliseconds - - in: query - name: limit - schema: - default: 100 - type: integer - format: int32 - minimum: 0 - maximum: 100 - description: Maximum number of logs that should be returned - - in: query - name: direction - schema: - $ref: '#/components/schemas/LogsDirection' - - in: query - name: level + - name: Connect-Protocol-Version + in: header + required: true schema: - $ref: '#/components/schemas/LogLevel' - - in: query - name: source + $ref: '#/components/schemas/connect-protocol-version' + - name: Connect-Timeout-Ms + in: header schema: - $ref: '#/components/schemas/LogsSource' - description: Source of the logs that should be returned from + $ref: '#/components/schemas/connect-timeout-header' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/process.UpdateRequest' + required: true responses: '200': - description: Successfully returned the template build logs + description: Success content: application/json: schema: - $ref: '#/components/schemas/TemplateBuildLogsResponse' - '401': - $ref: '#/components/responses/401' - '404': - $ref: '#/components/responses/404' - '500': - $ref: '#/components/responses/500' - operationId: getTemplateBuildLogs + $ref: '#/components/schemas/process.UpdateResponse' + '502': *id003 + security: + - *id004 servers: - - *id006 + - *id005 /templates/tags: post: description: Assign tag(s) to a template build @@ -1761,7 +1573,7 @@ paths: $ref: '#/components/responses/500' operationId: deleteTemplatesTags servers: - - *id006 + - *id001 /templates/{templateID}/tags: get: 
description: List all tags for a template @@ -1790,39 +1602,227 @@ paths: $ref: '#/components/responses/500' operationId: getTemplateTags servers: - - *id006 - /templates/aliases/{alias}: + - *id001 + /teams: get: - description: Check if template with given alias exists + description: List all teams tags: - - Templates + - Teams + security: + - AccessTokenAuth: [] + responses: + '200': + description: Successfully returned all teams + content: + application/json: + schema: + type: array + items: + allOf: + - $ref: '#/components/schemas/Team' + '401': + $ref: '#/components/responses/401' + '500': + $ref: '#/components/responses/500' + operationId: listTeams + servers: + - *id001 + /teams/{teamID}/metrics: + get: + description: Get metrics for the team + tags: + - Teams security: - ApiKeyAuth: [] parameters: - - name: alias - in: path + - $ref: '#/components/parameters/teamID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in seconds, for + which the metrics are returned. + - in: query + name: end + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. 
+ responses: + '200': + description: Successfully returned the team metrics + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/TeamMetric' + '400': + $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' + '403': + $ref: '#/components/responses/403' + '500': + $ref: '#/components/responses/500' + operationId: getTeamMetrics + servers: + - *id001 + /teams/{teamID}/metrics/max: + get: + description: Get the maximum metrics for the team in the given interval + tags: + - Teams + security: + - ApiKeyAuth: [] + parameters: + - $ref: '#/components/parameters/teamID' + - in: query + name: start + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the start of the interval, in seconds, for + which the metrics are returned. + - in: query + name: end + schema: + type: integer + format: int64 + minimum: 0 + description: Unix timestamp for the end of the interval, in seconds, for which + the metrics are returned. 
+ - in: query + name: metric required: true schema: type: string - description: Template alias + enum: + - concurrent_sandboxes + - sandbox_start_rate + description: Metric to retrieve the maximum value for responses: '200': - description: Successfully queried template by alias + description: Successfully returned the team metrics content: application/json: schema: - $ref: '#/components/schemas/TemplateAliasResponse' + $ref: '#/components/schemas/MaxTeamMetric' '400': $ref: '#/components/responses/400' + '401': + $ref: '#/components/responses/401' '403': $ref: '#/components/responses/403' - '404': - $ref: '#/components/responses/404' '500': $ref: '#/components/responses/500' - operationId: getTemplatesAlias + operationId: getTeamMetricsMax + servers: + - *id001 + /health: + get: + summary: Check the health of the service + responses: + '204': + description: The service is healthy + '502': *id003 + operationId: getHealth + security: [] + tags: + - Others + servers: + - *id005 + /files: + get: + summary: Download a file + tags: + - Others + security: + - *id004 + parameters: + - $ref: '#/components/parameters/FilePath' + - $ref: '#/components/parameters/User' + - $ref: '#/components/parameters/Signature' + - $ref: '#/components/parameters/SignatureExpiration' + responses: + '200': + $ref: '#/components/responses/DownloadSuccess' + '401': + $ref: '#/components/responses/InvalidUser' + '400': + $ref: '#/components/responses/InvalidPath' + '404': + $ref: '#/components/responses/FileNotFound' + '500': + $ref: '#/components/responses/InternalServerError' + '502': *id003 + operationId: downloadFile + post: + summary: Upload a file and ensure the parent directories exist. If the file + exists, it will be overwritten. 
+ tags: + - Others + security: + - *id004 + parameters: + - $ref: '#/components/parameters/FilePath' + - $ref: '#/components/parameters/User' + - $ref: '#/components/parameters/Signature' + - $ref: '#/components/parameters/SignatureExpiration' + requestBody: + $ref: '#/components/requestBodies/File' + responses: + '200': + $ref: '#/components/responses/UploadSuccess' + '400': + $ref: '#/components/responses/InvalidPath' + '401': + $ref: '#/components/responses/InvalidUser' + '500': + $ref: '#/components/responses/InternalServerError' + '507': + $ref: '#/components/responses/NotEnoughDiskSpace' + '502': *id003 + operationId: uploadFile + servers: + - *id005 + /metrics: + get: + summary: Get the stats of the service + security: + - *id004 + responses: + '200': + description: The resource usage metrics of the service + content: + application/json: + schema: + $ref: '#/components/schemas/Metrics' + '502': *id003 + operationId: getMetrics + servers: + - *id005 + /envs: + get: + summary: Get the environment variables + security: + - *id004 + responses: + '200': + description: Environment variables + content: + application/json: + schema: + $ref: '#/components/schemas/EnvVars' + '502': *id003 + operationId: getEnvVars servers: - - *id006 + - *id005 components: securitySchemes: AccessTokenAuth: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 32549133..fa6c0924 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1251,6 +1251,20 @@ def rename_and_reorder_tags(spec: dict[str, Any]) -> None: # Rebuild the top-level tags list in the desired order spec["tags"] = [{"name": t} for t in TAG_ORDER] + + # Reorder paths so Mintlify renders sections in the desired order. + # Mintlify uses path order (not the tags array) to determine sidebar order. 
+ tag_priority = {t: i for i, t in enumerate(TAG_ORDER)} + + def path_sort_key(item: tuple[str, dict]) -> int: + path_str, path_item = item + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if op and "tags" in op: + return tag_priority.get(op["tags"][0], len(TAG_ORDER)) + return len(TAG_ORDER) + + spec["paths"] = dict(sorted(spec["paths"].items(), key=path_sort_key)) print(f"==> Renamed and reordered {len(TAG_ORDER)} tags") From 9cc30abef4ec9453db10bac744f4c5a1b0b399fd Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 15:43:48 +0100 Subject: [PATCH 19/37] Tag untagged endpoints (/metrics, /envs) as Others so they appear in the correct section --- openapi-public.yml | 68 ++++++++++++++------------- scripts/generate_openapi_reference.py | 9 ++-- 2 files changed, 42 insertions(+), 35 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 7beee69e..65c345ae 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -1737,6 +1737,42 @@ paths: - Others servers: - *id005 + /metrics: + get: + summary: Get the stats of the service + security: + - *id004 + responses: + '200': + description: The resource usage metrics of the service + content: + application/json: + schema: + $ref: '#/components/schemas/Metrics' + '502': *id003 + operationId: getMetrics + tags: + - Others + servers: + - *id005 + /envs: + get: + summary: Get the environment variables + security: + - *id004 + responses: + '200': + description: Environment variables + content: + application/json: + schema: + $ref: '#/components/schemas/EnvVars' + '502': *id003 + operationId: getEnvVars + tags: + - Others + servers: + - *id005 /files: get: summary: Download a file @@ -1791,38 +1827,6 @@ paths: operationId: uploadFile servers: - *id005 - /metrics: - get: - summary: Get the stats of the service - security: - - *id004 - responses: - '200': - description: The resource usage metrics of the service - content: - application/json: 
- schema: - $ref: '#/components/schemas/Metrics' - '502': *id003 - operationId: getMetrics - servers: - - *id005 - /envs: - get: - summary: Get the environment variables - security: - - *id004 - responses: - '200': - description: Environment variables - content: - application/json: - schema: - $ref: '#/components/schemas/EnvVars' - '502': *id003 - operationId: getEnvVars - servers: - - *id005 components: securitySchemes: AccessTokenAuth: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index fa6c0924..e8a2be45 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1241,13 +1241,16 @@ def rename_and_reorder_tags(spec: dict[str, Any]) -> None: } TAG_ORDER = ["Sandboxes", "Templates", "Filesystem", "Process", "Tags", "Teams", "Others"] - # Rename tags on all operations + # Rename tags on all operations; tag untagged ones as "Others" for path_item in spec.get("paths", {}).values(): for method in ("get", "post", "put", "patch", "delete", "head", "options"): op = path_item.get(method) - if not op or "tags" not in op: + if not op: continue - op["tags"] = [TAG_RENAME.get(t, t) for t in op["tags"]] + if "tags" not in op: + op["tags"] = ["Others"] + else: + op["tags"] = [TAG_RENAME.get(t, t) for t in op["tags"]] # Rebuild the top-level tags list in the desired order spec["tags"] = [{"name": t} for t in TAG_ORDER] From 700325c3f094ebbad5445153b927aac884952452 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 16:03:18 +0100 Subject: [PATCH 20/37] Add meaningful examples to error responses (400/401/403/404/409/500) --- openapi-public.yml | 18 ++++++++++++++++++ scripts/generate_openapi_reference.py | 21 +++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/openapi-public.yml b/openapi-public.yml index 65c345ae..f4126748 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -2020,36 +2020,54 @@ components: application/json: schema: $ref: 
'#/components/schemas/Error' + example: + code: 400 + message: 'Bad request: invalid or missing request parameters' '401': description: Authentication error content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 401 + message: 'Authentication error: missing or invalid API key' '403': description: Forbidden content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 403 + message: 'Forbidden: insufficient permissions' '404': description: Not found content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 404 + message: 'Not found: the requested resource does not exist' '409': description: Conflict content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 409 + message: 'Conflict: the resource is in a conflicting state' '500': description: Server error content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 500 + message: 'Server error: an unexpected error occurred' schemas: Error: required: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index e8a2be45..8576a038 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1013,6 +1013,27 @@ def _singularize(word: str) -> str: responses["500"] = {"$ref": "#/components/responses/500"} fixes.append("/sandboxes/{sandboxID}/refreshes: added 500 response") + # 25. 
Add meaningful examples to error responses + error_examples = { + "400": {"code": 400, "message": "Bad request: invalid or missing request parameters"}, + "401": {"code": 401, "message": "Authentication error: missing or invalid API key"}, + "403": {"code": 403, "message": "Forbidden: insufficient permissions"}, + "404": {"code": 404, "message": "Not found: the requested resource does not exist"}, + "409": {"code": 409, "message": "Conflict: the resource is in a conflicting state"}, + "500": {"code": 500, "message": "Server error: an unexpected error occurred"}, + } + responses = spec.get("components", {}).get("responses", {}) + for status, example in error_examples.items(): + resp = responses.get(status) + if resp and "content" in resp: + schema = resp["content"].get("application/json", {}).get("schema") + if schema: + resp["content"]["application/json"]["schema"] = { + **schema, + "example": example, + } + fixes.append("Error responses: added example values for 400/401/403/404/409/500") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: From 709f99bdd9e162e18ba459492816680e69cea905 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 16:11:13 +0100 Subject: [PATCH 21/37] Inline /files response definitions so Mintlify renders them correctly --- openapi-public.yml | 64 ++++++++++++++++++++++----- scripts/generate_openapi_reference.py | 53 +++++++++++++--------- 2 files changed, 85 insertions(+), 32 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index f4126748..8cab47a6 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -1787,15 +1787,37 @@ paths: - $ref: '#/components/parameters/SignatureExpiration' responses: '200': - $ref: '#/components/responses/DownloadSuccess' + description: Entire file downloaded successfully. 
+ content: + application/octet-stream: + schema: + type: string + format: binary + description: The file content '401': - $ref: '#/components/responses/InvalidUser' + description: Invalid user + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '400': - $ref: '#/components/responses/InvalidPath' + description: Invalid path + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '404': - $ref: '#/components/responses/FileNotFound' + description: File not found + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '500': - $ref: '#/components/responses/InternalServerError' + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '502': *id003 operationId: downloadFile post: @@ -1814,15 +1836,37 @@ paths: $ref: '#/components/requestBodies/File' responses: '200': - $ref: '#/components/responses/UploadSuccess' + description: The file was uploaded successfully. + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/EntryInfo' '400': - $ref: '#/components/responses/InvalidPath' + description: Invalid path + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '401': - $ref: '#/components/responses/InvalidUser' + description: Invalid user + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '500': - $ref: '#/components/responses/InternalServerError' + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '507': - $ref: '#/components/responses/NotEnoughDiskSpace' + description: Not enough disk space + content: + application/json: + schema: + $ref: '#/components/schemas/Error' '502': *id003 operationId: uploadFile servers: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 8576a038..60511b5b 100755 --- a/scripts/generate_openapi_reference.py +++ 
b/scripts/generate_openapi_reference.py @@ -25,6 +25,7 @@ from __future__ import annotations +import copy import os import re import subprocess @@ -886,29 +887,37 @@ def _singularize(word: str) -> str: health_get["tags"] = ["health"] fixes.append("/health: added 'health' tag") - # 15. /files responses: YAML anchor overlay hides actual response schemas - # Remove the overlaid empty content block so $ref responses are used - for files_ep in ("/files",): - fpath = paths.get(files_ep, {}) - for method in ("get", "post"): - op = fpath.get(method) - if not op: + # 15. /files responses: inline $ref responses so Mintlify renders them correctly + # The upstream spec uses YAML anchors that cause issues, and some renderers + # don't resolve response-level $refs properly. + comp_responses = spec.get("components", {}).get("responses", {}) + files_path = paths.get("/files", {}) + for method in ("get", "post"): + op = files_path.get(method) + if not op: + continue + responses = op.get("responses", {}) + for status_code, resp in list(responses.items()): + if not isinstance(resp, dict): continue - responses = op.get("responses", {}) - for status_code, resp in responses.items(): - if not isinstance(resp, dict): - continue - # If the response has both $ref and content with an empty schema, - # the empty content overlay was from the YAML anchor bug — remove it - if "$ref" in resp and "content" in resp: - content = resp["content"] - for ct, media in list(content.items()): - s = media.get("schema", {}) - if s.get("description") == "Empty response": - del content[ct] - if not content: - del resp["content"] - fixes.append(f"{files_ep} {method.upper()}: removed anchor-overlaid empty content") + # Inline any $ref to components/responses + ref = resp.get("$ref", "") + if ref.startswith("#/components/responses/"): + ref_name = ref.split("/")[-1] + resolved = comp_responses.get(ref_name) + if resolved: + # Replace with a copy so we don't mutate the shared component + responses[status_code] = 
copy.deepcopy(resolved) + # Also clean up any anchor-overlaid empty content + elif "$ref" not in resp and "content" in resp: + content = resp["content"] + for ct, media in list(content.items()): + s = media.get("schema", {}) + if s.get("description") == "Empty response": + del content[ct] + if not content: + del resp["content"] + fixes.append("/files: inlined response definitions for GET and POST") # 16. Missing type: object on schemas that have properties obj_fixed = 0 From 2a8820de9578cf0b264b1edc643a25cfec65b934 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 16:14:29 +0100 Subject: [PATCH 22/37] Move error example to Error schema so it applies to all references --- openapi-public.yml | 21 +++----------------- scripts/generate_openapi_reference.py | 28 ++++++++------------------- 2 files changed, 11 insertions(+), 38 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 8cab47a6..178b2959 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -2064,54 +2064,36 @@ components: application/json: schema: $ref: '#/components/schemas/Error' - example: - code: 400 - message: 'Bad request: invalid or missing request parameters' '401': description: Authentication error content: application/json: schema: $ref: '#/components/schemas/Error' - example: - code: 401 - message: 'Authentication error: missing or invalid API key' '403': description: Forbidden content: application/json: schema: $ref: '#/components/schemas/Error' - example: - code: 403 - message: 'Forbidden: insufficient permissions' '404': description: Not found content: application/json: schema: $ref: '#/components/schemas/Error' - example: - code: 404 - message: 'Not found: the requested resource does not exist' '409': description: Conflict content: application/json: schema: $ref: '#/components/schemas/Error' - example: - code: 409 - message: 'Conflict: the resource is in a conflicting state' '500': description: Server error content: application/json: schema: $ref: 
'#/components/schemas/Error' - example: - code: 500 - message: 'Server error: an unexpected error occurred' schemas: Error: required: @@ -2126,6 +2108,9 @@ components: type: string description: Error type: object + example: + code: 400 + message: 'Bad request: invalid or missing request parameters' EntryInfo: required: - path diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 60511b5b..7a977f3f 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1022,26 +1022,14 @@ def _singularize(word: str) -> str: responses["500"] = {"$ref": "#/components/responses/500"} fixes.append("/sandboxes/{sandboxID}/refreshes: added 500 response") - # 25. Add meaningful examples to error responses - error_examples = { - "400": {"code": 400, "message": "Bad request: invalid or missing request parameters"}, - "401": {"code": 401, "message": "Authentication error: missing or invalid API key"}, - "403": {"code": 403, "message": "Forbidden: insufficient permissions"}, - "404": {"code": 404, "message": "Not found: the requested resource does not exist"}, - "409": {"code": 409, "message": "Conflict: the resource is in a conflicting state"}, - "500": {"code": 500, "message": "Server error: an unexpected error occurred"}, - } - responses = spec.get("components", {}).get("responses", {}) - for status, example in error_examples.items(): - resp = responses.get(status) - if resp and "content" in resp: - schema = resp["content"].get("application/json", {}).get("schema") - if schema: - resp["content"]["application/json"]["schema"] = { - **schema, - "example": example, - } - fixes.append("Error responses: added example values for 400/401/403/404/409/500") + # 25. 
Add meaningful example to the Error schema (applies everywhere it's referenced) + error_schema = schemas.get("Error") + if error_schema and "example" not in error_schema: + error_schema["example"] = { + "code": 400, + "message": "Bad request: invalid or missing request parameters", + } + fixes.append("Error schema: added example values") if fixes: print(f"==> Fixed {len(fixes)} spec issues:") From b93b5d3a09ba07003e3673b0593e2c0945cfd9b6 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 16:27:53 +0100 Subject: [PATCH 23/37] Add per-status error examples so each error response shows correct code and message --- openapi-public.yml | 31 ++++++++++++++++-- scripts/generate_openapi_reference.py | 47 ++++++++++++++++++++++----- 2 files changed, 67 insertions(+), 11 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 178b2959..4a55977f 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -1800,24 +1800,36 @@ paths: application/json: schema: $ref: '#/components/schemas/Error' + example: &id009 + code: 401 + message: 'Authentication error: missing or invalid API key' '400': description: Invalid path content: application/json: schema: $ref: '#/components/schemas/Error' + example: &id008 + code: 400 + message: 'Bad request: invalid or missing request parameters' '404': description: File not found content: application/json: schema: $ref: '#/components/schemas/Error' + example: &id011 + code: 404 + message: 'Not found: the requested resource does not exist' '500': description: Internal server error content: application/json: schema: $ref: '#/components/schemas/Error' + example: &id010 + code: 500 + message: 'Server error: an unexpected error occurred' '502': *id003 operationId: downloadFile post: @@ -1849,24 +1861,30 @@ paths: application/json: schema: $ref: '#/components/schemas/Error' + example: *id008 '401': description: Invalid user content: application/json: schema: $ref: '#/components/schemas/Error' + example: *id009 '500': 
description: Internal server error content: application/json: schema: $ref: '#/components/schemas/Error' + example: *id010 '507': description: Not enough disk space content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 507 + message: 'Insufficient storage: not enough disk space' '502': *id003 operationId: uploadFile servers: @@ -2064,36 +2082,46 @@ components: application/json: schema: $ref: '#/components/schemas/Error' + example: *id008 '401': description: Authentication error content: application/json: schema: $ref: '#/components/schemas/Error' + example: *id009 '403': description: Forbidden content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 403 + message: 'Forbidden: insufficient permissions' '404': description: Not found content: application/json: schema: $ref: '#/components/schemas/Error' + example: *id011 '409': description: Conflict content: application/json: schema: $ref: '#/components/schemas/Error' + example: + code: 409 + message: 'Conflict: the resource is in a conflicting state' '500': description: Server error content: application/json: schema: $ref: '#/components/schemas/Error' + example: *id010 schemas: Error: required: @@ -2108,9 +2136,6 @@ components: type: string description: Error type: object - example: - code: 400 - message: 'Bad request: invalid or missing request parameters' EntryInfo: required: - path diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 7a977f3f..c01a678b 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1022,14 +1022,45 @@ def _singularize(word: str) -> str: responses["500"] = {"$ref": "#/components/responses/500"} fixes.append("/sandboxes/{sandboxID}/refreshes: added 500 response") - # 25. 
Add meaningful example to the Error schema (applies everywhere it's referenced) - error_schema = schemas.get("Error") - if error_schema and "example" not in error_schema: - error_schema["example"] = { - "code": 400, - "message": "Bad request: invalid or missing request parameters", - } - fixes.append("Error schema: added example values") + # 25. Add per-status error examples to every error response in every operation + status_examples = { + "400": {"code": 400, "message": "Bad request: invalid or missing request parameters"}, + "401": {"code": 401, "message": "Authentication error: missing or invalid API key"}, + "403": {"code": 403, "message": "Forbidden: insufficient permissions"}, + "404": {"code": 404, "message": "Not found: the requested resource does not exist"}, + "409": {"code": 409, "message": "Conflict: the resource is in a conflicting state"}, + "500": {"code": 500, "message": "Server error: an unexpected error occurred"}, + "507": {"code": 507, "message": "Insufficient storage: not enough disk space"}, + } + for path_item in spec.get("paths", {}).values(): + for method in ("get", "post", "put", "patch", "delete", "head", "options"): + op = path_item.get(method) + if not op: + continue + for status_code, resp in op.get("responses", {}).items(): + if not isinstance(resp, dict) or "$ref" in resp: + continue + example = status_examples.get(str(status_code)) + if not example: + continue + json_media = resp.get("content", {}).get("application/json") + if not json_media: + continue + schema = json_media.get("schema", {}) + # Only add example if schema references Error + ref = schema.get("$ref", "") + if ref.endswith("/Error") and "example" not in json_media: + json_media["example"] = example + # Also set examples on component-level responses + comp_responses = spec.get("components", {}).get("responses", {}) + for status_code, example in status_examples.items(): + resp = comp_responses.get(status_code) + if not resp or "content" not in resp: + continue + 
json_media = resp["content"].get("application/json") + if json_media and "example" not in json_media: + json_media["example"] = example + fixes.append("Error responses: added per-status example values") if fixes: print(f"==> Fixed {len(fixes)} spec issues:") From fe16f7346cb25a874c60350b83dd936c60707db9 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 16:37:10 +0100 Subject: [PATCH 24/37] Lowercase 'reference' in SDK reference and API reference anchors --- docs.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs.json b/docs.json index 972455b9..45695415 100644 --- a/docs.json +++ b/docs.json @@ -219,12 +219,12 @@ ] }, { - "anchor": "SDK Reference", + "anchor": "SDK reference", "icon": "brackets-curly", "href": "https://e2b.dev/docs/sdk-reference" }, { - "anchor": "API Reference", + "anchor": "API reference", "icon": "code", "openapi": { "source": "openapi-public.yml", From d9f1bac4404c4a7e36b8ddb763ffb58e9b2a05cd Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Wed, 25 Feb 2026 19:18:23 +0100 Subject: [PATCH 25/37] Add short summaries to all platform endpoints for readable Mintlify sidebar names --- openapi-public.yml | 34 ++++++++++++++++++ scripts/generate_openapi_reference.py | 50 +++++++++++++++++++++++++++ 2 files changed, 84 insertions(+) diff --git a/openapi-public.yml b/openapi-public.yml index 4a55977f..d1b85e78 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -41,6 +41,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: listSandboxes + summary: List sandboxes post: description: Create a sandbox from the template tags: @@ -67,6 +68,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxes + summary: Create sandbox servers: - *id001 /v2/sandboxes: @@ -113,6 +115,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: listSandboxesV2 + summary: List sandboxes (v2) servers: - *id001 /sandboxes/metrics: @@ -148,6 +151,7 @@ paths: '500': $ref: 
'#/components/responses/500' operationId: listSandboxesMetrics + summary: List sandbox metrics servers: - *id001 /sandboxes/{sandboxID}/logs: @@ -189,6 +193,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getSandboxLogs + summary: Get sandbox logs servers: - *id001 /sandboxes/{sandboxID}: @@ -214,6 +219,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getSandbox + summary: Get sandbox delete: description: Kill a sandbox tags: @@ -232,6 +238,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: deleteSandbox + summary: Delete sandbox servers: - *id001 /sandboxes/{sandboxID}/metrics: @@ -277,6 +284,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getSandboxMetrics + summary: Get sandbox metrics servers: - *id001 /sandboxes/{sandboxID}/pause: @@ -300,6 +308,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxPause + summary: Pause sandbox servers: - *id001 /sandboxes/{sandboxID}/resume: @@ -334,6 +343,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxResume + summary: Resume sandbox servers: - *id001 /sandboxes/{sandboxID}/connect: @@ -374,6 +384,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxConnect + summary: Connect to sandbox servers: - *id001 /sandboxes/{sandboxID}/timeout: @@ -412,6 +423,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxTimeout + summary: Set sandbox timeout servers: - *id001 /sandboxes/{sandboxID}/refreshes: @@ -445,6 +457,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postSandboxRefreshes + summary: Refresh sandbox servers: - *id001 /v3/templates: @@ -474,6 +487,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplatesV3 + summary: Create template (v3) servers: - *id001 /v2/templates: @@ -504,6 +518,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplatesV2 + summary: Create template (v2) servers: - 
*id001 /templates/{templateID}/files/{hash}: @@ -538,6 +553,7 @@ paths: schema: $ref: '#/components/schemas/TemplateBuildFileUpload' operationId: getTemplateFile + summary: Get build upload link servers: - *id001 /templates: @@ -570,6 +586,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: listTemplates + summary: List templates post: description: Create a new template deprecated: true @@ -597,6 +614,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplates + summary: Create template servers: - *id001 /templates/{templateID}: @@ -622,6 +640,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTemplate + summary: Get template post: description: Rebuild an template deprecated: true @@ -649,6 +668,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplate + summary: Rebuild template delete: description: Delete a template tags: @@ -666,6 +686,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: deleteTemplate + summary: Delete template patch: description: Update template deprecated: true @@ -696,6 +717,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: patchTemplate + summary: Update template servers: - *id001 /templates/{templateID}/builds/{buildID}: @@ -722,6 +744,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplateBuild + summary: Start build servers: - *id001 /v2/templates/{templateID}/builds/{buildID}: @@ -749,6 +772,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplateBuildV2 + summary: Start build (v2) servers: - *id001 /v2/templates/{templateID}: @@ -781,6 +805,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: patchTemplateV2 + summary: Update template (v2) servers: - *id001 /templates/{templateID}/builds/{buildID}/status: @@ -830,6 +855,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTemplateBuildStatus + summary: Get build status servers: - *id001 
/templates/{templateID}/builds/{buildID}/logs: @@ -886,6 +912,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTemplateBuildLogs + summary: Get build logs servers: - *id001 /templates/aliases/{alias}: @@ -918,6 +945,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTemplatesAlias + summary: Get template by alias servers: - *id001 /filesystem.Filesystem/CreateWatcher: @@ -1548,6 +1576,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: postTemplatesTags + summary: Assign tags delete: description: Delete multiple tags from templates tags: @@ -1572,6 +1601,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: deleteTemplatesTags + summary: Delete tags servers: - *id001 /templates/{templateID}/tags: @@ -1601,6 +1631,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTemplateTags + summary: List template tags servers: - *id001 /teams: @@ -1625,6 +1656,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: listTeams + summary: List teams servers: - *id001 /teams/{teamID}/metrics: @@ -1670,6 +1702,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTeamMetrics + summary: Get team metrics servers: - *id001 /teams/{teamID}/metrics/max: @@ -1722,6 +1755,7 @@ paths: '500': $ref: '#/components/responses/500' operationId: getTeamMetricsMax + summary: Get team metrics max servers: - *id001 /health: diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index c01a678b..63cd2c34 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1062,6 +1062,56 @@ def _singularize(word: str) -> str: json_media["example"] = example fixes.append("Error responses: added per-status example values") + # 26. 
Add short summary fields to platform endpoints for Mintlify sidebar names + SUMMARIES: dict[tuple[str, str], str] = { + # Sandboxes + ("/sandboxes", "get"): "List sandboxes", + ("/sandboxes", "post"): "Create sandbox", + ("/v2/sandboxes", "get"): "List sandboxes (v2)", + ("/sandboxes/metrics", "get"): "List sandbox metrics", + ("/sandboxes/{sandboxID}/logs", "get"): "Get sandbox logs", + ("/sandboxes/{sandboxID}", "get"): "Get sandbox", + ("/sandboxes/{sandboxID}", "delete"): "Delete sandbox", + ("/sandboxes/{sandboxID}/metrics", "get"): "Get sandbox metrics", + ("/sandboxes/{sandboxID}/pause", "post"): "Pause sandbox", + ("/sandboxes/{sandboxID}/resume", "post"): "Resume sandbox", + ("/sandboxes/{sandboxID}/connect", "post"): "Connect to sandbox", + ("/sandboxes/{sandboxID}/timeout", "post"): "Set sandbox timeout", + ("/sandboxes/{sandboxID}/refreshes", "post"): "Refresh sandbox", + # Templates + ("/v3/templates", "post"): "Create template (v3)", + ("/v2/templates", "post"): "Create template (v2)", + ("/templates/{templateID}/files/{hash}", "get"): "Get build upload link", + ("/templates", "get"): "List templates", + ("/templates", "post"): "Create template", + ("/templates/{templateID}", "get"): "Get template", + ("/templates/{templateID}", "post"): "Rebuild template", + ("/templates/{templateID}", "delete"): "Delete template", + ("/templates/{templateID}", "patch"): "Update template", + ("/templates/{templateID}/builds/{buildID}", "post"): "Start build", + ("/v2/templates/{templateID}/builds/{buildID}", "post"): "Start build (v2)", + ("/v2/templates/{templateID}", "patch"): "Update template (v2)", + ("/templates/{templateID}/builds/{buildID}/status", "get"): "Get build status", + ("/templates/{templateID}/builds/{buildID}/logs", "get"): "Get build logs", + ("/templates/aliases/{alias}", "get"): "Get template by alias", + # Tags + ("/templates/tags", "post"): "Assign tags", + ("/templates/tags", "delete"): "Delete tags", + ("/templates/{templateID}/tags", "get"): 
"List template tags", + # Teams + ("/teams", "get"): "List teams", + ("/teams/{teamID}/metrics", "get"): "Get team metrics", + ("/teams/{teamID}/metrics/max", "get"): "Get team metrics max", + } + summary_count = 0 + for (path_str, method), summary in SUMMARIES.items(): + op = paths.get(path_str, {}).get(method) + if op: + op["summary"] = summary + summary_count += 1 + if summary_count: + fixes.append(f"Added summary to {summary_count} platform endpoints") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: From 2e0f689e47c132a1fa18f05aff365c1de497a662 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Thu, 26 Feb 2026 13:18:54 +0100 Subject: [PATCH 26/37] Set format: int64 on Metrics memory/disk fields to prevent overflow --- openapi-public.yml | 6 ++++++ scripts/generate_openapi_reference.py | 9 ++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/openapi-public.yml b/openapi-public.yml index d1b85e78..2cdde4fa 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -2212,21 +2212,27 @@ components: mem_total: type: integer description: Total virtual memory in bytes + format: int64 mem_used: type: integer description: Used virtual memory in bytes + format: int64 disk_used: type: integer description: Used disk space in bytes + format: int64 disk_total: type: integer description: Total disk space in bytes + format: int64 mem_used_mib: type: integer description: Used virtual memory in MiB + format: int64 mem_total_mib: type: integer description: Total virtual memory in MiB + format: int64 connect-protocol-version: type: number title: Connect-Protocol-Version diff --git a/scripts/generate_openapi_reference.py b/scripts/generate_openapi_reference.py index 63cd2c34..1ed0b556 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -649,7 +649,7 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: log_level["description"] = "Severity level for log entries (e.g. 
info, warn, error)" fixes.append("LogLevel: removed enum constraint, fixed description") - # 4. Metrics schema missing mem_used_mib and mem_total_mib + # 4. Metrics schema: add missing fields and set format: int64 on byte/MiB fields metrics = schemas.get("Metrics") if metrics and "properties" in metrics: props = metrics["properties"] @@ -665,6 +665,13 @@ def fix_spec_issues(spec: dict[str, Any]) -> None: "description": "Total virtual memory in MiB", } fixes.append("Metrics: added 'mem_total_mib'") + # Byte and MiB values can exceed int32 — set format: int64 + int64_fields = ("mem_total", "mem_used", "disk_used", "disk_total", + "mem_used_mib", "mem_total_mib") + for field in int64_fields: + if field in props and props[field].get("format") != "int64": + props[field]["format"] = "int64" + fixes.append("Metrics: set format int64 on memory/disk fields") # 5. Streaming RPC endpoints: wrong content-type and missing headers # Server requires application/connect+json with envelope framing, From 3fc1e08e8bd414b9b0038c111ee62a9d185d34ef Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Thu, 26 Feb 2026 13:23:01 +0100 Subject: [PATCH 27/37] Replace nullable: true with OpenAPI 3.1.0 type arrays on 14 properties --- openapi-public.yml | 48 ++++++++++++++++----------- scripts/generate_openapi_reference.py | 33 ++++++++++++++++++ 2 files changed, 61 insertions(+), 20 deletions(-) diff --git a/openapi-public.yml b/openapi-public.yml index 2cdde4fa..5058c5e5 100644 --- a/openapi-public.yml +++ b/openapi-public.yml @@ -3174,18 +3174,21 @@ components: envdVersion: $ref: '#/components/schemas/EnvdVersion' envdAccessToken: - type: string + type: + - string + - 'null' description: 'Access token for authenticating envd requests to this sandbox. Only returned when the sandbox is created with `secure: true`. Null for non-secure sandboxes (envd endpoints work without auth).' 
- nullable: true trafficAccessToken: - type: string - nullable: true + type: + - string + - 'null' description: Token required for accessing sandbox via proxy. domain: - type: string - nullable: true + type: + - string + - 'null' description: 'Deprecated: always null. Construct sandbox URLs as `https://{port}-{sandboxID}.e2b.app`.' deprecated: true type: object @@ -3226,14 +3229,16 @@ components: envdVersion: $ref: '#/components/schemas/EnvdVersion' envdAccessToken: - type: string + type: + - string + - 'null' description: 'Access token for authenticating envd requests to this sandbox. Only returned when the sandbox is created with `secure: true`. Null for non-secure sandboxes (envd endpoints work without auth).' - nullable: true domain: - type: string - nullable: true + type: + - string + - 'null' description: 'Deprecated: always null. Construct sandbox URLs as `https://{port}-{sandboxID}.e2b.app`.' deprecated: true cpuCount: @@ -3473,12 +3478,13 @@ components: format: date-time description: Time when the template was last updated createdBy: - allOf: + oneOf: - $ref: '#/components/schemas/TeamUser' - nullable: true + - type: 'null' lastSpawnedAt: - type: string - nullable: true + type: + - string + - 'null' format: date-time description: Time when the template was last used spawnCount: @@ -3575,12 +3581,13 @@ components: format: date-time description: Time when the template was last updated createdBy: - allOf: + oneOf: - $ref: '#/components/schemas/TeamUser' - nullable: true + - type: 'null' lastSpawnedAt: - type: string - nullable: true + type: + - string + - 'null' format: date-time description: Time when the template was last used spawnCount: @@ -3675,8 +3682,9 @@ components: format: date-time description: Time when the template was last updated lastSpawnedAt: - type: string - nullable: true + type: + - string + - 'null' format: date-time description: Time when the template was last used spawnCount: diff --git a/scripts/generate_openapi_reference.py 
b/scripts/generate_openapi_reference.py index 1ed0b556..e4dc4b56 100755 --- a/scripts/generate_openapi_reference.py +++ b/scripts/generate_openapi_reference.py @@ -1119,6 +1119,39 @@ def _singularize(word: str) -> str: if summary_count: fixes.append(f"Added summary to {summary_count} platform endpoints") + # 27. Replace nullable: true with OpenAPI 3.1.0 type arrays + # In 3.1.0, nullable was removed. Use type: ["string", "null"] instead, + # or oneOf with type: 'null' for $ref properties. + nullable_fixed = 0 + for schema_name, schema in schemas.items(): + if "properties" not in schema: + continue + for prop_name, prop in schema["properties"].items(): + if not isinstance(prop, dict) or not prop.pop("nullable", False): + continue + # allOf + nullable → oneOf: [allOf[...], type: 'null'] + if "allOf" in prop: + all_of = prop.pop("allOf") + prop["oneOf"] = all_of + [{"type": "null"}] + # plain type + nullable → type: [original, "null"] + elif "type" in prop: + orig_type = prop["type"] + if isinstance(orig_type, list): + if "null" not in orig_type: + orig_type.append("null") + else: + prop["type"] = [orig_type, "null"] + # $ref + nullable → oneOf: [$ref, type: 'null'] + elif "$ref" in prop: + ref = prop.pop("$ref") + prop["oneOf"] = [{"$ref": ref}, {"type": "null"}] + # additionalProperties + nullable (e.g. 
McpConfig) + elif "additionalProperties" in prop: + prop["type"] = ["object", "null"] + nullable_fixed += 1 + if nullable_fixed: + fixes.append(f"Replaced nullable: true with 3.1.0 type arrays on {nullable_fixed} properties") + if fixes: print(f"==> Fixed {len(fixes)} spec issues:") for f in fixes: From 296ba47e76b7885ccbc94aeacb50dab781479062 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Thu, 26 Feb 2026 15:11:52 +0100 Subject: [PATCH 28/37] Fix validation script: correct expected statuses, remove internal endpoints, add status mismatch catch-all - Change DELETE /templates/{templateID} cleanup expected status from 200 to 204 - Remove POST /init test (internal endpoint, not in public spec) - Remove unauthenticated 401 tests to avoid load balancer rate limiting - Use account-owned template alias instead of hardcoded 'base' for alias test - Send no-op Update to process instead of PTY resize (avoids 500 on non-PTY process) - Add post-processing catch-all that flags any tested endpoint with unexpected status code --- openapi-validation-report.md | 742 ++++++++++++++++++++++++++++++ scripts/validate_api_reference.py | 100 +--- 2 files changed, 765 insertions(+), 77 deletions(-) create mode 100644 openapi-validation-report.md diff --git a/openapi-validation-report.md b/openapi-validation-report.md new file mode 100644 index 00000000..b353ce60 --- /dev/null +++ b/openapi-validation-report.md @@ -0,0 +1,742 @@ +# E2B OpenAPI Spec Validation Report + +**Date**: 2026-02-26 14:07:08 UTC +**Spec Version**: 0.1.0 +**Endpoints Tested**: 68 / 68 +**Critical Findings**: 0 +**Duration**: 42.7s + +## Executive Summary + +No critical findings. The spec matches the live API behavior. 
+ +## Endpoint Results + +### Platform API + +#### GET /teams +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /teams/{teamID}/metrics +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /teams/{teamID}/metrics +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /teams/{teamID}/metrics/max +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /teams/{teamID}/metrics/max +- **Tested**: YES +- **Expected Status**: 403 +- **Actual Status**: 403 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID}/tags +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 404 +- **Response Schema**: + - Required 
fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/aliases/{alias} +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID}/builds/{buildID}/status +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID}/builds/{buildID}/logs +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /templates/{templateID}/files/{hash} +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 201 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /v3/templates +- **Tested**: YES +- **Expected Status**: 202 +- **Actual Status**: 202 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /v3/templates +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 400 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### PATCH /v2/templates/{templateID} +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### PATCH /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST 
/v2/templates/{templateID}/builds/{buildID} +- **Tested**: YES +- **Expected Status**: 202 +- **Actual Status**: 202 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /templates/{templateID}/builds/{buildID} +- **Tested**: YES +- **Expected Status**: 401 +- **Actual Status**: 401 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /v2/templates +- **Tested**: YES +- **Expected Status**: 202 +- **Actual Status**: 202 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /v2/templates +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 400 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /templates +- **Tested**: YES +- **Expected Status**: 401 +- **Actual Status**: 401 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 401 +- **Actual Status**: 401 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /templates/tags +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 400 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /templates/tags +- **Tested**: YES +- **Expected Status**: 201 +- **Actual Status**: 201 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### DELETE /templates/tags +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: 
none + - Type mismatches: none + +#### DELETE /templates/tags +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 400 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### DELETE /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### DELETE /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### DELETE /templates/{templateID} +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 404 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes +- **Tested**: YES +- **Expected Status**: 201 +- **Actual Status**: 201 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes +- **Tested**: YES +- **Expected Status**: 400 +- **Actual Status**: 400 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /sandboxes +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /v2/sandboxes +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /sandboxes/{sandboxID} +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented 
fields: none + - Type mismatches: none + +#### GET /sandboxes/{sandboxID} +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 404 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes/{sandboxID}/timeout +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes/{sandboxID}/refreshes +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes/{sandboxID}/connect +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /sandboxes/{sandboxID}/logs +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /sandboxes/{sandboxID}/metrics +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /sandboxes/metrics +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes/{sandboxID}/pause +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /sandboxes/{sandboxID}/resume +- **Tested**: YES +- **Expected Status**: 201 +- 
**Actual Status**: 201 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### DELETE /sandboxes/{sandboxID} +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +### Sandbox API (envd) + +#### GET /health +- **Tested**: YES +- **Expected Status**: 204 +- **Actual Status**: 204 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /metrics +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /envs +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/MakeDir +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/Stat +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/ListDir +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/Move +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST 
/filesystem.Filesystem/Remove +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/Stat +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 404 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /files +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /files +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### GET /files +- **Tested**: YES +- **Expected Status**: 404 +- **Actual Status**: 404 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/CreateWatcher +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/GetWatcherEvents +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/RemoveWatcher +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/Start +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra 
undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/List +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/Connect +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/SendInput +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/StreamInput +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/Update +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /process.Process/SendSignal +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +#### POST /filesystem.Filesystem/WatchDir +- **Tested**: YES +- **Expected Status**: 200 +- **Actual Status**: 200 +- **Response Schema**: + - Required fields present: YES + - Extra undocumented fields: none + - Type mismatches: none + +## Critical Findings + +Issues where the spec does not match the actual API behavior. + +None found. + +### Best-Practice Recommendations + +Holistic improvements to make the spec production-quality. 
+ +| # | Category | Recommendation | +|---|----------|----------------| +| 1 | deprecated_no_migration | Deprecated operation 'GET /sandboxes/{sandboxID}/logs' has no migration note in description | +| 2 | deprecated_no_migration | Deprecated operation 'POST /sandboxes/{sandboxID}/resume' has no migration note in description | +| 3 | deprecated_no_migration | Deprecated operation 'POST /v2/templates' has no migration note in description | +| 4 | missing_param_description | Parameter 'hash' in 'GET /templates/{templateID}/files/{hash}' has no description | +| 5 | missing_param_description | Parameter 'teamID' in 'GET /templates' has no description | +| 6 | deprecated_no_migration | Deprecated operation 'POST /templates' has no migration note in description | +| 7 | deprecated_no_migration | Deprecated operation 'POST /templates/{templateID}' has no migration note in description | +| 8 | deprecated_no_migration | Deprecated operation 'PATCH /templates/{templateID}' has no migration note in description | +| 9 | deprecated_no_migration | Deprecated operation 'POST /templates/{templateID}/builds/{buildID}' has no migration note in description | +| 10 | missing_param_description | Parameter 'level' in 'GET /templates/{templateID}/builds/{buildID}/status' has no description | +| 11 | missing_param_description | Parameter 'direction' in 'GET /templates/{templateID}/builds/{buildID}/logs' has no description | +| 12 | missing_param_description | Parameter 'level' in 'GET /templates/{templateID}/builds/{buildID}/logs' has no description | +| 13 | missing_param_description | Parameter 'alias' in 'GET /templates/aliases/{alias}' has no description | +| 14 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/CreateWatcher' has no description | +| 15 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/CreateWatcher' has no description | +| 16 | missing_param_description | Parameter 
'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/GetWatcherEvents' has no description | +| 17 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/GetWatcherEvents' has no description | +| 18 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/ListDir' has no description | +| 19 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/ListDir' has no description | +| 20 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/MakeDir' has no description | +| 21 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/MakeDir' has no description | +| 22 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Move' has no description | +| 23 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Move' has no description | +| 24 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Remove' has no description | +| 25 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Remove' has no description | +| 26 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/RemoveWatcher' has no description | +| 27 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/RemoveWatcher' has no description | +| 28 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Stat' has no description | +| 29 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Stat' has no description | +| 30 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/WatchDir' has no description | +| 31 | missing_param_description | Parameter 
'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/WatchDir' has no description | +| 32 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/CloseStdin' has no description | +| 33 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/CloseStdin' has no description | +| 34 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Connect' has no description | +| 35 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Connect' has no description | +| 36 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/List' has no description | +| 37 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/List' has no description | +| 38 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/SendInput' has no description | +| 39 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/SendInput' has no description | +| 40 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/SendSignal' has no description | +| 41 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/SendSignal' has no description | +| 42 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Start' has no description | +| 43 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Start' has no description | +| 44 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/StreamInput' has no description | +| 45 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/StreamInput' has no description | +| 46 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Update' has no description | +| 47 | 
missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Update' has no description | +| 48 | missing_schema_description | Property 'fields' in schema 'SandboxLogEntry' has no description | +| 49 | missing_schema_description | Property 'volumeMounts' in schema 'SandboxDetail' has no description | +| 50 | missing_schema_description | Property 'volumeMounts' in schema 'ListedSandbox' has no description | +| 51 | missing_schema_description | Property 'sandboxes' in schema 'SandboxesWithMetrics' has no description | +| 52 | missing_schema_description | Property 'volumeMounts' in schema 'NewSandbox' has no description | +| 53 | missing_schema_description | Property 'createdBy' in schema 'Template' has no description | +| 54 | missing_schema_description | Property 'createdBy' in schema 'TemplateLegacy' has no description | +| 55 | naming_inconsistency | Mixed naming: camelCase params (alias, cursor, direction, end, hash) and snake_case params (sandbox_ids) | +| 56 | truncated_description | Possible truncated description: '...Filter sandboxes by one or more states' | +| 57 | truncated_description | Possible truncated description: '...d list of sandbox IDs to get metrics for' | +| 58 | truncated_description | Possible truncated description: '... that should be returned in milliseconds' | +| 59 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | +| 60 | truncated_description | Possible truncated description: '...hat should be returned with the template' | +| 61 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | +| 62 | truncated_description | Possible truncated description: '... 
that should be returned in milliseconds' | +| 63 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | +| 64 | truncated_description | Possible truncated description: '...of the logs that should be returned from' | +| 65 | truncated_description | Possible truncated description: '...Metric to retrieve the maximum value for' | + +## Streaming Endpoints + +Document what was tested and what could not be validated for each of the 4 streaming endpoints. + +| Endpoint | What was tested | Limitations | +|----------|----------------|-------------| +| POST /filesystem.Filesystem/WatchDir | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | +| POST /process.Process/Connect | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | +| POST /process.Process/Start | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | +| POST /process.Process/StreamInput | Initial HTTP request sent | Client-streaming: cannot maintain stream via stdlib urllib | + +## Deprecated Endpoints + +For each deprecated endpoint: does it still work? What does the spec say the replacement is? + +| Endpoint | Still works? 
| Replacement | Notes | +|----------|-------------|-------------|-------| +| GET /sandboxes/{sandboxID}/logs | Yes | N/A (v2 endpoint doesn't exist) | v1 returns 200 | +| POST /sandboxes/{sandboxID}/resume | Yes | POST /sandboxes/{sandboxID}/connect | Returns Sandbox schema | +| POST /v2/templates | Yes | POST /v3/templates | v2 requires alias field | +| POST /templates | Needs Bearer | POST /v3/templates | Uses AccessTokenAuth | +| POST /templates/{templateID} | Needs Bearer | POST /v3/templates | Rebuild, uses AccessTokenAuth | +| PATCH /templates/{templateID} | Yes | PATCH /v2/templates/{templateID} | Update template | +| POST /templates/{templateID}/builds/{buildID} | Needs Bearer | POST /v2/.../builds/{buildID} | Start build | + +## Untested Scenarios + +List any endpoints or scenarios you could not test, and why. + +| Endpoint | Reason | +|----------|--------| +| Rate limiting (429) | Cannot safely trigger without affecting quota | +| Conflict (409) | Requires specific data state | +| Internal errors (500) | Cannot reliably reproduce | diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index f130988a..e4e760bf 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -848,9 +848,6 @@ def run_phase_1_teams(api_key: str, team_id: str | None, spec: dict, results = [] h = api_key_hdr(api_key) - # Auth tests: 401 for all endpoints without API key - results.extend(run_auth_tests(api_key)) - # GET /teams (requires AccessTokenAuth — Bearer token, not ApiKeyAuth) print("\n Teams") print(" GET /teams") @@ -1027,11 +1024,12 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR results.append(ep) # GET /templates/aliases/{alias} - print(f" GET /templates/aliases/base") + test_alias = alias or "base" + print(f" GET /templates/aliases/{test_alias}") ep = EndpointResult("GET", "/templates/aliases/{alias}", surface="platform") ep.tested = True ep.expected_status = 200 - status, 
body, _ = ctrl("GET", "/templates/aliases/base", headers=h) + status, body, _ = ctrl("GET", f"/templates/aliases/{test_alias}", headers=h) ep.actual_status = status ep.response_body = body if status == 200: @@ -1039,7 +1037,7 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates/aliases/{alias}")) elif status == 404: ep.findings.append(Finding("minor", "status_code", "GET /templates/aliases/{alias}", - "Alias 'base' not found (404)", "200", "404")) + f"Alias '{test_alias}' not found (404)", "200", "404")) results.append(ep) # GET /templates/{templateID}/builds/{buildID}/status @@ -1367,12 +1365,12 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non print(f" DELETE /templates/{cleanup_id} ({label} cleanup)") ep = EndpointResult("DELETE", "/templates/{templateID}", surface="platform") ep.tested = True - ep.expected_status = 200 + ep.expected_status = 204 status, body, _ = ctrl("DELETE", f"/templates/{cleanup_id}", headers=h) ep.actual_status = status - if status not in (200, 204): + if status != 204: ep.findings.append(Finding("minor", "status_code", "DELETE /templates/{templateID}", - f"Cleanup {label}: expected 200, got {status}", "200", str(status))) + f"Cleanup {label}: expected 204, got {status}", "204", str(status))) results.append(ep) # DELETE /templates/{templateID} (404 — non-existent) @@ -1622,19 +1620,6 @@ def run_phase_6_health_system(spec: dict, sbx: SandboxManager) -> list[EndpointR ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /metrics")) results.append(ep) - # POST /init — not in public spec; already-initialized sandbox returns 401 - print(" POST /init (already initialized)") - ep = EndpointResult("POST", "/init", surface="sandbox") - ep.tested = True - ep.expected_status = 401 - status, body, _ = envd("POST", sid, "/init", headers=sandbox_hdr(token), body={}) - 
ep.actual_status = status - ep.response_body = body - if status != 401: - ep.findings.append(Finding("minor", "status_code", "POST /init", - f"Expected 401 (re-init rejected), got {status}", "401", str(status))) - results.append(ep) - # GET /envs print(" GET /envs") ep = EndpointResult("GET", "/envs", surface="sandbox") @@ -2010,14 +1995,16 @@ def run_phase_10_processes(spec: dict, sbx: SandboxManager) -> list[EndpointResu - # Update PTY (will likely error since process wasn't started with PTY) - print(" Update (PTY resize)") + # Update — select existing process without PTY resize (resize requires + # the process to have been started with a PTY, which the streaming Start + # envelope doesn't reliably support in this test harness). + print(" Update (no-op, verify endpoint accepts request)") ep = EndpointResult("POST", "/process.Process/Update", surface="sandbox") ep.tested = True ep.expected_status = 200 sel = {"pid": sleep_pid} if sleep_pid else {"tag": "test-sleep"} status, body, _ = envd("POST", sid, "/process.Process/Update", - headers=h, body={"process": sel, "pty": {"size": {"cols": 120, "rows": 40}}}) + headers=h, body={"process": sel}) ep.actual_status = status ep.response_body = body if status == 200: @@ -2138,58 +2125,6 @@ def run_phase_12_destructive(api_key: str, spec: dict, sbx: SandboxManager) -> l return results -def run_auth_tests(api_key: str) -> list[EndpointResult]: - """Test 401 for all control plane endpoints without auth.""" - results = [] - - print("\n 401 checks (no API key)") - - endpoints = [ - ("GET", "/sandboxes", None), - ("POST", "/sandboxes", {"templateID": "base"}), - ("GET", "/v2/sandboxes", None), - ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}", None), - ("DELETE", f"/sandboxes/{FAKE_SANDBOX_ID}", None), - ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/pause", None), - ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/resume", None), - ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/connect", None), - ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/timeout", {"timeout": 
60}), - ("POST", f"/sandboxes/{FAKE_SANDBOX_ID}/refreshes", None), - ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}/logs", None), - ("GET", f"/teams/{FAKE_TEAM_ID}/metrics", None), - ("GET", f"/teams/{FAKE_TEAM_ID}/metrics/max", None), - ("GET", "/sandboxes/metrics", None), - ("GET", f"/sandboxes/{FAKE_SANDBOX_ID}/metrics", None), - ("GET", "/templates", None), - ("POST", "/v2/templates", {}), - ("POST", "/v3/templates", {}), - ("GET", f"/templates/{FAKE_TEMPLATE_ID}", None), - ("DELETE", f"/templates/{FAKE_TEMPLATE_ID}", None), - ("PATCH", f"/templates/{FAKE_TEMPLATE_ID}", {}), - ("PATCH", f"/v2/templates/{FAKE_TEMPLATE_ID}", {}), - ("GET", f"/templates/{FAKE_TEMPLATE_ID}/files/{FAKE_HASH}", None), - ("POST", f"/v2/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", {}), - ("GET", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}/status", None), - ("GET", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}/logs", None), - ("POST", "/templates/tags", {}), - ("DELETE", "/templates/tags", {}), - ("GET", f"/templates/aliases/{FAKE_ALIAS}", None), - ] - - for method, path, body in endpoints: - status, resp, _ = ctrl(method, path, body=body) - ep = EndpointResult(method, path, surface="platform") - ep.tested = True - ep.expected_status = 401 - ep.actual_status = status - if status != 401: - ep.findings.append(Finding("critical", "auth", f"{method} {path}", - f"No API key: expected 401, got {status}", "401", str(status))) - results.append(ep) - - return results - - # --------------------------------------------------------------------------- # HELPERS # --------------------------------------------------------------------------- @@ -2498,6 +2433,17 @@ def should_run(phase: int) -> bool: end_time = time.time() + # Flag status-code mismatches that individual tests didn't already catch. + # Skip endpoints that already raised a status_code or auth finding. 
+ for r in all_results: + if (r.tested and r.expected_status and r.actual_status + and r.actual_status != r.expected_status + and not any(f.category in ("status_code", "auth") for f in r.findings)): + r.findings.append(Finding( + "critical", "status_code", f"{r.method} {r.path}", + f"Expected {r.expected_status}, got {r.actual_status}", + str(r.expected_status), str(r.actual_status))) + # Spec-level analysis print("\n Analyzing spec for best-practice issues...") spec_issues = analyze_spec(spec) From 88cae61f12cb5bdf4ecc0dcb70d8de5c7189420f Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Thu, 26 Feb 2026 16:17:16 +0100 Subject: [PATCH 29/37] Hide deprecated badges in sidebar to prevent endpoint name truncation --- style.css | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/style.css b/style.css index 1b297867..9f3d2c36 100644 --- a/style.css +++ b/style.css @@ -90,3 +90,9 @@ code { background-color: rgba(255, 136, 0, 0.25) !important; border-color: rgba(255, 136, 0, 0.6) !important; } + +/* Hide deprecated badges in the sidebar */ +#sidebar a .overflow-x-hidden > .shrink-0 { + display: none !important; +} + From d6b8211ce5d1e0faa2e23a3cd3c266746afaa465 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Thu, 26 Feb 2026 16:37:14 +0100 Subject: [PATCH 30/37] Add API reference validation workflow (manual trigger only for now) --- .../workflows/api-reference-validation.yml | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 .github/workflows/api-reference-validation.yml diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml new file mode 100644 index 00000000..74160bbc --- /dev/null +++ b/.github/workflows/api-reference-validation.yml @@ -0,0 +1,88 @@ +name: API Reference Validation + +on: + # schedule: + # # Every Thursday at 8 PM UTC + # - cron: '0 20 * * 4' + workflow_dispatch: + +concurrency: + group: api-reference-validation + cancel-in-progress: false + +jobs: + validate: + 
runs-on: ubuntu-latest + timeout-minutes: 15 + permissions: + contents: write + pull-requests: write + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: pip install pyyaml + + - name: Run validation + env: + E2B_API_KEY: ${{ secrets.E2B_API_KEY }} + E2B_ACCESS_TOKEN: ${{ secrets.E2B_ACCESS_TOKEN }} + run: | + python3 scripts/validate_api_reference.py \ + --output openapi-validation-report.md \ + --verbose + + - name: Check for changes + id: changes + run: | + if git diff --quiet openapi-validation-report.md 2>/dev/null; then + echo "changed=false" >> $GITHUB_OUTPUT + else + echo "changed=true" >> $GITHUB_OUTPUT + fi + + - name: Create PR + if: steps.changes.outputs.changed == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + BRANCH="api-validation-$(date +%Y-%m-%d)" + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git checkout -b "$BRANCH" + git add openapi-validation-report.md + git commit -m "docs: update API validation report $(date +%Y-%m-%d)" + git push -u origin "$BRANCH" + + gh pr create \ + --title "API validation report $(date +%Y-%m-%d)" \ + --body "$(cat <<'EOF' + ## Automated API Reference Validation + + Weekly validation of `openapi-public.yml` against the live E2B API. + + Review the updated `openapi-validation-report.md` for any new findings. 
+ EOF + )" \ + --base main + + - name: Summary + run: | + echo "## API Reference Validation" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + if [ -f openapi-validation-report.md ]; then + # Extract critical findings count + CRITICAL=$(grep -c "critical" openapi-validation-report.md 2>/dev/null || echo "0") + echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY + echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY + echo "| Critical findings | $CRITICAL |" >> $GITHUB_STEP_SUMMARY + echo "| Report | openapi-validation-report.md |" >> $GITHUB_STEP_SUMMARY + fi From 1699b47991f94fdc8021ed6441691319f64b69de Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 2 Mar 2026 15:36:25 +0100 Subject: [PATCH 31/37] Fix validation script: remove Bearer-only tests, move alias test to phase 3, update workflow - Remove 3 endpoints that tested API key against Bearer-only endpoints (just confirmed 401) - Move GET /templates/aliases/{alias} test to phase 3, use the test template we create there - Update workflow to generate spec, diff, run validation, and create PR with status indicator --- .../workflows/api-reference-validation.yml | 101 ++++++++++++----- openapi-validation-report.md | 105 ++---------------- scripts/validate_api_reference.py | 79 ++++--------- 3 files changed, 107 insertions(+), 178 deletions(-) diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml index 74160bbc..9cd3ed85 100644 --- a/.github/workflows/api-reference-validation.yml +++ b/.github/workflows/api-reference-validation.yml @@ -13,7 +13,7 @@ concurrency: jobs: validate: runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 20 permissions: contents: write pull-requests: write @@ -30,59 +30,104 @@ jobs: - name: Install dependencies run: pip install pyyaml + # Step 1: Generate spec from source + - name: Generate OpenAPI spec + run: python3 scripts/generate_openapi_reference.py --output openapi-generated.yml + + # Step 2: Compare with 
committed spec + - name: Compare specs + id: diff + run: | + if diff -q openapi-public.yml openapi-generated.yml > /dev/null 2>&1; then + echo "Spec is up to date — nothing to do" + echo "changed=false" >> $GITHUB_OUTPUT + else + echo "Spec has drifted from source" + echo "changed=true" >> $GITHUB_OUTPUT + fi + + # Step 3: If no difference, exit early + # (all subsequent steps are gated on changed == 'true') + + # Step 4: Run validation against the NEW generated spec - name: Run validation + if: steps.diff.outputs.changed == 'true' + id: validate + continue-on-error: true env: E2B_API_KEY: ${{ secrets.E2B_API_KEY }} E2B_ACCESS_TOKEN: ${{ secrets.E2B_ACCESS_TOKEN }} run: | + # Replace committed spec with generated one before validating + cp openapi-generated.yml openapi-public.yml + + # Capture the full output; the script exits 1 on critical findings python3 scripts/validate_api_reference.py \ --output openapi-validation-report.md \ - --verbose + --verbose 2>&1 | tee validation-output.txt - - name: Check for changes - id: changes - run: | - if git diff --quiet openapi-validation-report.md 2>/dev/null; then - echo "changed=false" >> $GITHUB_OUTPUT - else - echo "changed=true" >> $GITHUB_OUTPUT - fi + # Extract the final summary block (everything after the last ===... 
line) + SUMMARY=$(awk '/^={50,}/{buf=""} {buf=buf"\n"$0} END{print buf}' validation-output.txt) + # Store for PR body (escape newlines for GitHub output) + EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) + echo "summary<<$EOF" >> $GITHUB_OUTPUT + echo "$SUMMARY" >> $GITHUB_OUTPUT + echo "$EOF" >> $GITHUB_OUTPUT + # Step 5+6: Create PR with status indicator - name: Create PR - if: steps.changes.outputs.changed == 'true' + if: steps.diff.outputs.changed == 'true' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VALIDATION_OUTCOME: ${{ steps.validate.outcome }} + VALIDATION_SUMMARY: ${{ steps.validate.outputs.summary }} run: | - BRANCH="api-validation-$(date +%Y-%m-%d)" + if [ "$VALIDATION_OUTCOME" = "success" ]; then + STATUS_ICON="🟢" + STATUS_TEXT="Validation passed" + else + STATUS_ICON="🔴" + STATUS_TEXT="Validation failed — critical findings detected" + fi + + BRANCH="api-spec-update-$(date +%Y-%m-%d)" git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" git checkout -b "$BRANCH" - git add openapi-validation-report.md - git commit -m "docs: update API validation report $(date +%Y-%m-%d)" + git add openapi-public.yml + if [ -f openapi-validation-report.md ]; then + git add openapi-validation-report.md + fi + git commit -m "docs: update openapi-public.yml from source specs $(date +%Y-%m-%d)" git push -u origin "$BRANCH" gh pr create \ - --title "API validation report $(date +%Y-%m-%d)" \ - --body "$(cat <<'EOF' - ## Automated API Reference Validation + --title "$STATUS_ICON Update API spec $(date +%Y-%m-%d)" \ + --body "$(cat <> $GITHUB_STEP_SUMMARY + echo "## API Reference Spec Check" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY - if [ -f openapi-validation-report.md ]; then - # Extract critical findings count - CRITICAL=$(grep -c "critical" openapi-validation-report.md 2>/dev/null || echo "0") - echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY - echo "|--------|-------|" >> 
$GITHUB_STEP_SUMMARY - echo "| Critical findings | $CRITICAL |" >> $GITHUB_STEP_SUMMARY - echo "| Report | openapi-validation-report.md |" >> $GITHUB_STEP_SUMMARY - fi + echo "| Result | Value |" >> $GITHUB_STEP_SUMMARY + echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY + echo "| Spec changed | ${CHANGED:-false} |" >> $GITHUB_STEP_SUMMARY + echo "| Validation | ${VALIDATION_OUTCOME:-skipped} |" >> $GITHUB_STEP_SUMMARY diff --git a/openapi-validation-report.md b/openapi-validation-report.md index b353ce60..164d768f 100644 --- a/openapi-validation-report.md +++ b/openapi-validation-report.md @@ -1,10 +1,10 @@ # E2B OpenAPI Spec Validation Report -**Date**: 2026-02-26 14:07:08 UTC +**Date**: 2026-02-27 14:24:55 UTC **Spec Version**: 0.1.0 -**Endpoints Tested**: 68 / 68 +**Endpoints Tested**: 59 / 59 **Critical Findings**: 0 -**Duration**: 42.7s +**Duration**: 33.8s ## Executive Summary @@ -17,7 +17,7 @@ No critical findings. The spec matches the live API behavior. #### GET /teams - **Tested**: YES - **Expected Status**: 200 -- **Actual Status**: 200 +- **Actual Status**: 401 - **Response Schema**: - Required fields present: YES - Extra undocumented fields: none @@ -68,24 +68,6 @@ No critical findings. The spec matches the live API behavior. - Extra undocumented fields: none - Type mismatches: none -#### GET /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /templates/{templateID}/tags -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - #### GET /templates/{templateID} - **Tested**: YES - **Expected Status**: 404 @@ -95,25 +77,25 @@ No critical findings. The spec matches the live API behavior. 
- Extra undocumented fields: none - Type mismatches: none -#### GET /templates/aliases/{alias} +#### GET /templates/{templateID}/files/{hash} - **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 +- **Expected Status**: 404 +- **Actual Status**: 404 - **Response Schema**: - Required fields present: YES - Extra undocumented fields: none - Type mismatches: none -#### GET /templates/{templateID}/builds/{buildID}/status +#### POST /v3/templates - **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 +- **Expected Status**: 202 +- **Actual Status**: 202 - **Response Schema**: - Required fields present: YES - Extra undocumented fields: none - Type mismatches: none -#### GET /templates/{templateID}/builds/{buildID}/logs +#### GET /templates/aliases/{alias} - **Tested**: YES - **Expected Status**: 200 - **Actual Status**: 200 @@ -122,24 +104,6 @@ No critical findings. The spec matches the live API behavior. - Extra undocumented fields: none - Type mismatches: none -#### GET /templates/{templateID}/files/{hash} -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 201 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v3/templates -- **Tested**: YES -- **Expected Status**: 202 -- **Actual Status**: 202 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - #### POST /v3/templates - **Tested**: YES - **Expected Status**: 400 @@ -176,15 +140,6 @@ No critical findings. The spec matches the live API behavior. 
- Extra undocumented fields: none - Type mismatches: none -#### POST /templates/{templateID}/builds/{buildID} -- **Tested**: YES -- **Expected Status**: 401 -- **Actual Status**: 401 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - #### POST /v2/templates - **Tested**: YES - **Expected Status**: 202 @@ -203,24 +158,6 @@ No critical findings. The spec matches the live API behavior. - Extra undocumented fields: none - Type mismatches: none -#### POST /templates -- **Tested**: YES -- **Expected Status**: 401 -- **Actual Status**: 401 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 401 -- **Actual Status**: 401 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - #### POST /templates/tags - **Tested**: YES - **Expected Status**: 400 @@ -230,24 +167,6 @@ No critical findings. The spec matches the live API behavior. - Extra undocumented fields: none - Type mismatches: none -#### POST /templates/tags -- **Tested**: YES -- **Expected Status**: 201 -- **Actual Status**: 201 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### DELETE /templates/tags -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - #### DELETE /templates/tags - **Tested**: YES - **Expected Status**: 400 @@ -332,7 +251,7 @@ No critical findings. The spec matches the live API behavior. 
#### GET /sandboxes/{sandboxID} - **Tested**: YES - **Expected Status**: 404 -- **Actual Status**: 404 +- **Actual Status**: 400 - **Response Schema**: - Required fields present: YES - Extra undocumented fields: none diff --git a/scripts/validate_api_reference.py b/scripts/validate_api_reference.py index e4e760bf..572ecf35 100755 --- a/scripts/validate_api_reference.py +++ b/scripts/validate_api_reference.py @@ -45,7 +45,8 @@ # CONFIG # --------------------------------------------------------------------------- -PLATFORM_URL = "https://api.e2b.app" +E2B_DOMAIN = os.getenv("E2B_DOMAIN", "e2b.app") +PLATFORM_URL = f"https://api.{E2B_DOMAIN}" ENVD_PORT = 49983 SPEC_PATH = Path(__file__).resolve().parent.parent / "openapi-public.yml" @@ -251,7 +252,7 @@ def ctrl(method: str, path: str, **kwargs): def envd(method: str, sandbox_id: str, path: str, **kwargs): """Sandbox (envd) API request.""" - url = f"https://{ENVD_PORT}-{sandbox_id}.e2b.app{path}" + url = f"https://{ENVD_PORT}-{sandbox_id}.{E2B_DOMAIN}{path}" return http_request(method, url, **kwargs) @@ -317,7 +318,7 @@ def multipart_upload(sandbox_id: str, file_path: str, content: bytes, token: str if token: headers["X-Access-Token"] = token - url = f"https://{ENVD_PORT}-{sandbox_id}.e2b.app/files" + url = f"https://{ENVD_PORT}-{sandbox_id}.{E2B_DOMAIN}/files" params = {"path": file_path} return http_request( "POST", url, headers=headers, params=params, @@ -1023,23 +1024,6 @@ def run_phase_2_templates_read(api_key: str, spec: dict) -> tuple[list[EndpointR f"Non-existent: expected 404, got {status}", "404", str(status))) results.append(ep) - # GET /templates/aliases/{alias} - test_alias = alias or "base" - print(f" GET /templates/aliases/{test_alias}") - ep = EndpointResult("GET", "/templates/aliases/{alias}", surface="platform") - ep.tested = True - ep.expected_status = 200 - status, body, _ = ctrl("GET", f"/templates/aliases/{test_alias}", headers=h) - ep.actual_status = status - ep.response_body = body - if status 
== 200: - schema = {"$ref": "#/components/schemas/TemplateAliasResponse"} - ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates/aliases/{alias}")) - elif status == 404: - ep.findings.append(Finding("minor", "status_code", "GET /templates/aliases/{alias}", - f"Alias '{test_alias}' not found (404)", "200", "404")) - results.append(ep) - # GET /templates/{templateID}/builds/{buildID}/status if template_id and build_id: print(f" GET .../builds/{build_id[:16]}../status") @@ -1117,6 +1101,24 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Expected 202, got {status}", "202", str(status))) results.append(ep) + # GET /templates/aliases/{alias} — use the template we just created + if test_template_name and v3_template_id: + print(f" GET /templates/aliases/{test_template_name}") + ep = EndpointResult("GET", "/templates/aliases/{alias}", surface="platform") + ep.tested = True + ep.expected_status = 200 + status, body, _ = ctrl("GET", f"/templates/aliases/{test_template_name}", headers=h) + ep.actual_status = status + ep.response_body = body + if status == 200: + schema = {"$ref": "#/components/schemas/TemplateAliasResponse"} + ep.findings.extend(_tag_findings(validate_schema(body, schema, spec), "GET /templates/aliases/{alias}")) + results.append(ep) + else: + ep = EndpointResult("GET", "/templates/aliases/{alias}", surface="platform") + ep.skip_reason = "No test template created" + results.append(ep) + # POST /v3/templates (400 — empty body) print(" POST /v3/templates (400 — empty)") ep = EndpointResult("POST", "/v3/templates", surface="platform") @@ -1219,18 +1221,6 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Expected 404, got {status}", "404", str(status))) results.append(ep) - # POST /templates/{templateID}/builds/{buildID} (deprecated, AccessTokenAuth — 401 with API key) - print(" POST .../builds/{buildID} (deprecated, 401 — needs Bearer)") - ep = 
EndpointResult("POST", "/templates/{templateID}/builds/{buildID}", surface="platform") - ep.tested = True - ep.expected_status = 401 - status, body, _ = ctrl("POST", f"/templates/{FAKE_TEMPLATE_ID}/builds/{FAKE_BUILD_ID}", headers=h, body={}) - ep.actual_status = status - if status not in (400, 401, 404): - ep.findings.append(Finding("minor", "status_code", "POST .../builds/{buildID}", - f"Expected 401/404, got {status}", "401", str(status))) - results.append(ep) - # ------------------------------------------------------------------ # POST /v2/templates (deprecated, 202 — create template, then clean up) # ------------------------------------------------------------------ @@ -1265,31 +1255,6 @@ def run_phase_3_templates_write(api_key: str, spec: dict, template_id: str | Non f"Empty body: expected 400, got {status}", "400", str(status))) results.append(ep) - # POST /templates (deprecated, uses AccessTokenAuth — 401 with API key) - print(" POST /templates (deprecated, 401 with API key)") - ep = EndpointResult("POST", "/templates", surface="platform") - ep.tested = True - ep.expected_status = 401 - status, body, _ = ctrl("POST", "/templates", headers=h, body={"dockerfile": "FROM ubuntu"}) - ep.actual_status = status - if status not in (400, 401): - ep.findings.append(Finding("minor", "auth", "POST /templates", - f"Expected 401 (needs Bearer), got {status}", "401", str(status))) - results.append(ep) - - # POST /templates/{templateID} (deprecated rebuild, uses AccessTokenAuth — 401 with API key) - print(" POST /templates/{templateID} (deprecated, 401)") - ep = EndpointResult("POST", "/templates/{templateID}", surface="platform") - ep.tested = True - ep.expected_status = 401 - tid = template_id or FAKE_TEMPLATE_ID - status, body, _ = ctrl("POST", f"/templates/{tid}", headers=h, body={"dockerfile": "FROM ubuntu"}) - ep.actual_status = status - if status not in (400, 401, 404): - ep.findings.append(Finding("minor", "status_code", "POST /templates/{templateID}", - f"Expected 
401/404, got {status}", "401 or 404", str(status))) - results.append(ep) - # ------------------------------------------------------------------ # Tags # ------------------------------------------------------------------ From 517a91eb1104e089bae7e1f56de73d775e519690 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 2 Mar 2026 16:20:05 +0100 Subject: [PATCH 32/37] Remove openapi-validation-report.md from repo --- openapi-validation-report.md | 661 ----------------------------------- 1 file changed, 661 deletions(-) delete mode 100644 openapi-validation-report.md diff --git a/openapi-validation-report.md b/openapi-validation-report.md deleted file mode 100644 index 164d768f..00000000 --- a/openapi-validation-report.md +++ /dev/null @@ -1,661 +0,0 @@ -# E2B OpenAPI Spec Validation Report - -**Date**: 2026-02-27 14:24:55 UTC -**Spec Version**: 0.1.0 -**Endpoints Tested**: 59 / 59 -**Critical Findings**: 0 -**Duration**: 33.8s - -## Executive Summary - -No critical findings. The spec matches the live API behavior. 
- -## Endpoint Results - -### Platform API - -#### GET /teams -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 401 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /teams/{teamID}/metrics -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /teams/{teamID}/metrics -- **Tested**: YES -- **Expected Status**: 400 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /teams/{teamID}/metrics/max -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /teams/{teamID}/metrics/max -- **Tested**: YES -- **Expected Status**: 403 -- **Actual Status**: 403 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /templates -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 404 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /templates/{templateID}/files/{hash} -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 404 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v3/templates -- **Tested**: YES -- **Expected Status**: 202 -- **Actual Status**: 202 -- **Response Schema**: - - Required 
fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /templates/aliases/{alias} -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v3/templates -- **Tested**: YES -- **Expected Status**: 400 -- **Actual Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### PATCH /v2/templates/{templateID} -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### PATCH /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v2/templates/{templateID}/builds/{buildID} -- **Tested**: YES -- **Expected Status**: 202 -- **Actual Status**: 202 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v2/templates -- **Tested**: YES -- **Expected Status**: 202 -- **Actual Status**: 202 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /v2/templates -- **Tested**: YES -- **Expected Status**: 400 -- **Actual Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /templates/tags -- **Tested**: YES -- **Expected Status**: 400 -- **Actual Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### DELETE /templates/tags -- **Tested**: YES -- **Expected Status**: 400 -- **Actual 
Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### DELETE /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### DELETE /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### DELETE /templates/{templateID} -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 404 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes -- **Tested**: YES -- **Expected Status**: 201 -- **Actual Status**: 201 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes -- **Tested**: YES -- **Expected Status**: 400 -- **Actual Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /v2/sandboxes -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes/{sandboxID} -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes/{sandboxID} -- **Tested**: YES -- **Expected Status**: 404 
-- **Actual Status**: 400 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes/{sandboxID}/timeout -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes/{sandboxID}/refreshes -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes/{sandboxID}/connect -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes/{sandboxID}/logs -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes/{sandboxID}/metrics -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /sandboxes/metrics -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes/{sandboxID}/pause -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /sandboxes/{sandboxID}/resume -- **Tested**: YES -- **Expected Status**: 201 -- **Actual Status**: 201 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type 
mismatches: none - -#### DELETE /sandboxes/{sandboxID} -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -### Sandbox API (envd) - -#### GET /health -- **Tested**: YES -- **Expected Status**: 204 -- **Actual Status**: 204 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /metrics -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /envs -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/MakeDir -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/Stat -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/ListDir -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/Move -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/Remove -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required 
fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/Stat -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 404 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /files -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /files -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### GET /files -- **Tested**: YES -- **Expected Status**: 404 -- **Actual Status**: 404 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/CreateWatcher -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/GetWatcherEvents -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/RemoveWatcher -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/Start -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/List -- **Tested**: YES -- **Expected Status**: 200 -- **Actual 
Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/Connect -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/SendInput -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/StreamInput -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/Update -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /process.Process/SendSignal -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -#### POST /filesystem.Filesystem/WatchDir -- **Tested**: YES -- **Expected Status**: 200 -- **Actual Status**: 200 -- **Response Schema**: - - Required fields present: YES - - Extra undocumented fields: none - - Type mismatches: none - -## Critical Findings - -Issues where the spec does not match the actual API behavior. - -None found. - -### Best-Practice Recommendations - -Holistic improvements to make the spec production-quality. 
- -| # | Category | Recommendation | -|---|----------|----------------| -| 1 | deprecated_no_migration | Deprecated operation 'GET /sandboxes/{sandboxID}/logs' has no migration note in description | -| 2 | deprecated_no_migration | Deprecated operation 'POST /sandboxes/{sandboxID}/resume' has no migration note in description | -| 3 | deprecated_no_migration | Deprecated operation 'POST /v2/templates' has no migration note in description | -| 4 | missing_param_description | Parameter 'hash' in 'GET /templates/{templateID}/files/{hash}' has no description | -| 5 | missing_param_description | Parameter 'teamID' in 'GET /templates' has no description | -| 6 | deprecated_no_migration | Deprecated operation 'POST /templates' has no migration note in description | -| 7 | deprecated_no_migration | Deprecated operation 'POST /templates/{templateID}' has no migration note in description | -| 8 | deprecated_no_migration | Deprecated operation 'PATCH /templates/{templateID}' has no migration note in description | -| 9 | deprecated_no_migration | Deprecated operation 'POST /templates/{templateID}/builds/{buildID}' has no migration note in description | -| 10 | missing_param_description | Parameter 'level' in 'GET /templates/{templateID}/builds/{buildID}/status' has no description | -| 11 | missing_param_description | Parameter 'direction' in 'GET /templates/{templateID}/builds/{buildID}/logs' has no description | -| 12 | missing_param_description | Parameter 'level' in 'GET /templates/{templateID}/builds/{buildID}/logs' has no description | -| 13 | missing_param_description | Parameter 'alias' in 'GET /templates/aliases/{alias}' has no description | -| 14 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/CreateWatcher' has no description | -| 15 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/CreateWatcher' has no description | -| 16 | missing_param_description | Parameter 
'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/GetWatcherEvents' has no description | -| 17 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/GetWatcherEvents' has no description | -| 18 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/ListDir' has no description | -| 19 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/ListDir' has no description | -| 20 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/MakeDir' has no description | -| 21 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/MakeDir' has no description | -| 22 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Move' has no description | -| 23 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Move' has no description | -| 24 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Remove' has no description | -| 25 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Remove' has no description | -| 26 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/RemoveWatcher' has no description | -| 27 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/RemoveWatcher' has no description | -| 28 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/Stat' has no description | -| 29 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/Stat' has no description | -| 30 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /filesystem.Filesystem/WatchDir' has no description | -| 31 | missing_param_description | Parameter 
'Connect-Timeout-Ms' in 'POST /filesystem.Filesystem/WatchDir' has no description | -| 32 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/CloseStdin' has no description | -| 33 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/CloseStdin' has no description | -| 34 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Connect' has no description | -| 35 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Connect' has no description | -| 36 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/List' has no description | -| 37 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/List' has no description | -| 38 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/SendInput' has no description | -| 39 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/SendInput' has no description | -| 40 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/SendSignal' has no description | -| 41 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/SendSignal' has no description | -| 42 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Start' has no description | -| 43 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Start' has no description | -| 44 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/StreamInput' has no description | -| 45 | missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/StreamInput' has no description | -| 46 | missing_param_description | Parameter 'Connect-Protocol-Version' in 'POST /process.Process/Update' has no description | -| 47 | 
missing_param_description | Parameter 'Connect-Timeout-Ms' in 'POST /process.Process/Update' has no description | -| 48 | missing_schema_description | Property 'fields' in schema 'SandboxLogEntry' has no description | -| 49 | missing_schema_description | Property 'volumeMounts' in schema 'SandboxDetail' has no description | -| 50 | missing_schema_description | Property 'volumeMounts' in schema 'ListedSandbox' has no description | -| 51 | missing_schema_description | Property 'sandboxes' in schema 'SandboxesWithMetrics' has no description | -| 52 | missing_schema_description | Property 'volumeMounts' in schema 'NewSandbox' has no description | -| 53 | missing_schema_description | Property 'createdBy' in schema 'Template' has no description | -| 54 | missing_schema_description | Property 'createdBy' in schema 'TemplateLegacy' has no description | -| 55 | naming_inconsistency | Mixed naming: camelCase params (alias, cursor, direction, end, hash) and snake_case params (sandbox_ids) | -| 56 | truncated_description | Possible truncated description: '...Filter sandboxes by one or more states' | -| 57 | truncated_description | Possible truncated description: '...d list of sandbox IDs to get metrics for' | -| 58 | truncated_description | Possible truncated description: '... that should be returned in milliseconds' | -| 59 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | -| 60 | truncated_description | Possible truncated description: '...hat should be returned with the template' | -| 61 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | -| 62 | truncated_description | Possible truncated description: '... 
that should be returned in milliseconds' | -| 63 | truncated_description | Possible truncated description: '...m number of logs that should be returned' | -| 64 | truncated_description | Possible truncated description: '...of the logs that should be returned from' | -| 65 | truncated_description | Possible truncated description: '...Metric to retrieve the maximum value for' | - -## Streaming Endpoints - -Document what was tested and what could not be validated for each of the 4 streaming endpoints. - -| Endpoint | What was tested | Limitations | -|----------|----------------|-------------| -| POST /filesystem.Filesystem/WatchDir | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | -| POST /process.Process/Connect | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | -| POST /process.Process/Start | Initial HTTP response captured | Server-streaming: only first frame via stdlib urllib | -| POST /process.Process/StreamInput | Initial HTTP request sent | Client-streaming: cannot maintain stream via stdlib urllib | - -## Deprecated Endpoints - -For each deprecated endpoint: does it still work? What does the spec say the replacement is? - -| Endpoint | Still works? 
| Replacement | Notes | -|----------|-------------|-------------|-------| -| GET /sandboxes/{sandboxID}/logs | Yes | N/A (v2 endpoint doesn't exist) | v1 returns 200 | -| POST /sandboxes/{sandboxID}/resume | Yes | POST /sandboxes/{sandboxID}/connect | Returns Sandbox schema | -| POST /v2/templates | Yes | POST /v3/templates | v2 requires alias field | -| POST /templates | Needs Bearer | POST /v3/templates | Uses AccessTokenAuth | -| POST /templates/{templateID} | Needs Bearer | POST /v3/templates | Rebuild, uses AccessTokenAuth | -| PATCH /templates/{templateID} | Yes | PATCH /v2/templates/{templateID} | Update template | -| POST /templates/{templateID}/builds/{buildID} | Needs Bearer | POST /v2/.../builds/{buildID} | Start build | - -## Untested Scenarios - -List any endpoints or scenarios you could not test, and why. - -| Endpoint | Reason | -|----------|--------| -| Rate limiting (429) | Cannot safely trigger without affecting quota | -| Conflict (409) | Requires specific data state | -| Internal errors (500) | Cannot reliably reproduce | From f3a2039b2bac4757ef0b62490c0d86f9d2f8856c Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 2 Mar 2026 16:42:00 +0100 Subject: [PATCH 33/37] Hide scrollbar on sidebar API endpoint rows with deprecated badges --- style.css | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/style.css b/style.css index 9f3d2c36..42c4455d 100644 --- a/style.css +++ b/style.css @@ -91,8 +91,12 @@ code { border-color: rgba(255, 136, 0, 0.6) !important; } -/* Hide deprecated badges in the sidebar */ -#sidebar a .overflow-x-hidden > .shrink-0 { - display: none !important; +/* Hide scrollbar on sidebar API endpoint rows with deprecated badges */ +#sidebar a .overflow-x-hidden { + scrollbar-width: none !important; + -ms-overflow-style: none !important; } +#sidebar a .overflow-x-hidden::-webkit-scrollbar { + display: none !important; +} From 6ea99a4127b36b1b10a9ef2c9a0b4c67911de5c2 Mon Sep 17 00:00:00 2001 From: Tomas 
Beran Date: Mon, 2 Mar 2026 17:17:07 +0100 Subject: [PATCH 34/37] edit workflow --- .github/workflows/api-reference-validation.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml index 9cd3ed85..c30c5bbb 100644 --- a/.github/workflows/api-reference-validation.yml +++ b/.github/workflows/api-reference-validation.yml @@ -4,7 +4,10 @@ on: # schedule: # # Every Thursday at 8 PM UTC # - cron: '0 20 * * 4' - workflow_dispatch: + pull_request: + # Sequence of patterns matched against refs/heads + branches: + - main concurrency: group: api-reference-validation From 171c2bf76047ccae132125360e9f8a967e819c96 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Mon, 2 Mar 2026 17:26:54 +0100 Subject: [PATCH 35/37] test true --- .github/workflows/api-reference-validation.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml index c30c5bbb..def41c2e 100644 --- a/.github/workflows/api-reference-validation.yml +++ b/.github/workflows/api-reference-validation.yml @@ -43,7 +43,7 @@ jobs: run: | if diff -q openapi-public.yml openapi-generated.yml > /dev/null 2>&1; then echo "Spec is up to date — nothing to do" - echo "changed=false" >> $GITHUB_OUTPUT + echo "changed=true" >> $GITHUB_OUTPUT else echo "Spec has drifted from source" echo "changed=true" >> $GITHUB_OUTPUT @@ -99,9 +99,6 @@ jobs: git checkout -b "$BRANCH" git add openapi-public.yml - if [ -f openapi-validation-report.md ]; then - git add openapi-validation-report.md - fi git commit -m "docs: update openapi-public.yml from source specs $(date +%Y-%m-%d)" git push -u origin "$BRANCH" From af07c2ec966cdfd95f2beba9d0c4f4fab23502c4 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 3 Mar 2026 15:10:22 +0100 Subject: [PATCH 36/37] works, back to false --- 
.github/workflows/api-reference-validation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml index def41c2e..34a14483 100644 --- a/.github/workflows/api-reference-validation.yml +++ b/.github/workflows/api-reference-validation.yml @@ -43,7 +43,7 @@ jobs: run: | if diff -q openapi-public.yml openapi-generated.yml > /dev/null 2>&1; then echo "Spec is up to date — nothing to do" - echo "changed=true" >> $GITHUB_OUTPUT + echo "changed=false" >> $GITHUB_OUTPUT else echo "Spec has drifted from source" echo "changed=true" >> $GITHUB_OUTPUT From 35eb5243a7067239562566239195650b1e890a20 Mon Sep 17 00:00:00 2001 From: Tomas Beran Date: Tue, 3 Mar 2026 15:11:37 +0100 Subject: [PATCH 37/37] Make workflow run on schedule (disabled for now) --- .github/workflows/api-reference-validation.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/api-reference-validation.yml b/.github/workflows/api-reference-validation.yml index 34a14483..578b7ebd 100644 --- a/.github/workflows/api-reference-validation.yml +++ b/.github/workflows/api-reference-validation.yml @@ -4,10 +4,7 @@ on: # schedule: # # Every Thursday at 8 PM UTC # - cron: '0 20 * * 4' - pull_request: - # Sequence of patterns matched against refs/heads - branches: - - main + workflow_dispatch: concurrency: group: api-reference-validation