diff --git a/.codespellignore b/.codespellignore
index 546a19270..835c0e538 100644
--- a/.codespellignore
+++ b/.codespellignore
@@ -1 +1,3 @@
iTerm
+iTerm2
+psuedo
\ No newline at end of file
diff --git a/.codespellrc b/.codespellrc
index da831d895..84b4495e3 100644
--- a/.codespellrc
+++ b/.codespellrc
@@ -3,4 +3,4 @@
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
-ignore-words-list = ratatui,ser
+ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
diff --git a/.github/actions/linux-code-sign/action.yml b/.github/actions/linux-code-sign/action.yml
new file mode 100644
index 000000000..5a117b080
--- /dev/null
+++ b/.github/actions/linux-code-sign/action.yml
@@ -0,0 +1,44 @@
+name: linux-code-sign
+description: Sign Linux artifacts with cosign.
+inputs:
+ target:
+ description: Target triple for the artifacts to sign.
+ required: true
+ artifacts-dir:
+ description: Absolute path to the directory containing built binaries to sign.
+ required: true
+
+runs:
+ using: composite
+ steps:
+ - name: Install cosign
+ uses: sigstore/cosign-installer@v3.7.0
+
+ - name: Cosign Linux artifacts
+ shell: bash
+ env:
+ COSIGN_EXPERIMENTAL: "1"
+ COSIGN_YES: "true"
+ COSIGN_OIDC_CLIENT_ID: "sigstore"
+ COSIGN_OIDC_ISSUER: "https://oauth2.sigstore.dev/auth"
+ run: |
+ set -euo pipefail
+
+ dest="${{ inputs.artifacts-dir }}"
+ if [[ ! -d "$dest" ]]; then
+ echo "Destination $dest does not exist"
+ exit 1
+ fi
+
+ for binary in codex codex-responses-api-proxy; do
+ artifact="${dest}/${binary}"
+ if [[ ! -f "$artifact" ]]; then
+ echo "Binary $artifact not found"
+ exit 1
+ fi
+
+ cosign sign-blob \
+ --yes \
+ --bundle "${artifact}.sigstore" \
+ "$artifact"
+ done
diff --git a/.github/actions/macos-code-sign/action.yml b/.github/actions/macos-code-sign/action.yml
new file mode 100644
index 000000000..75b3a2ba2
--- /dev/null
+++ b/.github/actions/macos-code-sign/action.yml
@@ -0,0 +1,246 @@
+name: macos-code-sign
+description: Configure, sign, notarize, and clean up macOS code signing artifacts.
+inputs:
+ target:
+ description: Rust compilation target triple (e.g. aarch64-apple-darwin).
+ required: true
+ sign-binaries:
+ description: Whether to sign and notarize the macOS binaries.
+ required: false
+ default: "true"
+ sign-dmg:
+ description: Whether to sign and notarize the macOS dmg.
+ required: false
+ default: "true"
+ apple-certificate:
+ description: Base64-encoded Apple signing certificate (P12).
+ required: true
+ apple-certificate-password:
+ description: Password for the signing certificate.
+ required: true
+ apple-notarization-key-p8:
+ description: Base64-encoded Apple notarization key (P8).
+ required: true
+ apple-notarization-key-id:
+ description: Apple notarization key ID.
+ required: true
+ apple-notarization-issuer-id:
+ description: Apple notarization issuer ID.
+ required: true
+runs:
+ using: composite
+ steps:
+ - name: Configure Apple code signing
+ shell: bash
+ env:
+ KEYCHAIN_PASSWORD: actions
+ APPLE_CERTIFICATE: ${{ inputs.apple-certificate }}
+ APPLE_CERTIFICATE_PASSWORD: ${{ inputs.apple-certificate-password }}
+ run: |
+ set -euo pipefail
+
+ if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
+ echo "APPLE_CERTIFICATE is required for macOS signing"
+ exit 1
+ fi
+
+ if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
+ echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
+ exit 1
+ fi
+
+ cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
+ echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"
+
+ keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
+ security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
+ security set-keychain-settings -lut 21600 "$keychain_path"
+ security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
+
+ keychain_args=()
+ cleanup_keychain() {
+ if ((${#keychain_args[@]} > 0)); then
+ security list-keychains -s "${keychain_args[@]}" || true
+ security default-keychain -s "${keychain_args[0]}" || true
+ else
+ security list-keychains -s || true
+ fi
+ if [[ -f "$keychain_path" ]]; then
+ security delete-keychain "$keychain_path" || true
+ fi
+ }
+
+ while IFS= read -r keychain; do
+ [[ -n "$keychain" ]] && keychain_args+=("$keychain")
+ done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
+
+ if ((${#keychain_args[@]} > 0)); then
+ security list-keychains -s "$keychain_path" "${keychain_args[@]}"
+ else
+ security list-keychains -s "$keychain_path"
+ fi
+
+ security default-keychain -s "$keychain_path"
+ security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
+ security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null
+
+ codesign_hashes=()
+ while IFS= read -r hash; do
+ [[ -n "$hash" ]] && codesign_hashes+=("$hash")
+ done < <(security find-identity -v -p codesigning "$keychain_path" \
+ | sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
+ | sort -u)
+
+ if ((${#codesign_hashes[@]} == 0)); then
+ echo "No signing identities found in $keychain_path"
+ cleanup_keychain
+ rm -f "$cert_path"
+ exit 1
+ fi
+
+ if ((${#codesign_hashes[@]} > 1)); then
+ echo "Multiple signing identities found in $keychain_path:"
+ printf ' %s\n' "${codesign_hashes[@]}"
+ cleanup_keychain
+ rm -f "$cert_path"
+ exit 1
+ fi
+
+ APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"
+
+ rm -f "$cert_path"
+
+ echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
+ echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
+ echo "::add-mask::$APPLE_CODESIGN_IDENTITY"
+
+ - name: Sign macOS binaries
+ if: ${{ inputs.sign-binaries == 'true' }}
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
+ echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
+ exit 1
+ fi
+
+ keychain_args=()
+ if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
+ keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
+ fi
+
+ for binary in codex codex-responses-api-proxy; do
+ path="codex-rs/target/${{ inputs.target }}/release/${binary}"
+ codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
+ done
+
+ - name: Notarize macOS binaries
+ if: ${{ inputs.sign-binaries == 'true' }}
+ shell: bash
+ env:
+ APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
+ APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
+ APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
+ run: |
+ set -euo pipefail
+
+ for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
+ if [[ -z "${!var:-}" ]]; then
+ echo "$var is required for notarization"
+ exit 1
+ fi
+ done
+
+ notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
+ echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
+ cleanup_notary() {
+ rm -f "$notary_key_path"
+ }
+ trap cleanup_notary EXIT
+
+ source "$GITHUB_ACTION_PATH/notary_helpers.sh"
+
+ notarize_binary() {
+ local binary="$1"
+ local source_path="codex-rs/target/${{ inputs.target }}/release/${binary}"
+ local archive_path="${RUNNER_TEMP}/${binary}.zip"
+
+ if [[ ! -f "$source_path" ]]; then
+ echo "Binary $source_path not found"
+ exit 1
+ fi
+
+ rm -f "$archive_path"
+ ditto -c -k --keepParent "$source_path" "$archive_path"
+
+ notarize_submission "$binary" "$archive_path" "$notary_key_path"
+ }
+
+ notarize_binary "codex"
+ notarize_binary "codex-responses-api-proxy"
+
+ - name: Sign and notarize macOS dmg
+ if: ${{ inputs.sign-dmg == 'true' }}
+ shell: bash
+ env:
+ APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
+ APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
+ APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
+ run: |
+ set -euo pipefail
+
+ for var in APPLE_CODESIGN_IDENTITY APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
+ if [[ -z "${!var:-}" ]]; then
+ echo "$var is required"
+ exit 1
+ fi
+ done
+
+ notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
+ echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
+ cleanup_notary() {
+ rm -f "$notary_key_path"
+ }
+ trap cleanup_notary EXIT
+
+ source "$GITHUB_ACTION_PATH/notary_helpers.sh"
+
+ dmg_path="codex-rs/target/${{ inputs.target }}/release/codex-${{ inputs.target }}.dmg"
+
+ if [[ ! -f "$dmg_path" ]]; then
+ echo "dmg $dmg_path not found"
+ exit 1
+ fi
+
+ keychain_args=()
+ if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
+ keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
+ fi
+
+ codesign --force --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$dmg_path"
+ notarize_submission "codex-${{ inputs.target }}.dmg" "$dmg_path" "$notary_key_path"
+ xcrun stapler staple "$dmg_path"
+
+ - name: Remove signing keychain
+ if: ${{ always() }}
+ shell: bash
+ env:
+ APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
+ run: |
+ set -euo pipefail
+ if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
+ keychain_args=()
+ while IFS= read -r keychain; do
+ [[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
+ [[ -n "$keychain" ]] && keychain_args+=("$keychain")
+ done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
+ if ((${#keychain_args[@]} > 0)); then
+ security list-keychains -s "${keychain_args[@]}"
+ security default-keychain -s "${keychain_args[0]}"
+ fi
+
+ if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
+ security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
+ fi
+ fi
diff --git a/.github/actions/macos-code-sign/notary_helpers.sh b/.github/actions/macos-code-sign/notary_helpers.sh
new file mode 100644
index 000000000..ad9757fe3
--- /dev/null
+++ b/.github/actions/macos-code-sign/notary_helpers.sh
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+notarize_submission() {
+ local label="$1"
+ local path="$2"
+ local notary_key_path="$3"
+
+ if [[ -z "${APPLE_NOTARIZATION_KEY_ID:-}" || -z "${APPLE_NOTARIZATION_ISSUER_ID:-}" ]]; then
+ echo "APPLE_NOTARIZATION_KEY_ID and APPLE_NOTARIZATION_ISSUER_ID are required for notarization"
+ exit 1
+ fi
+
+ if [[ -z "$notary_key_path" || ! -f "$notary_key_path" ]]; then
+ echo "Notary key file $notary_key_path not found"
+ exit 1
+ fi
+
+ if [[ ! -f "$path" ]]; then
+ echo "Notarization payload $path not found"
+ exit 1
+ fi
+
+ local submission_json
+ submission_json=$(xcrun notarytool submit "$path" \
+ --key "$notary_key_path" \
+ --key-id "$APPLE_NOTARIZATION_KEY_ID" \
+ --issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
+ --output-format json \
+ --wait)
+
+ local status submission_id
+ status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
+ submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')
+
+ if [[ -z "$submission_id" ]]; then
+ echo "Failed to retrieve submission ID for $label"
+ exit 1
+ fi
+
+ echo "::notice title=Notarization::$label submission ${submission_id} completed with status ${status}"
+
+ if [[ "$status" != "Accepted" ]]; then
+ echo "Notarization failed for ${label} (submission ${submission_id}, status ${status})"
+ exit 1
+ fi
+}
diff --git a/.github/actions/windows-code-sign/action.yml b/.github/actions/windows-code-sign/action.yml
new file mode 100644
index 000000000..f6cf73791
--- /dev/null
+++ b/.github/actions/windows-code-sign/action.yml
@@ -0,0 +1,57 @@
+name: windows-code-sign
+description: Sign Windows binaries with Azure Trusted Signing.
+inputs:
+ target:
+ description: Target triple for the artifacts to sign.
+ required: true
+ client-id:
+ description: Azure Trusted Signing client ID.
+ required: true
+ tenant-id:
+ description: Azure tenant ID for Trusted Signing.
+ required: true
+ subscription-id:
+ description: Azure subscription ID for Trusted Signing.
+ required: true
+ endpoint:
+ description: Azure Trusted Signing endpoint.
+ required: true
+ account-name:
+ description: Azure Trusted Signing account name.
+ required: true
+ certificate-profile-name:
+ description: Certificate profile name for signing.
+ required: true
+
+runs:
+ using: composite
+ steps:
+ - name: Azure login for Trusted Signing (OIDC)
+ uses: azure/login@v2
+ with:
+ client-id: ${{ inputs.client-id }}
+ tenant-id: ${{ inputs.tenant-id }}
+ subscription-id: ${{ inputs.subscription-id }}
+
+ - name: Sign Windows binaries with Azure Trusted Signing
+ uses: azure/trusted-signing-action@v0
+ with:
+ endpoint: ${{ inputs.endpoint }}
+ trusted-signing-account-name: ${{ inputs.account-name }}
+ certificate-profile-name: ${{ inputs.certificate-profile-name }}
+ exclude-environment-credential: true
+ exclude-workload-identity-credential: true
+ exclude-managed-identity-credential: true
+ exclude-shared-token-cache-credential: true
+ exclude-visual-studio-credential: true
+ exclude-visual-studio-code-credential: true
+ exclude-azure-cli-credential: false
+ exclude-azure-powershell-credential: true
+ exclude-azure-developer-cli-credential: true
+ exclude-interactive-browser-credential: true
+ cache-dependencies: false
+ files: |
+ ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex.exe
+ ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-responses-api-proxy.exe
+ ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-windows-sandbox-setup.exe
+ ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-command-runner.exe
diff --git a/.github/codex-cli-login.png b/.github/codex-cli-login.png
deleted file mode 100644
index 0d4543ee1..000000000
Binary files a/.github/codex-cli-login.png and /dev/null differ
diff --git a/.github/codex-cli-permissions.png b/.github/codex-cli-permissions.png
deleted file mode 100644
index bb48e4a53..000000000
Binary files a/.github/codex-cli-permissions.png and /dev/null differ
diff --git a/.github/codex-cli-splash.png b/.github/codex-cli-splash.png
index 06e625ca4..d0f50e55b 100644
Binary files a/.github/codex-cli-splash.png and b/.github/codex-cli-splash.png differ
diff --git a/.github/demo.gif b/.github/demo.gif
deleted file mode 100644
index 12752744c..000000000
Binary files a/.github/demo.gif and /dev/null differ
diff --git a/.github/dotslash-config.json b/.github/dotslash-config.json
index 5e28cdf20..00e9032cf 100644
--- a/.github/dotslash-config.json
+++ b/.github/dotslash-config.json
@@ -55,6 +55,30 @@
"path": "codex-responses-api-proxy.exe"
}
}
+ },
+ "codex-command-runner": {
+ "platforms": {
+ "windows-x86_64": {
+ "regex": "^codex-command-runner-x86_64-pc-windows-msvc\\.exe\\.zst$",
+ "path": "codex-command-runner.exe"
+ },
+ "windows-aarch64": {
+ "regex": "^codex-command-runner-aarch64-pc-windows-msvc\\.exe\\.zst$",
+ "path": "codex-command-runner.exe"
+ }
+ }
+ },
+ "codex-windows-sandbox-setup": {
+ "platforms": {
+ "windows-x86_64": {
+ "regex": "^codex-windows-sandbox-setup-x86_64-pc-windows-msvc\\.exe\\.zst$",
+ "path": "codex-windows-sandbox-setup.exe"
+ },
+ "windows-aarch64": {
+ "regex": "^codex-windows-sandbox-setup-aarch64-pc-windows-msvc\\.exe\\.zst$",
+ "path": "codex-windows-sandbox-setup.exe"
+ }
+ }
}
}
}
diff --git a/.github/workflows/cargo-deny.yml b/.github/workflows/cargo-deny.yml
new file mode 100644
index 000000000..b32e366fb
--- /dev/null
+++ b/.github/workflows/cargo-deny.yml
@@ -0,0 +1,22 @@
+name: cargo-deny
+
+on: workflow_dispatch
+
+jobs:
+ cargo-deny:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: ./codex-rs
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+
+ - name: Install Rust toolchain
+ uses: dtolnay/rust-toolchain@stable
+
+ - name: Run cargo-deny
+ uses: EmbarkStudios/cargo-deny-action@v2
+ with:
+ rust-version: stable
+ manifest-path: ./codex-rs/Cargo.toml
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 38773bb9f..1ee3fea52 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,8 +1,6 @@
name: ci
-on:
- pull_request: {}
- push: { branches: [main] }
+on: workflow_dispatch
jobs:
build-test:
@@ -12,7 +10,7 @@ jobs:
NODE_OPTIONS: --max-old-space-size=4096
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v4
@@ -20,7 +18,7 @@ jobs:
run_install: false
- name: Setup Node.js
- uses: actions/setup-node@v5
+ uses: actions/setup-node@v6
with:
node-version: 22
@@ -36,7 +34,8 @@ jobs:
GH_TOKEN: ${{ github.token }}
run: |
set -euo pipefail
- CODEX_VERSION=0.40.0
+ # Use a rust-release version that includes all native binaries.
+ CODEX_VERSION=0.74.0
OUTPUT_DIR="${RUNNER_TEMP}"
python3 ./scripts/stage_npm_packages.py \
--release-version "$CODEX_VERSION" \
@@ -46,7 +45,7 @@ jobs:
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
- name: Upload staged npm package artifact
- uses: actions/upload-artifact@v5
+ uses: actions/upload-artifact@v6
with:
name: codex-npm-staging
path: ${{ steps.stage_npm_package.outputs.pack_output }}
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
index 17d54f214..248d38d40 100644
--- a/.github/workflows/cla.yml
+++ b/.github/workflows/cla.yml
@@ -1,9 +1,5 @@
name: CLA Assistant
-on:
- issue_comment:
- types: [created]
- pull_request_target:
- types: [opened, closed, synchronize]
+on: workflow_dispatch
permissions:
actions: write
@@ -46,6 +42,4 @@ jobs:
path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
path-to-signatures: signatures/cla.json
branch: cla-signatures
- allowlist: |
- codex
- dependabot[bot]
+ allowlist: codex,dependabot,dependabot[bot],github-actions[bot]
diff --git a/.github/workflows/close-stale-contributor-prs.yml b/.github/workflows/close-stale-contributor-prs.yml
index e01bc3881..28c7dbc5b 100644
--- a/.github/workflows/close-stale-contributor-prs.yml
+++ b/.github/workflows/close-stale-contributor-prs.yml
@@ -1,9 +1,6 @@
name: Close stale contributor PRs
-on:
- workflow_dispatch:
- schedule:
- - cron: "0 6 * * *"
+on: workflow_dispatch
permissions:
contents: read
@@ -12,6 +9,8 @@ permissions:
jobs:
close-stale-contributor-prs:
+  # Prevent runs on forks (workflow is manual-only; saves Actions minutes)
+ if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- name: Close inactive PRs from contributors
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
index c03658132..9374c8e53 100644
--- a/.github/workflows/codespell.yml
+++ b/.github/workflows/codespell.yml
@@ -2,11 +2,7 @@
---
name: Codespell
-on:
- push:
- branches: [main]
- pull_request:
- branches: [main]
+on: workflow_dispatch
permissions:
contents: read
@@ -18,7 +14,7 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v5
+ uses: actions/checkout@v6
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
diff --git a/.github/workflows/issue-deduplicator.yml b/.github/workflows/issue-deduplicator.yml
index 579b6a368..ae9154123 100644
--- a/.github/workflows/issue-deduplicator.yml
+++ b/.github/workflows/issue-deduplicator.yml
@@ -1,22 +1,19 @@
name: Issue Deduplicator
-on:
- issues:
- types:
- - opened
- - labeled
+on: workflow_dispatch
jobs:
gather-duplicates:
name: Identify potential duplicates
- if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
+ # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
+ if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate'))
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- name: Prepare Codex inputs
env:
@@ -46,7 +43,6 @@ jobs:
with:
openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
allow-users: "*"
- model: gpt-5.1
prompt: |
You are an assistant that triages new GitHub issues by identifying potential duplicates.
diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml
index 39f9d47f1..25b22fd24 100644
--- a/.github/workflows/issue-labeler.yml
+++ b/.github/workflows/issue-labeler.yml
@@ -1,22 +1,19 @@
name: Issue Labeler
-on:
- issues:
- types:
- - opened
- - labeled
+on: workflow_dispatch
jobs:
gather-labels:
name: Generate label suggestions
- if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
+ # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
+ if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label'))
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- id: codex
uses: openai/codex-action@main
diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml
index 0bd91ca53..1af0bf2f4 100644
--- a/.github/workflows/rust-ci.yml
+++ b/.github/workflows/rust-ci.yml
@@ -17,7 +17,7 @@ jobs:
codex: ${{ steps.detect.outputs.codex }}
workflows: ${{ steps.detect.outputs.workflows }}
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Detect changed paths (no external action)
@@ -28,9 +28,11 @@ jobs:
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
BASE_SHA='${{ github.event.pull_request.base.sha }}'
+ HEAD_SHA='${{ github.event.pull_request.head.sha }}'
echo "Base SHA: $BASE_SHA"
- # List files changed between base and current HEAD (merge-base aware)
- mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)
+ echo "Head SHA: $HEAD_SHA"
+ # List files changed between base and PR head
+ mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
else
# On push / manual runs, default to running everything
files=("codex-rs/force" ".github/force")
@@ -56,7 +58,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
with:
components: rustfmt
@@ -74,7 +76,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
@@ -147,24 +149,33 @@ jobs:
profile: release
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
components: clippy
+ - name: Compute lockfile hash
+ id: lockhash
+ working-directory: codex-rs
+ shell: bash
+ run: |
+ set -euo pipefail
+ echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+ echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+
# Explicit cache restore: split cargo home vs target, so we can
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
id: cache_cargo_home_restore
- uses: actions/cache/restore@v4
+ uses: actions/cache/restore@v5
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
- key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+ key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -198,12 +209,12 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
- uses: actions/cache/restore@v4
+ uses: actions/cache/restore@v5
with:
path: ${{ github.workspace }}/.sccache/
- key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+ key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
- sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
+ sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
@@ -217,7 +228,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Restore APT cache (musl)
id: cache_apt_restore
- uses: actions/cache/restore@v4
+ uses: actions/cache/restore@v5
with:
path: |
/var/cache/apt
@@ -271,22 +282,22 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
- uses: actions/cache/save@v4
+ uses: actions/cache/save@v5
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
- key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+ key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
- uses: actions/cache/save@v4
+ uses: actions/cache/save@v5
with:
path: ${{ github.workspace }}/.sccache/
- key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+ key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
@@ -308,7 +319,7 @@ jobs:
- name: Save APT cache (musl)
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
continue-on-error: true
- uses: actions/cache/save@v4
+ uses: actions/cache/save@v5
with:
path: |
/var/cache/apt
@@ -359,21 +370,51 @@ jobs:
profile: dev
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
+
+ # We have been running out of space when running this job on Linux for
+ # x86_64-unknown-linux-gnu, so remove some unnecessary dependencies.
+ - name: Remove unnecessary dependencies to save space
+ if: ${{ startsWith(matrix.runner, 'ubuntu') }}
+ shell: bash
+ run: |
+ set -euo pipefail
+ sudo rm -rf \
+ /usr/local/lib/android \
+ /usr/share/dotnet \
+ /usr/local/share/boost \
+ /usr/local/lib/node_modules \
+ /opt/ghc
+ sudo apt-get remove -y docker.io docker-compose podman buildah
+
+ # Some integration tests rely on DotSlash being installed.
+ # See https://github.com/openai/codex/pull/7617.
+ - name: Install DotSlash
+ uses: facebook/install-dotslash@v2
+
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
+ - name: Compute lockfile hash
+ id: lockhash
+ working-directory: codex-rs
+ shell: bash
+ run: |
+ set -euo pipefail
+ echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+ echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+
- name: Restore cargo home cache
id: cache_cargo_home_restore
- uses: actions/cache/restore@v4
+ uses: actions/cache/restore@v5
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
- key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+ key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -406,12 +447,12 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
- uses: actions/cache/restore@v4
+ uses: actions/cache/restore@v5
with:
path: ${{ github.workspace }}/.sccache/
- key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+ key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
restore-keys: |
- sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
+ sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -429,22 +470,22 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
- uses: actions/cache/save@v4
+ uses: actions/cache/save@v5
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
- key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+ key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
- uses: actions/cache/save@v4
+ uses: actions/cache/save@v5
with:
path: ${{ github.workspace }}/.sccache/
- key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
+ key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
diff --git a/.github/workflows/rust-release-prepare.yml b/.github/workflows/rust-release-prepare.yml
new file mode 100644
index 000000000..f4c6970e4
--- /dev/null
+++ b/.github/workflows/rust-release-prepare.yml
@@ -0,0 +1,50 @@
+name: rust-release-prepare
+on: workflow_dispatch
+
+concurrency:
+ group: ${{ github.workflow }}
+ cancel-in-progress: false
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ prepare:
+    # Prevent runs on forks (no secrets, wastes Actions minutes)
+ if: github.repository == 'openai/codex'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ with:
+ ref: main
+ fetch-depth: 0
+
+ - name: Update models.json
+ env:
+ OPENAI_API_KEY: ${{ secrets.CODEX_OPENAI_API_KEY }}
+ run: |
+ set -euo pipefail
+
+ client_version="99.99.99"
+ terminal_info="github-actions"
+ user_agent="codex_cli_rs/99.99.99 (Linux $(uname -r); $(uname -m)) ${terminal_info}"
+ base_url="${OPENAI_BASE_URL:-https://chatgpt.com/backend-api/codex}"
+
+ headers=(
+ -H "Authorization: Bearer ${OPENAI_API_KEY}"
+ -H "User-Agent: ${user_agent}"
+ )
+
+ url="${base_url%/}/models?client_version=${client_version}"
+ curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/core/models.json
+
+ - name: Open pull request (if changed)
+ uses: peter-evans/create-pull-request@v8
+ with:
+ commit-message: "Update models.json"
+ title: "Update models.json"
+ body: "Automated update of models.json."
+ branch: "bot/update-models-json"
+ reviewers: "pakrym-oai,aibrahim-oai"
+ delete-branch: true
diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml
index 6f27fbf54..6269188ed 100644
--- a/.github/workflows/rust-release.yml
+++ b/.github/workflows/rust-release.yml
@@ -6,10 +6,7 @@
# ```
name: rust-release
-on:
- push:
- tags:
- - "rust-v*.*.*"
+on: workflow_dispatch
concurrency:
group: ${{ github.workflow }}
@@ -19,7 +16,7 @@ jobs:
tag-check:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- name: Validate tag matches Cargo.toml version
shell: bash
@@ -50,6 +47,9 @@ jobs:
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
+ permissions:
+ contents: read
+ id-token: write
defaults:
run:
working-directory: codex-rs
@@ -76,12 +76,12 @@ jobs:
target: aarch64-pc-windows-msvc
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
- - uses: actions/cache@v4
+ - uses: actions/cache@v5
with:
path: |
~/.cargo/bin/
@@ -98,176 +98,104 @@ jobs:
sudo apt-get install -y musl-tools pkg-config
- name: Cargo build
- run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
-
- - if: ${{ matrix.runner == 'macos-15-xlarge' }}
- name: Configure Apple code signing
shell: bash
- env:
- KEYCHAIN_PASSWORD: actions
- APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE_P12 }}
- APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
run: |
- set -euo pipefail
-
- if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
- echo "APPLE_CERTIFICATE is required for macOS signing"
- exit 1
+ if [[ "${{ contains(matrix.target, 'windows') }}" == 'true' ]]; then
+ cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy --bin codex-windows-sandbox-setup --bin codex-command-runner
+ else
+ cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
fi
- if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
- echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
- exit 1
- fi
+ - if: ${{ contains(matrix.target, 'linux') }}
+ name: Cosign Linux artifacts
+ uses: ./.github/actions/linux-code-sign
+ with:
+ target: ${{ matrix.target }}
+ artifacts-dir: ${{ github.workspace }}/codex-rs/target/${{ matrix.target }}/release
- cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
- echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"
-
- keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
- security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
- security set-keychain-settings -lut 21600 "$keychain_path"
- security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
-
- keychain_args=()
- cleanup_keychain() {
- if ((${#keychain_args[@]} > 0)); then
- security list-keychains -s "${keychain_args[@]}" || true
- security default-keychain -s "${keychain_args[0]}" || true
- else
- security list-keychains -s || true
- fi
- if [[ -f "$keychain_path" ]]; then
- security delete-keychain "$keychain_path" || true
- fi
- }
+ - if: ${{ contains(matrix.target, 'windows') }}
+ name: Sign Windows binaries with Azure Trusted Signing
+ uses: ./.github/actions/windows-code-sign
+ with:
+ target: ${{ matrix.target }}
+ client-id: ${{ secrets.AZURE_TRUSTED_SIGNING_CLIENT_ID }}
+ tenant-id: ${{ secrets.AZURE_TRUSTED_SIGNING_TENANT_ID }}
+ subscription-id: ${{ secrets.AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID }}
+ endpoint: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
+ account-name: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
+ certificate-profile-name: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME }}
+
+ - if: ${{ runner.os == 'macOS' }}
+ name: MacOS code signing (binaries)
+ uses: ./.github/actions/macos-code-sign
+ with:
+ target: ${{ matrix.target }}
+ sign-binaries: "true"
+ sign-dmg: "false"
+ apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
+ apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
+ apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
+ apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+ apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+
+ - if: ${{ runner.os == 'macOS' }}
+ name: Build macOS dmg
+ shell: bash
+ run: |
+ set -euo pipefail
- while IFS= read -r keychain; do
- [[ -n "$keychain" ]] && keychain_args+=("$keychain")
- done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
+ target="${{ matrix.target }}"
+ release_dir="target/${target}/release"
+ dmg_root="${RUNNER_TEMP}/codex-dmg-root"
+ volname="Codex (${target})"
+ dmg_path="${release_dir}/codex-${target}.dmg"
- if ((${#keychain_args[@]} > 0)); then
- security list-keychains -s "$keychain_path" "${keychain_args[@]}"
- else
- security list-keychains -s "$keychain_path"
- fi
+ # The previous "MacOS code signing (binaries)" step signs + notarizes the
+ # built artifacts in `${release_dir}`. This step packages *those same*
+ # signed binaries into a dmg.
+ codex_binary_path="${release_dir}/codex"
+ proxy_binary_path="${release_dir}/codex-responses-api-proxy"
- security default-keychain -s "$keychain_path"
- security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
- security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null
-
- codesign_hashes=()
- while IFS= read -r hash; do
- [[ -n "$hash" ]] && codesign_hashes+=("$hash")
- done < <(security find-identity -v -p codesigning "$keychain_path" \
- | sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
- | sort -u)
-
- if ((${#codesign_hashes[@]} == 0)); then
- echo "No signing identities found in $keychain_path"
- cleanup_keychain
- rm -f "$cert_path"
+ rm -rf "$dmg_root"
+ mkdir -p "$dmg_root"
+
+ if [[ ! -f "$codex_binary_path" ]]; then
+ echo "Binary $codex_binary_path not found"
exit 1
fi
-
- if ((${#codesign_hashes[@]} > 1)); then
- echo "Multiple signing identities found in $keychain_path:"
- printf ' %s\n' "${codesign_hashes[@]}"
- cleanup_keychain
- rm -f "$cert_path"
+ if [[ ! -f "$proxy_binary_path" ]]; then
+ echo "Binary $proxy_binary_path not found"
exit 1
fi
- APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"
-
- rm -f "$cert_path"
+ ditto "$codex_binary_path" "${dmg_root}/codex"
+ ditto "$proxy_binary_path" "${dmg_root}/codex-responses-api-proxy"
- echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
- echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
- echo "::add-mask::$APPLE_CODESIGN_IDENTITY"
-
- - if: ${{ matrix.runner == 'macos-15-xlarge' }}
- name: Sign macOS binaries
- shell: bash
- run: |
- set -euo pipefail
+ rm -f "$dmg_path"
+ hdiutil create \
+ -volname "$volname" \
+ -srcfolder "$dmg_root" \
+ -format UDZO \
+ -ov \
+ "$dmg_path"
- if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
- echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
+ if [[ ! -f "$dmg_path" ]]; then
+ echo "dmg $dmg_path not found after build"
exit 1
fi
- keychain_args=()
- if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
- keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
- fi
-
- for binary in codex codex-responses-api-proxy; do
- path="target/${{ matrix.target }}/release/${binary}"
- codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
- done
-
- - if: ${{ matrix.runner == 'macos-15-xlarge' }}
- name: Notarize macOS binaries
- shell: bash
- env:
- APPLE_NOTARIZATION_KEY_P8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
- APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
- APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
- run: |
- set -euo pipefail
-
- for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
- if [[ -z "${!var:-}" ]]; then
- echo "$var is required for notarization"
- exit 1
- fi
- done
-
- notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
- echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
- cleanup_notary() {
- rm -f "$notary_key_path"
- }
- trap cleanup_notary EXIT
-
- notarize_binary() {
- local binary="$1"
- local source_path="target/${{ matrix.target }}/release/${binary}"
- local archive_path="${RUNNER_TEMP}/${binary}.zip"
-
- if [[ ! -f "$source_path" ]]; then
- echo "Binary $source_path not found"
- exit 1
- fi
-
- rm -f "$archive_path"
- ditto -c -k --keepParent "$source_path" "$archive_path"
-
- submission_json=$(xcrun notarytool submit "$archive_path" \
- --key "$notary_key_path" \
- --key-id "$APPLE_NOTARIZATION_KEY_ID" \
- --issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
- --output-format json \
- --wait)
-
- status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
- submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')
-
- if [[ -z "$submission_id" ]]; then
- echo "Failed to retrieve submission ID for $binary"
- exit 1
- fi
-
- echo "::notice title=Notarization::$binary submission ${submission_id} completed with status ${status}"
-
- if [[ "$status" != "Accepted" ]]; then
- echo "Notarization failed for ${binary} (submission ${submission_id}, status ${status})"
- exit 1
- fi
- }
-
- notarize_binary "codex"
- notarize_binary "codex-responses-api-proxy"
+ - if: ${{ runner.os == 'macOS' }}
+ name: MacOS code signing (dmg)
+ uses: ./.github/actions/macos-code-sign
+ with:
+ target: ${{ matrix.target }}
+ sign-binaries: "false"
+ sign-dmg: "true"
+ apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
+ apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
+ apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
+ apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+ apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
- name: Stage artifacts
shell: bash
@@ -278,11 +206,22 @@ jobs:
if [[ "${{ matrix.runner }}" == windows* ]]; then
cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
+ cp target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe "$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
+ cp target/${{ matrix.target }}/release/codex-command-runner.exe "$dest/codex-command-runner-${{ matrix.target }}.exe"
else
cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
fi
+ if [[ "${{ matrix.target }}" == *linux* ]]; then
+ cp target/${{ matrix.target }}/release/codex.sigstore "$dest/codex-${{ matrix.target }}.sigstore"
+ cp target/${{ matrix.target }}/release/codex-responses-api-proxy.sigstore "$dest/codex-responses-api-proxy-${{ matrix.target }}.sigstore"
+ fi
+
+ if [[ "${{ matrix.target }}" == *apple-darwin ]]; then
+ cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
+ fi
+
- if: ${{ matrix.runner == 'windows-11-arm' }}
name: Install zstd
shell: powershell
@@ -317,7 +256,12 @@ jobs:
base="$(basename "$f")"
# Skip files that are already archives (shouldn't happen, but be
# safe).
- if [[ "$base" == *.tar.gz || "$base" == *.zip ]]; then
+ if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then
+ continue
+ fi
+
+ # Don't try to compress signature bundles.
+ if [[ "$base" == *.sigstore ]]; then
continue
fi
@@ -340,30 +284,7 @@ jobs:
zstd "${zstd_args[@]}" "$dest/$base"
done
- - name: Remove signing keychain
- if: ${{ always() && matrix.runner == 'macos-15-xlarge' }}
- shell: bash
- env:
- APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
- run: |
- set -euo pipefail
- if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
- keychain_args=()
- while IFS= read -r keychain; do
- [[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
- [[ -n "$keychain" ]] && keychain_args+=("$keychain")
- done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
- if ((${#keychain_args[@]} > 0)); then
- security list-keychains -s "${keychain_args[@]}"
- security default-keychain -s "${keychain_args[0]}"
- fi
-
- if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
- security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
- fi
- fi
-
- - uses: actions/upload-artifact@v5
+ - uses: actions/upload-artifact@v6
with:
name: ${{ matrix.target }}
# Upload the per-binary .zst files as well as the new .tar.gz
@@ -371,8 +292,19 @@ jobs:
path: |
codex-rs/dist/${{ matrix.target }}/*
+ shell-tool-mcp:
+ name: shell-tool-mcp
+ needs: tag-check
+ uses: ./.github/workflows/shell-tool-mcp.yml
+ with:
+ release-tag: ${{ github.ref_name }}
+ publish: true
+ secrets: inherit
+
release:
- needs: build
+ needs:
+ - build
+ - shell-tool-mcp
name: release
runs-on: ubuntu-latest
permissions:
@@ -386,15 +318,43 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@v6
- - uses: actions/download-artifact@v4
+ - name: Generate release notes from tag commit message
+ id: release_notes
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ # On tag pushes, GITHUB_SHA may be a tag object for annotated tags;
+ # peel it to the underlying commit.
+ commit="$(git rev-parse "${GITHUB_SHA}^{commit}")"
+ notes_path="${RUNNER_TEMP}/release-notes.md"
+
+ # Use the commit message for the commit the tag points at (not the
+ # annotated tag message).
+ git log -1 --format=%B "${commit}" > "${notes_path}"
+ # Ensure trailing newline so GitHub's markdown renderer doesn't
+ # occasionally run the last line into subsequent content.
+ echo >> "${notes_path}"
+
+ echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"
+
+ - uses: actions/download-artifact@v7
with:
path: dist
- name: List
run: ls -R dist/
+ # This is a temporary fix: we should modify shell-tool-mcp.yml so these
+ # files do not end up in dist/ in the first place.
+ - name: Delete entries from dist/ that should not go in the release
+ run: |
+ rm -rf dist/shell-tool-mcp*
+
+ ls -R dist/
+
- name: Define release name
id: release_name
run: |
@@ -428,7 +388,7 @@ jobs:
run_install: false
- name: Setup Node.js for npm packaging
- uses: actions/setup-node@v5
+ uses: actions/setup-node@v6
with:
node-version: 22
@@ -452,6 +412,7 @@ jobs:
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
+ body_path: ${{ steps.release_notes.outputs.path }}
files: dist/**
# Mark as prerelease only when the version has a suffix after x.y.z
# (e.g. -alpha, -beta). Otherwise publish a normal release.
@@ -479,7 +440,7 @@ jobs:
steps:
- name: Setup Node.js
- uses: actions/setup-node@v5
+ uses: actions/setup-node@v6
with:
node-version: 22
registry-url: "https://registry.npmjs.org"
diff --git a/.github/workflows/sdk.yml b/.github/workflows/sdk.yml
index 0f3a7a194..4679be05e 100644
--- a/.github/workflows/sdk.yml
+++ b/.github/workflows/sdk.yml
@@ -1,9 +1,6 @@
name: sdk
-on:
- push:
- branches: [main]
- pull_request: {}
+on: workflow_dispatch
jobs:
sdks:
@@ -11,7 +8,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v4
@@ -19,7 +16,7 @@ jobs:
run_install: false
- name: Setup Node.js
- uses: actions/setup-node@v5
+ uses: actions/setup-node@v6
with:
node-version: 22
cache: pnpm
diff --git a/.github/workflows/shell-tool-mcp-ci.yml b/.github/workflows/shell-tool-mcp-ci.yml
new file mode 100644
index 000000000..bd98c438f
--- /dev/null
+++ b/.github/workflows/shell-tool-mcp-ci.yml
@@ -0,0 +1,36 @@
+name: shell-tool-mcp CI
+
+on: workflow_dispatch
+
+env:
+ NODE_VERSION: 22
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Setup pnpm
+ uses: pnpm/action-setup@v4
+ with:
+ run_install: false
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v6
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+ cache: "pnpm"
+
+ - name: Install dependencies
+ run: pnpm install --frozen-lockfile
+
+ - name: Format check
+ run: pnpm --filter @openai/codex-shell-tool-mcp run format
+
+ - name: Run tests
+ run: pnpm --filter @openai/codex-shell-tool-mcp test
+
+ - name: Build
+ run: pnpm --filter @openai/codex-shell-tool-mcp run build
diff --git a/.github/workflows/shell-tool-mcp.yml b/.github/workflows/shell-tool-mcp.yml
new file mode 100644
index 000000000..58c266dcc
--- /dev/null
+++ b/.github/workflows/shell-tool-mcp.yml
@@ -0,0 +1,417 @@
+name: shell-tool-mcp
+
+on:
+  workflow_dispatch:
+    inputs:
+      release-version:
+        description: Version to publish (derived from release-tag or the ref when omitted).
+        required: false
+        type: string
+      release-tag:
+        description: Release tag (e.g. rust-v1.2.3).
+        required: false
+        type: string
+      publish:
+        description: Publish the packaged tarball to npm.
+        required: false
+        type: boolean
+        default: false
+  workflow_call:
+    inputs:
+      release-version:
+        required: false
+        type: string
+      release-tag:
+        required: false
+        type: string
+      publish:
+        required: false
+        type: boolean
+        default: false
+
+env:
+ NODE_VERSION: 22
+
+jobs:
+ metadata:
+ runs-on: ubuntu-latest
+ outputs:
+ version: ${{ steps.compute.outputs.version }}
+ release_tag: ${{ steps.compute.outputs.release_tag }}
+ should_publish: ${{ steps.compute.outputs.should_publish }}
+ npm_tag: ${{ steps.compute.outputs.npm_tag }}
+ steps:
+ - name: Compute version and tags
+ id: compute
+ run: |
+ set -euo pipefail
+
+ version="${{ inputs.release-version }}"
+ release_tag="${{ inputs.release-tag }}"
+
+ if [[ -z "$version" ]]; then
+ if [[ -n "$release_tag" && "$release_tag" =~ ^rust-v.+ ]]; then
+ version="${release_tag#rust-v}"
+ elif [[ "${GITHUB_REF_NAME:-}" =~ ^rust-v.+ ]]; then
+ version="${GITHUB_REF_NAME#rust-v}"
+ release_tag="${GITHUB_REF_NAME}"
+ else
+ echo "release-version is required when GITHUB_REF_NAME is not a rust-v tag."
+ exit 1
+ fi
+ fi
+
+ if [[ -z "$release_tag" ]]; then
+ release_tag="rust-v${version}"
+ fi
+
+ npm_tag=""
+ should_publish="false"
+ if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+ should_publish="true"
+ elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
+ should_publish="true"
+ npm_tag="alpha"
+ fi
+
+ echo "version=${version}" >> "$GITHUB_OUTPUT"
+ echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT"
+ echo "npm_tag=${npm_tag}" >> "$GITHUB_OUTPUT"
+ echo "should_publish=${should_publish}" >> "$GITHUB_OUTPUT"
+
+ rust-binaries:
+ name: Build Rust - ${{ matrix.target }}
+ needs: metadata
+ runs-on: ${{ matrix.runner }}
+ timeout-minutes: 30
+ defaults:
+ run:
+ working-directory: codex-rs
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - runner: macos-15-xlarge
+ target: aarch64-apple-darwin
+ - runner: macos-15-xlarge
+ target: x86_64-apple-darwin
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ install_musl: true
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ install_musl: true
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - uses: dtolnay/rust-toolchain@1.90
+ with:
+ targets: ${{ matrix.target }}
+
+ - if: ${{ matrix.install_musl }}
+ name: Install musl build dependencies
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y musl-tools pkg-config
+
+ - name: Build exec server binaries
+ run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
+
+ - name: Stage exec server binaries
+ run: |
+ dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}"
+ mkdir -p "$dest"
+ cp "target/${{ matrix.target }}/release/codex-exec-mcp-server" "$dest/"
+ cp "target/${{ matrix.target }}/release/codex-execve-wrapper" "$dest/"
+
+ - uses: actions/upload-artifact@v6
+ with:
+ name: shell-tool-mcp-rust-${{ matrix.target }}
+ path: artifacts/**
+ if-no-files-found: error
+
+ bash-linux:
+ name: Build Bash (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
+ needs: metadata
+ runs-on: ${{ matrix.runner }}
+ timeout-minutes: 30
+ container:
+ image: ${{ matrix.image }}
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ variant: ubuntu-24.04
+ image: ubuntu:24.04
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ variant: ubuntu-22.04
+ image: ubuntu:22.04
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ variant: debian-12
+ image: debian:12
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ variant: debian-11
+ image: debian:11
+ - runner: ubuntu-24.04
+ target: x86_64-unknown-linux-musl
+ variant: centos-9
+ image: quay.io/centos/centos:stream9
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: ubuntu-24.04
+ image: arm64v8/ubuntu:24.04
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: ubuntu-22.04
+ image: arm64v8/ubuntu:22.04
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: ubuntu-20.04
+ image: arm64v8/ubuntu:20.04
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: debian-12
+ image: arm64v8/debian:12
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: debian-11
+ image: arm64v8/debian:11
+ - runner: ubuntu-24.04-arm
+ target: aarch64-unknown-linux-musl
+ variant: centos-9
+ image: quay.io/centos/centos:stream9
+ steps:
+ - name: Install build prerequisites
+ shell: bash
+ run: |
+ set -euo pipefail
+ if command -v apt-get >/dev/null 2>&1; then
+ apt-get update
+ DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
+ elif command -v dnf >/dev/null 2>&1; then
+ dnf install -y git gcc gcc-c++ make bison autoconf gettext
+ elif command -v yum >/dev/null 2>&1; then
+ yum install -y git gcc gcc-c++ make bison autoconf gettext
+ else
+ echo "Unsupported package manager in container"
+ exit 1
+ fi
+
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Build patched Bash
+ shell: bash
+ run: |
+ set -euo pipefail
+ git clone --depth 1 https://github.com/bminor/bash /tmp/bash
+ cd /tmp/bash
+ git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
+ git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
+ git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
+ ./configure --without-bash-malloc
+ cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
+ make -j"${cores}"
+
+ dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
+ mkdir -p "$dest"
+ cp bash "$dest/bash"
+
+ - uses: actions/upload-artifact@v6
+ with:
+ name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
+ path: artifacts/**
+ if-no-files-found: error
+
+ bash-darwin:
+ name: Build Bash (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
+ needs: metadata
+ runs-on: ${{ matrix.runner }}
+ timeout-minutes: 30
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - runner: macos-15-xlarge
+ target: aarch64-apple-darwin
+ variant: macos-15
+ - runner: macos-14
+ target: aarch64-apple-darwin
+ variant: macos-14
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Build patched Bash
+ shell: bash
+ run: |
+ set -euo pipefail
+ git clone --depth 1 https://github.com/bminor/bash /tmp/bash
+ cd /tmp/bash
+ git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
+ git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
+ git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
+ ./configure --without-bash-malloc
+ cores="$(getconf _NPROCESSORS_ONLN)"
+ make -j"${cores}"
+
+ dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
+ mkdir -p "$dest"
+ cp bash "$dest/bash"
+
+ - uses: actions/upload-artifact@v6
+ with:
+ name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
+ path: artifacts/**
+ if-no-files-found: error
+
+ package:
+ name: Package npm module
+ needs:
+ - metadata
+ - rust-binaries
+ - bash-linux
+ - bash-darwin
+ runs-on: ubuntu-latest
+ env:
+ PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Setup pnpm
+ uses: pnpm/action-setup@v4
+ with:
+ version: 10.8.1
+ run_install: false
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v6
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+
+ - name: Install JavaScript dependencies
+ run: pnpm install --frozen-lockfile
+
+ - name: Build (shell-tool-mcp)
+ run: pnpm --filter @openai/codex-shell-tool-mcp run build
+
+ - name: Download build artifacts
+ uses: actions/download-artifact@v7
+ with:
+ path: artifacts
+
+ - name: Assemble staging directory
+ id: staging
+ shell: bash
+ run: |
+ set -euo pipefail
+ staging="${STAGING_DIR}"
+ mkdir -p "$staging" "$staging/vendor"
+ cp shell-tool-mcp/README.md "$staging/"
+ cp shell-tool-mcp/package.json "$staging/"
+ cp -R shell-tool-mcp/bin "$staging/"
+
+ found_vendor="false"
+ shopt -s nullglob
+ for vendor_dir in artifacts/*/vendor; do
+ rsync -av "$vendor_dir/" "$staging/vendor/"
+ found_vendor="true"
+ done
+ if [[ "$found_vendor" == "false" ]]; then
+ echo "No vendor payloads were downloaded."
+ exit 1
+ fi
+
+ node --input-type=module - <<'NODE'
+ import fs from "node:fs";
+ import path from "node:path";
+
+ const stagingDir = process.env.STAGING_DIR;
+ const version = process.env.PACKAGE_VERSION;
+ const pkgPath = path.join(stagingDir, "package.json");
+ const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
+ pkg.version = version;
+ fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
+ NODE
+
+ echo "dir=$staging" >> "$GITHUB_OUTPUT"
+ env:
+ STAGING_DIR: ${{ runner.temp }}/shell-tool-mcp
+
+ - name: Ensure binaries are executable
+ run: |
+ set -euo pipefail
+ staging="${{ steps.staging.outputs.dir }}"
+ chmod +x \
+ "$staging"/vendor/*/codex-exec-mcp-server \
+ "$staging"/vendor/*/codex-execve-wrapper \
+ "$staging"/vendor/*/bash/*/bash
+
+ - name: Create npm tarball
+ shell: bash
+ run: |
+ set -euo pipefail
+ mkdir -p dist/npm
+ staging="${{ steps.staging.outputs.dir }}"
+ pack_info=$(cd "$staging" && npm pack --ignore-scripts --json --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
+ filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
+ mv "dist/npm/${filename}" "dist/npm/codex-shell-tool-mcp-npm-${PACKAGE_VERSION}.tgz"
+
+ - uses: actions/upload-artifact@v6
+ with:
+ name: codex-shell-tool-mcp-npm
+ path: dist/npm/codex-shell-tool-mcp-npm-${{ env.PACKAGE_VERSION }}.tgz
+ if-no-files-found: error
+
+ publish:
+ name: Publish npm package
+ needs:
+ - metadata
+ - package
+ if: ${{ inputs.publish && needs.metadata.outputs.should_publish == 'true' }}
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write
+ contents: read
+ steps:
+ - name: Setup pnpm
+ uses: pnpm/action-setup@v4
+ with:
+ version: 10.8.1
+ run_install: false
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v6
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+ registry-url: https://registry.npmjs.org
+ scope: "@openai"
+
+ - name: Update npm
+ run: npm install -g npm@latest
+
+ - name: Download npm tarball
+ uses: actions/download-artifact@v7
+ with:
+ name: codex-shell-tool-mcp-npm
+ path: dist/npm
+
+ - name: Publish to npm
+ env:
+ NPM_TAG: ${{ needs.metadata.outputs.npm_tag }}
+ VERSION: ${{ needs.metadata.outputs.version }}
+ shell: bash
+ run: |
+ set -euo pipefail
+ tag_args=()
+ if [[ -n "${NPM_TAG}" ]]; then
+ tag_args+=(--tag "${NPM_TAG}")
+ fi
+ npm publish "dist/npm/codex-shell-tool-mcp-npm-${VERSION}.tgz" "${tag_args[@]}"
diff --git a/.gitignore b/.gitignore
index a58e9dfb7..8f39b7b1c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,7 @@ node_modules
# build
dist/
+bazel-*
build/
out/
storybook-static/
@@ -85,3 +86,8 @@ CHANGELOG.ignore.md
# nix related
.direnv
.envrc
+
+# Python bytecode files
+__pycache__/
+*.pyc
+
diff --git a/AGENTS.md b/AGENTS.md
index aaebd0dfd..5c0a6db63 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -11,7 +11,6 @@ In the codex-rs folder where the rust code lives:
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
-- Do not use unsigned integer even if the number cannot be negative.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
@@ -75,6 +74,14 @@ If you don’t have the tool:
### Test assertions
- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.
+- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
+- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.
+
+### Spawning workspace binaries in tests (Cargo vs Bazel)
+
+- Prefer `codex_utils_cargo_bin::cargo_bin("...")` over `assert_cmd::Command::cargo_bin(...)` or `escargot` when tests need to spawn first-party binaries.
+ - Under Bazel, binaries and resources may live under runfiles; use `codex_utils_cargo_bin::cargo_bin` to resolve absolute paths that remain stable after `chdir`.
+- When locating fixture files or test resources under Bazel, avoid `env!("CARGO_MANIFEST_DIR")`. Prefer `codex_utils_cargo_bin::find_resource!` so paths resolve correctly under both Cargo and Bazel runfiles.
### Integration tests (core)
diff --git a/README.md b/README.md
index 814161003..eb4ace74f 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,11 @@
npm i -g @openai/codex or brew install --cask codex
-
Codex CLI is a coding agent from OpenAI that runs locally on your computer.
-
-If you want Codex in your code editor (VS Code, Cursor, Windsurf), install in your IDE
-If you are looking for the cloud-based agent from OpenAI, Codex Web , go to chatgpt.com/codex
-
-
+
+
+If you want Codex in your code editor (VS Code, Cursor, Windsurf), install in your IDE.
+If you are looking for the cloud-based agent from OpenAI, Codex Web, go to chatgpt.com/codex.
---
@@ -15,25 +13,19 @@
### Installing and running Codex CLI
-Install globally with your preferred package manager. If you use npm:
+Install globally with your preferred package manager:
```shell
+# Install using npm
npm install -g @openai/codex
```
-Alternatively, if you use Homebrew:
-
```shell
+# Install using Homebrew
brew install --cask codex
```
-Then simply run `codex` to get started:
-
-```shell
-codex
-```
-
-If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).
+Then simply run `codex` to get started.
You can also go to the latest GitHub Release and download the appropriate binary for your platform.
@@ -53,57 +45,15 @@ Each archive contains a single entry with the platform baked into the name (e.g.
### Using Codex with your ChatGPT plan
-
-
-
-
Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).
-You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).
-
-### Model Context Protocol (MCP)
-
-Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).
+You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).
-### Configuration
+## Docs
-Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).
-
----
-
-### Docs & FAQ
-
-- [**Getting started**](./docs/getting-started.md)
- - [CLI usage](./docs/getting-started.md#cli-usage)
- - [Slash Commands](./docs/slash_commands.md)
- - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
- - [Example prompts](./docs/getting-started.md#example-prompts)
- - [Custom prompts](./docs/prompts.md)
- - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
-- [**Configuration**](./docs/config.md)
- - [Example config](./docs/example-config.md)
-- [**Sandbox & approvals**](./docs/sandbox.md)
-- [**Authentication**](./docs/authentication.md)
- - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
- - [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
-- **Automating Codex**
- - [GitHub Action](https://github.com/openai/codex-action)
- - [TypeScript SDK](./sdk/typescript/README.md)
- - [Non-interactive mode (`codex exec`)](./docs/exec.md)
-- [**Advanced**](./docs/advanced.md)
- - [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
- - [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
-- [**Zero data retention (ZDR)**](./docs/zdr.md)
+- [**Codex Documentation**](https://developers.openai.com/codex)
- [**Contributing**](./docs/contributing.md)
-- [**Install & build**](./docs/install.md)
- - [System Requirements](./docs/install.md#system-requirements)
- - [DotSlash](./docs/install.md#dotslash)
- - [Build from source](./docs/install.md#build-from-source)
-- [**FAQ**](./docs/faq.md)
+- [**Installing & building**](./docs/install.md)
- [**Open source fund**](./docs/open-source-fund.md)
----
-
-## License
-
This repository is licensed under the [Apache-2.0 License](LICENSE).
diff --git a/announcement_tip.toml b/announcement_tip.toml
new file mode 100644
index 000000000..3ad4a7659
--- /dev/null
+++ b/announcement_tip.toml
@@ -0,0 +1,16 @@
+# Example announcement tips for Codex TUI.
+# Each [[announcements]] entry is evaluated in order; the last matching one is shown.
+# Dates are UTC, formatted as YYYY-MM-DD. The from_date is inclusive and the to_date is exclusive.
+# version_regex matches against the CLI version (env!("CARGO_PKG_VERSION")); omit to apply to all versions.
+# target_app specifies which app should display the announcement (cli, vsce, ...).
+
+[[announcements]]
+content = "Welcome to Codex! Check out the new onboarding flow."
+from_date = "2024-10-01"
+to_date = "2024-10-15"
+target_app = "cli"
+
+[[announcements]]
+content = "This is a test announcement"
+version_regex = "^0\\.0\\.0$"
+to_date = "2026-01-10"
diff --git a/codex-cli/bin/codex.js b/codex-cli/bin/codex.js
index 805be85af..6ec8069bd 100644
--- a/codex-cli/bin/codex.js
+++ b/codex-cli/bin/codex.js
@@ -95,10 +95,10 @@ function detectPackageManager() {
return "bun";
}
+
if (
- process.env.BUN_INSTALL ||
- process.env.BUN_INSTALL_GLOBAL_DIR ||
- process.env.BUN_INSTALL_BIN_DIR
+ __dirname.includes(".bun/install/global") ||
+ __dirname.includes(".bun\\install\\global")
) {
return "bun";
}
diff --git a/codex-cli/scripts/build_npm_package.py b/codex-cli/scripts/build_npm_package.py
index ef96bef2e..bf0eb5f46 100755
--- a/codex-cli/scripts/build_npm_package.py
+++ b/codex-cli/scripts/build_npm_package.py
@@ -20,9 +20,14 @@
"codex-responses-api-proxy": ["codex-responses-api-proxy"],
"codex-sdk": ["codex"],
}
+WINDOWS_ONLY_COMPONENTS: dict[str, list[str]] = {
+ "codex": ["codex-windows-sandbox-setup", "codex-command-runner"],
+}
COMPONENT_DEST_DIR: dict[str, str] = {
"codex": "codex",
"codex-responses-api-proxy": "codex-responses-api-proxy",
+ "codex-windows-sandbox-setup": "codex",
+ "codex-command-runner": "codex",
"rg": "path",
}
@@ -103,7 +108,7 @@ def main() -> int:
"pointing to a directory containing pre-installed binaries."
)
- copy_native_binaries(vendor_src, staging_dir, native_components)
+ copy_native_binaries(vendor_src, staging_dir, package, native_components)
if release_version:
staging_dir_str = str(staging_dir)
@@ -232,7 +237,12 @@ def stage_codex_sdk_sources(staging_dir: Path) -> None:
shutil.copy2(license_src, staging_dir / "LICENSE")
-def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[str]) -> None:
+def copy_native_binaries(
+ vendor_src: Path,
+ staging_dir: Path,
+ package: str,
+ components: list[str],
+) -> None:
vendor_src = vendor_src.resolve()
if not vendor_src.exists():
raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
@@ -250,6 +260,9 @@ def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[s
if not target_dir.is_dir():
continue
+ if "windows" in target_dir.name:
+ components_set.update(WINDOWS_ONLY_COMPONENTS.get(package, []))
+
dest_target_dir = vendor_dest / target_dir.name
dest_target_dir.mkdir(parents=True, exist_ok=True)
diff --git a/codex-cli/scripts/install_native_deps.py b/codex-cli/scripts/install_native_deps.py
index 8d3909c9e..58fbd370f 100755
--- a/codex-cli/scripts/install_native_deps.py
+++ b/codex-cli/scripts/install_native_deps.py
@@ -2,6 +2,7 @@
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
import argparse
+from contextlib import contextmanager
import json
import os
import shutil
@@ -12,6 +13,7 @@
from dataclasses import dataclass
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
+import sys
from typing import Iterable, Sequence
from urllib.parse import urlparse
from urllib.request import urlopen
@@ -36,8 +38,11 @@ class BinaryComponent:
artifact_prefix: str # matches the artifact filename prefix (e.g. codex-.zst)
dest_dir: str # directory under vendor// where the binary is installed
binary_basename: str # executable name inside dest_dir (before optional .exe)
+ targets: tuple[str, ...] | None = None # limit installation to specific targets
+WINDOWS_TARGETS = tuple(target for target in BINARY_TARGETS if "windows" in target)
+
BINARY_COMPONENTS = {
"codex": BinaryComponent(
artifact_prefix="codex",
@@ -49,6 +54,18 @@ class BinaryComponent:
dest_dir="codex-responses-api-proxy",
binary_basename="codex-responses-api-proxy",
),
+ "codex-windows-sandbox-setup": BinaryComponent(
+ artifact_prefix="codex-windows-sandbox-setup",
+ dest_dir="codex",
+ binary_basename="codex-windows-sandbox-setup",
+ targets=WINDOWS_TARGETS,
+ ),
+ "codex-command-runner": BinaryComponent(
+ artifact_prefix="codex-command-runner",
+ dest_dir="codex",
+ binary_basename="codex-command-runner",
+ targets=WINDOWS_TARGETS,
+ ),
}
RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
@@ -62,6 +79,45 @@ class BinaryComponent:
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
+# urllib.request.urlopen() defaults to no timeout (can hang indefinitely), which is painful in CI.
+DOWNLOAD_TIMEOUT_SECS = 60
+
+
+def _gha_enabled() -> bool:
+ # GitHub Actions supports "workflow commands" (e.g. ::group:: / ::error::) that make logs
+ # much easier to scan: groups collapse noisy sections and error annotations surface the
+ # failure in the UI without changing the actual exception/traceback output.
+ return os.environ.get("GITHUB_ACTIONS") == "true"
+
+
+def _gha_escape(value: str) -> str:
+ # Workflow commands require percent/newline escaping.
+ return value.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
+
+
+def _gha_error(*, title: str, message: str) -> None:
+ # Emit a GitHub Actions error annotation. This does not replace stdout/stderr logs; it just
+ # adds a prominent summary line to the job UI so the root cause is easier to spot.
+ if not _gha_enabled():
+ return
+ print(
+ f"::error title={_gha_escape(title)}::{_gha_escape(message)}",
+ flush=True,
+ )
+
+
+@contextmanager
+def _gha_group(title: str):
+ # Wrap a block in a collapsible log group on GitHub Actions. Outside of GHA this is a no-op
+ # so local output remains unchanged.
+ if _gha_enabled():
+ print(f"::group::{_gha_escape(title)}", flush=True)
+ try:
+ yield
+ finally:
+ if _gha_enabled():
+ print("::endgroup::", flush=True)
+
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
@@ -79,7 +135,8 @@ def parse_args() -> argparse.Namespace:
choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
help=(
"Limit installation to the specified components."
- " May be repeated. Defaults to 'codex' and 'rg'."
+ " May be repeated. Defaults to codex, codex-windows-sandbox-setup,"
+ " codex-command-runner, and rg."
),
)
parser.add_argument(
@@ -101,7 +158,12 @@ def main() -> int:
vendor_dir = codex_cli_root / VENDOR_DIR_NAME
vendor_dir.mkdir(parents=True, exist_ok=True)
- components = args.components or ["codex", "rg"]
+ components = args.components or [
+ "codex",
+ "codex-windows-sandbox-setup",
+ "codex-command-runner",
+ "rg",
+ ]
workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
if not workflow_url:
@@ -110,19 +172,20 @@ def main() -> int:
workflow_id = workflow_url.rstrip("/").split("/")[-1]
print(f"Downloading native artifacts from workflow {workflow_id}...")
- with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
- artifacts_dir = Path(artifacts_dir_str)
- _download_artifacts(workflow_id, artifacts_dir)
- install_binary_components(
- artifacts_dir,
- vendor_dir,
- BINARY_TARGETS,
- [name for name in components if name in BINARY_COMPONENTS],
- )
+ with _gha_group(f"Download native artifacts from workflow {workflow_id}"):
+ with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
+ artifacts_dir = Path(artifacts_dir_str)
+ _download_artifacts(workflow_id, artifacts_dir)
+ install_binary_components(
+ artifacts_dir,
+ vendor_dir,
+ [BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
+ )
if "rg" in components:
- print("Fetching ripgrep binaries...")
- fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
+ with _gha_group("Fetch ripgrep binaries"):
+ print("Fetching ripgrep binaries...")
+ fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
print(f"Installed native dependencies into {vendor_dir}")
return 0
@@ -183,7 +246,14 @@ def fetch_rg(
for future in as_completed(future_map):
target = future_map[future]
- results[target] = future.result()
+ try:
+ results[target] = future.result()
+ except Exception as exc:
+ _gha_error(
+ title="ripgrep install failed",
+ message=f"target={target} error={exc!r}",
+ )
+ raise RuntimeError(f"Failed to install ripgrep for target {target}.") from exc
print(f" installed ripgrep for {target}")
return [results[target] for target in targets]
@@ -206,23 +276,19 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
def install_binary_components(
artifacts_dir: Path,
vendor_dir: Path,
- targets: Iterable[str],
- component_names: Sequence[str],
+ selected_components: Sequence[BinaryComponent],
) -> None:
- selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
if not selected_components:
return
- targets = list(targets)
- if not targets:
- return
-
for component in selected_components:
+ component_targets = list(component.targets or BINARY_TARGETS)
+
print(
f"Installing {component.binary_basename} binaries for targets: "
- + ", ".join(targets)
+ + ", ".join(component_targets)
)
- max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
+ max_workers = min(len(component_targets), max(1, (os.cpu_count() or 1)))
with ThreadPoolExecutor(max_workers=max_workers) as executor:
futures = {
executor.submit(
@@ -232,7 +298,7 @@ def install_binary_components(
target,
component,
): target
- for target in targets
+ for target in component_targets
}
for future in as_completed(futures):
installed_path = future.result()
@@ -285,6 +351,8 @@ def _fetch_single_rg(
url = providers[0]["url"]
archive_format = platform_info.get("format", "zst")
archive_member = platform_info.get("path")
+ digest = platform_info.get("digest")
+ expected_size = platform_info.get("size")
dest_dir = vendor_dir / target / "path"
dest_dir.mkdir(parents=True, exist_ok=True)
@@ -297,10 +365,32 @@ def _fetch_single_rg(
tmp_dir = Path(tmp_dir_str)
archive_filename = os.path.basename(urlparse(url).path)
download_path = tmp_dir / archive_filename
- _download_file(url, download_path)
+ print(
+ f" downloading ripgrep for {target} ({platform_key}) from {url}",
+ flush=True,
+ )
+ try:
+ _download_file(url, download_path)
+ except Exception as exc:
+ _gha_error(
+ title="ripgrep download failed",
+ message=f"target={target} platform={platform_key} url={url} error={exc!r}",
+ )
+ raise RuntimeError(
+ "Failed to download ripgrep "
+ f"(target={target}, platform={platform_key}, format={archive_format}, "
+ f"expected_size={expected_size!r}, digest={digest!r}, url={url}, dest={download_path})."
+ ) from exc
dest.unlink(missing_ok=True)
- extract_archive(download_path, archive_format, archive_member, dest)
+ try:
+ extract_archive(download_path, archive_format, archive_member, dest)
+ except Exception as exc:
+ raise RuntimeError(
+ "Failed to extract ripgrep "
+ f"(target={target}, platform={platform_key}, format={archive_format}, "
+ f"member={archive_member!r}, url={url}, archive={download_path})."
+ ) from exc
if not is_windows:
dest.chmod(0o755)
@@ -310,7 +400,9 @@ def _fetch_single_rg(
def _download_file(url: str, dest: Path) -> None:
dest.parent.mkdir(parents=True, exist_ok=True)
- with urlopen(url) as response, open(dest, "wb") as out:
+ dest.unlink(missing_ok=True)
+
+ with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
shutil.copyfileobj(response, out)
diff --git a/codex-rs/.cargo/audit.toml b/codex-rs/.cargo/audit.toml
new file mode 100644
index 000000000..143e64163
--- /dev/null
+++ b/codex-rs/.cargo/audit.toml
@@ -0,0 +1,6 @@
+[advisories]
+ignore = [
+ "RUSTSEC-2024-0388", # derivative 2.2.0 via starlark; upstream crate is unmaintained
+ "RUSTSEC-2025-0057", # fxhash 0.2.1 via starlark_map; upstream crate is unmaintained
+ "RUSTSEC-2024-0436", # paste 1.0.15 via starlark/ratatui; upstream crate is unmaintained
+]
diff --git a/codex-rs/.config/nextest.toml b/codex-rs/.config/nextest.toml
index 3ca7cfe50..f432af88e 100644
--- a/codex-rs/.config/nextest.toml
+++ b/codex-rs/.config/nextest.toml
@@ -7,3 +7,7 @@ slow-timeout = { period = "15s", terminate-after = 2 }
# Do not add new tests here
filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
slow-timeout = { period = "1m", terminate-after = 4 }
+
+[[profile.default.overrides]]
+filter = 'test(approval_matrix_covers_all_modes)'
+slow-timeout = { period = "30s", terminate-after = 2 }
diff --git a/codex-rs/.github/workflows/cargo-audit.yml b/codex-rs/.github/workflows/cargo-audit.yml
new file mode 100644
index 000000000..e75c841ab
--- /dev/null
+++ b/codex-rs/.github/workflows/cargo-audit.yml
@@ -0,0 +1,26 @@
+name: Cargo audit
+
+on:
+ pull_request:
+ push:
+ branches:
+ - main
+
+permissions:
+ contents: read
+
+jobs:
+ audit:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: codex-rs
+ steps:
+ - uses: actions/checkout@v4
+ - uses: dtolnay/rust-toolchain@stable
+ - name: Install cargo-audit
+ uses: taiki-e/install-action@v2
+ with:
+ tool: cargo-audit
+ - name: Run cargo audit
+ run: cargo audit --deny warnings
diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock
index 0ed45ddb2..29be64f9a 100644
--- a/codex-rs/Cargo.lock
+++ b/codex-rs/Cargo.lock
@@ -12,6 +12,154 @@ dependencies = [
"regex",
]
+[[package]]
+name = "actix-codec"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a"
+dependencies = [
+ "bitflags 2.10.0",
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "memchr",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "actix-http"
+version = "3.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7926860314cbe2fb5d1f13731e387ab43bd32bca224e82e6e2db85de0a3dba49"
+dependencies = [
+ "actix-codec",
+ "actix-rt",
+ "actix-service",
+ "actix-utils",
+ "bitflags 2.10.0",
+ "bytes",
+ "bytestring",
+ "derive_more 2.1.1",
+ "encoding_rs",
+ "foldhash 0.1.5",
+ "futures-core",
+ "http 0.2.12",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "language-tags",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "smallvec",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "actix-router"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8"
+dependencies = [
+ "bytestring",
+ "cfg-if",
+ "http 0.2.12",
+ "regex-lite",
+ "serde",
+ "tracing",
+]
+
+[[package]]
+name = "actix-rt"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63"
+dependencies = [
+ "futures-core",
+ "tokio",
+]
+
+[[package]]
+name = "actix-server"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502"
+dependencies = [
+ "actix-rt",
+ "actix-service",
+ "actix-utils",
+ "futures-core",
+ "futures-util",
+ "mio",
+ "socket2 0.5.10",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "actix-service"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "actix-utils"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8"
+dependencies = [
+ "local-waker",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "actix-web"
+version = "4.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1654a77ba142e37f049637a3e5685f864514af11fcbc51cb51eb6596afe5b8d6"
+dependencies = [
+ "actix-codec",
+ "actix-http",
+ "actix-router",
+ "actix-rt",
+ "actix-server",
+ "actix-service",
+ "actix-utils",
+ "bytes",
+ "bytestring",
+ "cfg-if",
+ "derive_more 2.1.1",
+ "encoding_rs",
+ "foldhash 0.1.5",
+ "futures-core",
+ "futures-util",
+ "impl-more",
+ "itoa",
+ "language-tags",
+ "log",
+ "mime",
+ "once_cell",
+ "pin-project-lite",
+ "regex-lite",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "smallvec",
+ "socket2 0.6.1",
+ "time",
+ "tracing",
+ "url",
+]
+
[[package]]
name = "addr2line"
version = "0.24.2"
@@ -181,14 +329,16 @@ name = "app_test_support"
version = "0.0.0"
dependencies = [
"anyhow",
- "assert_cmd",
"base64",
"chrono",
"codex-app-server-protocol",
"codex-core",
"codex-protocol",
+ "codex-utils-cargo-bin",
+ "core_test_support",
"serde",
"serde_json",
+ "shlex",
"tokio",
"uuid",
"wiremock",
@@ -196,9 +346,9 @@ dependencies = [
[[package]]
name = "arboard"
-version = "3.6.0"
+version = "3.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55f533f8e0af236ffe5eb979b99381df3258853f00ba2e44b6e1955292c75227"
+checksum = "0348a1c054491f4bfe6ab86a7b6ab1e44e45d899005de92f58b3df180b36ddaf"
dependencies = [
"clipboard-win",
"image",
@@ -210,11 +360,17 @@ dependencies = [
"objc2-foundation",
"parking_lot",
"percent-encoding",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
"wl-clipboard-rs",
"x11rb",
]
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
[[package]]
name = "arrayvec"
version = "0.7.6"
@@ -236,48 +392,6 @@ dependencies = [
"term",
]
-[[package]]
-name = "askama"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f75363874b771be265f4ffe307ca705ef6f3baa19011c149da8674a87f1b75c4"
-dependencies = [
- "askama_derive",
- "itoa",
- "percent-encoding",
- "serde",
- "serde_json",
-]
-
-[[package]]
-name = "askama_derive"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "129397200fe83088e8a68407a8e2b1f826cf0086b21ccdb866a722c8bcd3a94f"
-dependencies = [
- "askama_parser",
- "basic-toml",
- "memchr",
- "proc-macro2",
- "quote",
- "rustc-hash 2.1.1",
- "serde",
- "serde_derive",
- "syn 2.0.104",
-]
-
-[[package]]
-name = "askama_parser"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6ab5630b3d5eaf232620167977f95eb51f3432fc76852328774afbd242d4358"
-dependencies = [
- "memchr",
- "serde",
- "serde_derive",
- "winnow",
-]
-
[[package]]
name = "assert-json-diff"
version = "2.0.2"
@@ -495,7 +609,7 @@ dependencies = [
"axum-core",
"bytes",
"futures-util",
- "http",
+ "http 1.3.1",
"http-body",
"http-body-util",
"hyper",
@@ -523,7 +637,7 @@ checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
dependencies = [
"bytes",
"futures-core",
- "http",
+ "http 1.3.1",
"http-body",
"http-body-util",
"mime",
@@ -556,13 +670,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
-name = "basic-toml"
-version = "0.1.10"
+name = "base64ct"
+version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a"
-dependencies = [
- "serde",
-]
+checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a"
[[package]]
name = "beef"
@@ -669,6 +780,15 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
+[[package]]
+name = "bytestring"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289"
+dependencies = [
+ "bytes",
+]
+
[[package]]
name = "cassowary"
version = "0.3.0"
@@ -699,6 +819,8 @@ version = "1.2.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
dependencies = [
+ "jobserver",
+ "libc",
"shlex",
]
@@ -726,6 +848,17 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+[[package]]
+name = "chardetng"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea"
+dependencies = [
+ "cfg-if",
+ "encoding_rs",
+ "memchr",
+]
+
[[package]]
name = "chrono"
version = "0.4.42"
@@ -758,9 +891,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.47"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931"
+checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
"clap_builder",
"clap_derive",
@@ -768,9 +901,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.47"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6"
+checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
"anstream",
"anstyle",
@@ -781,18 +914,18 @@ dependencies = [
[[package]]
name = "clap_complete"
-version = "4.5.57"
+version = "4.5.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d9501bd3f5f09f7bbee01da9a511073ed30a80cd7a509f1214bb74eadea71ad"
+checksum = "4c0da80818b2d95eca9aa614a30783e42f62bf5fdfee24e68cfb960b071ba8d1"
dependencies = [
"clap",
]
[[package]]
name = "clap_derive"
-version = "4.5.47"
+version = "4.5.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
+checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
dependencies = [
"heck",
"proc-macro2",
@@ -830,13 +963,38 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "codex-api"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "assert_matches",
+ "async-trait",
+ "bytes",
+ "codex-client",
+ "codex-protocol",
+ "eventsource-stream",
+ "futures",
+ "http 1.3.1",
+ "pretty_assertions",
+ "regex-lite",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+ "tokio",
+ "tokio-test",
+ "tokio-util",
+ "tracing",
+ "wiremock",
+]
+
[[package]]
name = "codex-app-server"
version = "0.0.0"
dependencies = [
"anyhow",
"app_test_support",
- "assert_cmd",
"base64",
"chrono",
"codex-app-server-protocol",
@@ -848,18 +1006,20 @@ dependencies = [
"codex-file-search",
"codex-login",
"codex-protocol",
+ "codex-rmcp-client",
+ "codex-utils-absolute-path",
"codex-utils-json-to-toml",
"core_test_support",
"mcp-types",
- "opentelemetry-appender-tracing",
"os_info",
"pretty_assertions",
"serde",
"serde_json",
"serial_test",
+ "shlex",
"tempfile",
"tokio",
- "toml",
+ "toml 0.9.5",
"tracing",
"tracing-subscriber",
"uuid",
@@ -873,12 +1033,14 @@ dependencies = [
"anyhow",
"clap",
"codex-protocol",
+ "codex-utils-absolute-path",
"mcp-types",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"strum_macros 0.27.2",
+ "thiserror 2.0.17",
"ts-rs",
"uuid",
]
@@ -903,6 +1065,7 @@ dependencies = [
"anyhow",
"assert_cmd",
"assert_matches",
+ "codex-utils-cargo-bin",
"pretty_assertions",
"similar",
"tempfile",
@@ -966,6 +1129,7 @@ dependencies = [
"codex-common",
"codex-core",
"codex-git",
+ "codex-utils-cargo-bin",
"serde",
"serde_json",
"tempfile",
@@ -989,29 +1153,54 @@ dependencies = [
"codex-common",
"codex-core",
"codex-exec",
+ "codex-execpolicy",
"codex-login",
"codex-mcp-server",
- "codex-process-hardening",
"codex-protocol",
"codex-responses-api-proxy",
"codex-rmcp-client",
"codex-stdio-to-uds",
"codex-tui",
+ "codex-tui2",
+ "codex-utils-absolute-path",
+ "codex-utils-cargo-bin",
"codex-windows-sandbox",
- "ctor 0.5.0",
"libc",
"owo-colors",
"predicates",
"pretty_assertions",
"regex-lite",
"serde_json",
- "supports-color",
+ "supports-color 3.0.2",
"tempfile",
"tokio",
- "toml",
+ "toml 0.9.5",
"tracing",
]
+[[package]]
+name = "codex-client"
+version = "0.0.0"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "eventsource-stream",
+ "futures",
+ "http 1.3.1",
+ "opentelemetry",
+ "opentelemetry_sdk",
+ "rand 0.9.2",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+ "tokio",
+ "tracing",
+ "tracing-opentelemetry",
+ "tracing-subscriber",
+ "zstd",
+]
+
[[package]]
name = "codex-cloud-tasks"
version = "0.0.0"
@@ -1027,10 +1216,13 @@ dependencies = [
"codex-login",
"codex-tui",
"crossterm",
+ "owo-colors",
+ "pretty_assertions",
"ratatui",
"reqwest",
"serde",
"serde_json",
+ "supports-color 3.0.2",
"tokio",
"tokio-stream",
"tracing",
@@ -1058,14 +1250,14 @@ name = "codex-common"
version = "0.0.0"
dependencies = [
"clap",
- "codex-app-server-protocol",
"codex-core",
"codex-lmstudio",
"codex-ollama",
"codex-protocol",
- "once_cell",
+ "codex-utils-absolute-path",
+ "pretty_assertions",
"serde",
- "toml",
+ "toml 0.9.5",
]
[[package]]
@@ -1073,81 +1265,103 @@ name = "codex-core"
version = "0.0.0"
dependencies = [
"anyhow",
- "askama",
+ "arc-swap",
"assert_cmd",
"assert_matches",
"async-channel",
"async-trait",
"base64",
- "bytes",
+ "chardetng",
"chrono",
+ "codex-api",
"codex-app-server-protocol",
"codex-apply-patch",
"codex-arg0",
"codex-async-utils",
+ "codex-client",
+ "codex-core",
+ "codex-execpolicy",
"codex-file-search",
"codex-git",
"codex-keyring-store",
"codex-otel",
"codex-protocol",
"codex-rmcp-client",
+ "codex-utils-absolute-path",
+ "codex-utils-cargo-bin",
"codex-utils-pty",
"codex-utils-readiness",
"codex-utils-string",
- "codex-utils-tokenizer",
"codex-windows-sandbox",
"core-foundation 0.9.4",
"core_test_support",
"ctor 0.5.0",
"dirs",
"dunce",
+ "encoding_rs",
"env-flags",
- "escargot",
"eventsource-stream",
"futures",
- "http",
+ "http 1.3.1",
"image",
+ "include_dir",
"indexmap 2.12.0",
"keyring",
"landlock",
"libc",
"maplit",
"mcp-types",
+ "once_cell",
"openssl-sys",
"os_info",
"predicates",
"pretty_assertions",
"rand 0.9.2",
+ "regex",
"regex-lite",
"reqwest",
"seccompiler",
"serde",
"serde_json",
+ "serde_yaml",
"serial_test",
"sha1",
"sha2",
"shlex",
"similar",
- "strum_macros 0.27.2",
"tempfile",
"test-case",
"test-log",
"thiserror 2.0.17",
"time",
"tokio",
- "tokio-test",
"tokio-util",
- "toml",
- "toml_edit",
+ "toml 0.9.5",
+ "toml_edit 0.24.0+spec-1.1.0",
"tracing",
+ "tracing-subscriber",
"tracing-test",
"tree-sitter",
"tree-sitter-bash",
+ "url",
"uuid",
"walkdir",
"which",
"wildmatch",
"wiremock",
+ "zstd",
+]
+
+[[package]]
+name = "codex-debug-client"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "codex-app-server-protocol",
+ "pretty_assertions",
+ "serde",
+ "serde_json",
]
[[package]]
@@ -1161,17 +1375,18 @@ dependencies = [
"codex-common",
"codex-core",
"codex-protocol",
+ "codex-utils-absolute-path",
+ "codex-utils-cargo-bin",
"core_test_support",
"libc",
"mcp-types",
- "opentelemetry-appender-tracing",
"owo-colors",
"predicates",
"pretty_assertions",
"serde",
"serde_json",
"shlex",
- "supports-color",
+ "supports-color 3.0.2",
"tempfile",
"tokio",
"tracing",
@@ -1183,27 +1398,34 @@ dependencies = [
]
[[package]]
-name = "codex-execpolicy"
+name = "codex-exec-server"
version = "0.0.0"
dependencies = [
- "allocative",
"anyhow",
+ "async-trait",
"clap",
- "derive_more 2.0.1",
- "env_logger",
- "log",
- "multimap",
+ "codex-core",
+ "codex-execpolicy",
+ "codex-utils-cargo-bin",
+ "exec_server_test_support",
+ "libc",
+ "maplit",
"path-absolutize",
- "regex-lite",
+ "pretty_assertions",
+ "rmcp",
"serde",
"serde_json",
- "serde_with",
- "starlark",
+ "shlex",
+ "socket2 0.6.1",
"tempfile",
+ "tokio",
+ "tokio-util",
+ "tracing",
+ "tracing-subscriber",
]
[[package]]
-name = "codex-execpolicy2"
+name = "codex-execpolicy"
version = "0.0.0"
dependencies = [
"anyhow",
@@ -1214,9 +1436,30 @@ dependencies = [
"serde_json",
"shlex",
"starlark",
+ "tempfile",
"thiserror 2.0.17",
]
+[[package]]
+name = "codex-execpolicy-legacy"
+version = "0.0.0"
+dependencies = [
+ "allocative",
+ "anyhow",
+ "clap",
+ "derive_more 2.1.1",
+ "env_logger",
+ "log",
+ "multimap",
+ "path-absolutize",
+ "regex-lite",
+ "serde",
+ "serde_json",
+ "serde_with",
+ "starlark",
+ "tempfile",
+]
+
[[package]]
name = "codex-feedback"
version = "0.0.0"
@@ -1225,6 +1468,7 @@ dependencies = [
"codex-protocol",
"pretty_assertions",
"sentry",
+ "tracing",
"tracing-subscriber",
]
@@ -1236,6 +1480,7 @@ dependencies = [
"clap",
"ignore",
"nucleo-matcher",
+ "pretty_assertions",
"serde",
"serde_json",
"tokio",
@@ -1271,6 +1516,7 @@ version = "0.0.0"
dependencies = [
"clap",
"codex-core",
+ "codex-utils-absolute-path",
"landlock",
"libc",
"seccompiler",
@@ -1320,7 +1566,6 @@ name = "codex-mcp-server"
version = "0.0.0"
dependencies = [
"anyhow",
- "assert_cmd",
"codex-arg0",
"codex-common",
"codex-core",
@@ -1363,20 +1608,27 @@ name = "codex-otel"
version = "0.0.0"
dependencies = [
"chrono",
+ "codex-api",
"codex-app-server-protocol",
"codex-protocol",
+ "codex-utils-absolute-path",
"eventsource-stream",
+ "http 1.3.1",
"opentelemetry",
+ "opentelemetry-appender-tracing",
"opentelemetry-otlp",
"opentelemetry-semantic-conventions",
"opentelemetry_sdk",
+ "pretty_assertions",
"reqwest",
"serde",
"serde_json",
"strum_macros 0.27.2",
+ "thiserror 2.0.17",
"tokio",
- "tonic",
"tracing",
+ "tracing-opentelemetry",
+ "tracing-subscriber",
]
[[package]]
@@ -1384,6 +1636,7 @@ name = "codex-process-hardening"
version = "0.0.0"
dependencies = [
"libc",
+ "pretty_assertions",
]
[[package]]
@@ -1391,14 +1644,15 @@ name = "codex-protocol"
version = "0.0.0"
dependencies = [
"anyhow",
- "base64",
"codex-git",
+ "codex-utils-absolute-path",
"codex-utils-image",
"icu_decimal",
"icu_locale_core",
"icu_provider",
"mcp-types",
"mime_guess",
+ "pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
@@ -1436,8 +1690,8 @@ dependencies = [
"axum",
"codex-keyring-store",
"codex-protocol",
+ "codex-utils-cargo-bin",
"dirs",
- "escargot",
"futures",
"keyring",
"mcp-types",
@@ -1464,6 +1718,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"assert_cmd",
+ "codex-utils-cargo-bin",
"pretty_assertions",
"tempfile",
"uds_windows",
@@ -1472,6 +1727,77 @@ dependencies = [
[[package]]
name = "codex-tui"
version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "arboard",
+ "assert_matches",
+ "base64",
+ "chrono",
+ "clap",
+ "codex-ansi-escape",
+ "codex-app-server-protocol",
+ "codex-arg0",
+ "codex-backend-client",
+ "codex-common",
+ "codex-core",
+ "codex-feedback",
+ "codex-file-search",
+ "codex-login",
+ "codex-protocol",
+ "codex-utils-absolute-path",
+ "codex-windows-sandbox",
+ "color-eyre",
+ "crossterm",
+ "derive_more 2.1.1",
+ "diffy",
+ "dirs",
+ "dunce",
+ "image",
+ "insta",
+ "itertools 0.14.0",
+ "lazy_static",
+ "libc",
+ "mcp-types",
+ "pathdiff",
+ "pretty_assertions",
+ "pulldown-cmark",
+ "rand 0.9.2",
+ "ratatui",
+ "ratatui-macros",
+ "regex-lite",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "serial_test",
+ "shlex",
+ "strum 0.27.2",
+ "strum_macros 0.27.2",
+ "supports-color 3.0.2",
+ "tempfile",
+ "textwrap 0.16.2",
+ "thiserror 2.0.17",
+ "tokio",
+ "tokio-stream",
+ "tokio-util",
+ "toml 0.9.5",
+ "tracing",
+ "tracing-appender",
+ "tracing-subscriber",
+ "tree-sitter-bash",
+ "tree-sitter-highlight",
+ "unicode-segmentation",
+ "unicode-width 0.2.1",
+ "url",
+ "uuid",
+ "vt100",
+ "which",
+ "windows-sys 0.52.0",
+ "winsplit",
+]
+
+[[package]]
+name = "codex-tui2"
+version = "0.0.0"
dependencies = [
"anyhow",
"arboard",
@@ -1490,10 +1816,12 @@ dependencies = [
"codex-file-search",
"codex-login",
"codex-protocol",
+ "codex-tui",
+ "codex-utils-absolute-path",
"codex-windows-sandbox",
"color-eyre",
"crossterm",
- "derive_more 2.0.1",
+ "derive_more 2.1.1",
"diffy",
"dirs",
"dunce",
@@ -1503,12 +1831,12 @@ dependencies = [
"lazy_static",
"libc",
"mcp-types",
- "opentelemetry-appender-tracing",
"pathdiff",
"pretty_assertions",
"pulldown-cmark",
"rand 0.9.2",
"ratatui",
+ "ratatui-core",
"ratatui-macros",
"regex-lite",
"reqwest",
@@ -1518,32 +1846,56 @@ dependencies = [
"shlex",
"strum 0.27.2",
"strum_macros 0.27.2",
- "supports-color",
+ "supports-color 3.0.2",
"tempfile",
"textwrap 0.16.2",
"tokio",
"tokio-stream",
- "toml",
+ "tokio-util",
+ "toml 0.9.5",
"tracing",
"tracing-appender",
"tracing-subscriber",
"tree-sitter-bash",
"tree-sitter-highlight",
+ "tui-scrollbar",
"unicode-segmentation",
"unicode-width 0.2.1",
"url",
+ "uuid",
"vt100",
]
+[[package]]
+name = "codex-utils-absolute-path"
+version = "0.0.0"
+dependencies = [
+ "path-absolutize",
+ "schemars 0.8.22",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "ts-rs",
+]
+
[[package]]
name = "codex-utils-cache"
version = "0.0.0"
dependencies = [
- "lru",
+ "lru 0.16.3",
"sha1",
"tokio",
]
+[[package]]
+name = "codex-utils-cargo-bin"
+version = "0.0.0"
+dependencies = [
+ "assert_cmd",
+ "path-absolutize",
+ "thiserror 2.0.17",
+]
+
[[package]]
name = "codex-utils-image"
version = "0.0.0"
@@ -1562,7 +1914,7 @@ version = "0.0.0"
dependencies = [
"pretty_assertions",
"serde_json",
- "toml",
+ "toml 0.9.5",
]
[[package]]
@@ -1570,8 +1922,13 @@ name = "codex-utils-pty"
version = "0.0.0"
dependencies = [
"anyhow",
+ "filedescriptor",
+ "lazy_static",
+ "log",
"portable-pty",
+ "shared_library",
"tokio",
+ "winapi",
]
[[package]]
@@ -1589,29 +1946,25 @@ dependencies = [
name = "codex-utils-string"
version = "0.0.0"
-[[package]]
-name = "codex-utils-tokenizer"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "codex-utils-cache",
- "pretty_assertions",
- "thiserror 2.0.17",
- "tiktoken-rs",
- "tokio",
-]
-
[[package]]
name = "codex-windows-sandbox"
-version = "0.1.0"
+version = "0.0.0"
dependencies = [
"anyhow",
+ "base64",
+ "chrono",
+ "codex-protocol",
+ "codex-utils-absolute-path",
"dirs-next",
"dunce",
+ "pretty_assertions",
"rand 0.8.5",
"serde",
"serde_json",
+ "tempfile",
+ "windows 0.58.0",
"windows-sys 0.52.0",
+ "winres",
]
[[package]]
@@ -1671,6 +2024,20 @@ dependencies = [
"static_assertions",
]
+[[package]]
+name = "compact_str"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a"
+dependencies = [
+ "castaway",
+ "cfg-if",
+ "itoa",
+ "rustversion",
+ "ryu",
+ "static_assertions",
+]
+
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -1692,6 +2059,18 @@ dependencies = [
"windows-sys 0.59.0",
]
+[[package]]
+name = "const-hex"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "proptest",
+ "serde_core",
+]
+
[[package]]
name = "convert_case"
version = "0.6.0"
@@ -1703,9 +2082,9 @@ dependencies = [
[[package]]
name = "convert_case"
-version = "0.7.1"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
dependencies = [
"unicode-segmentation",
]
@@ -1742,11 +2121,17 @@ version = "0.0.0"
dependencies = [
"anyhow",
"assert_cmd",
+ "base64",
"codex-core",
"codex-protocol",
+ "codex-utils-absolute-path",
+ "codex-utils-cargo-bin",
"notify",
+ "pretty_assertions",
"regex-lite",
+ "reqwest",
"serde_json",
+ "shlex",
"tempfile",
"tokio",
"walkdir",
@@ -1892,6 +2277,16 @@ dependencies = [
"darling_macro 0.21.3",
]
+[[package]]
+name = "darling"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d"
+dependencies = [
+ "darling_core 0.23.0",
+ "darling_macro 0.23.0",
+]
+
[[package]]
name = "darling_core"
version = "0.20.11"
@@ -1920,6 +2315,19 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "darling_core"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0"
+dependencies = [
+ "ident_case",
+ "proc-macro2",
+ "quote",
+ "strsim 0.11.1",
+ "syn 2.0.104",
+]
+
[[package]]
name = "darling_macro"
version = "0.20.11"
@@ -1942,6 +2350,17 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "darling_macro"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d"
+dependencies = [
+ "darling_core 0.23.0",
+ "quote",
+ "syn 2.0.104",
+]
+
[[package]]
name = "dbus"
version = "0.9.9"
@@ -2010,6 +2429,16 @@ dependencies = [
"serde_json",
]
+[[package]]
+name = "der"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
+dependencies = [
+ "pem-rfc7468",
+ "zeroize",
+]
+
[[package]]
name = "deranged"
version = "0.5.4"
@@ -2042,11 +2471,11 @@ dependencies = [
[[package]]
name = "derive_more"
-version = "2.0.1"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
dependencies = [
- "derive_more-impl 2.0.1",
+ "derive_more-impl 2.1.1",
]
[[package]]
@@ -2064,13 +2493,14 @@ dependencies = [
[[package]]
name = "derive_more-impl"
-version = "2.0.1"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
dependencies = [
- "convert_case 0.7.1",
+ "convert_case 0.10.0",
"proc-macro2",
"quote",
+ "rustc_version",
"syn 2.0.104",
"unicode-xid",
]
@@ -2186,6 +2616,15 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
+[[package]]
+name = "document-features"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61"
+dependencies = [
+ "litrs",
+]
+
[[package]]
name = "dotenvy"
version = "0.15.7"
@@ -2368,17 +2807,6 @@ version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
-[[package]]
-name = "escargot"
-version = "0.5.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11c3aea32bc97b500c9ca6a72b768a26e558264303d101d3409cf6d57a9ed0cf"
-dependencies = [
- "log",
- "serde",
- "serde_json",
-]
-
[[package]]
name = "event-listener"
version = "5.4.0"
@@ -2411,6 +2839,18 @@ dependencies = [
"pin-project-lite",
]
+[[package]]
+name = "exec_server_test_support"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "codex-core",
+ "codex-utils-cargo-bin",
+ "rmcp",
+ "serde_json",
+ "tokio",
+]
+
[[package]]
name = "eyre"
version = "0.6.12"
@@ -2421,17 +2861,6 @@ dependencies = [
"once_cell",
]
-[[package]]
-name = "fancy-regex"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
-dependencies = [
- "bit-set",
- "regex-automata",
- "regex-syntax 0.8.5",
-]
-
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -2549,6 +2978,12 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+[[package]]
+name = "foldhash"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb"
+
[[package]]
name = "foreign-types"
version = "0.3.2"
@@ -2779,7 +3214,7 @@ dependencies = [
"fnv",
"futures-core",
"futures-sink",
- "http",
+ "http 1.3.1",
"indexmap 2.12.0",
"slab",
"tokio",
@@ -2821,7 +3256,7 @@ checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
dependencies = [
"allocator-api2",
"equivalent",
- "foldhash",
+ "foldhash 0.1.5",
]
[[package]]
@@ -2829,6 +3264,11 @@ name = "hashbrown"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
+dependencies = [
+ "allocator-api2",
+ "equivalent",
+ "foldhash 0.2.0",
+]
[[package]]
name = "heck"
@@ -2886,6 +3326,17 @@ dependencies = [
"windows-link 0.1.3",
]
+[[package]]
+name = "http"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
[[package]]
name = "http"
version = "1.3.1"
@@ -2904,7 +3355,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
- "http",
+ "http 1.3.1",
]
[[package]]
@@ -2915,7 +3366,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
- "http",
+ "http 1.3.1",
"http-body",
"pin-project-lite",
]
@@ -2943,7 +3394,7 @@ dependencies = [
"futures-channel",
"futures-core",
"h2",
- "http",
+ "http 1.3.1",
"http-body",
"httparse",
"httpdate",
@@ -2961,7 +3412,7 @@ version = "0.27.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
dependencies = [
- "http",
+ "http 1.3.1",
"hyper",
"hyper-util",
"rustls",
@@ -3013,14 +3464,14 @@ dependencies = [
"futures-channel",
"futures-core",
"futures-util",
- "http",
+ "http 1.3.1",
"http-body",
"hyper",
"ipnet",
"libc",
"percent-encoding",
"pin-project-lite",
- "socket2 0.6.0",
+ "socket2 0.6.1",
"system-configuration",
"tokio",
"tower-service",
@@ -3040,7 +3491,7 @@ dependencies = [
"js-sys",
"log",
"wasm-bindgen",
- "windows-core",
+ "windows-core 0.61.2",
]
[[package]]
@@ -3223,9 +3674,9 @@ dependencies = [
[[package]]
name = "image"
-version = "0.25.8"
+version = "0.25.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7"
+checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a"
dependencies = [
"bytemuck",
"byteorder-lite",
@@ -3233,8 +3684,33 @@ dependencies = [
"num-traits",
"png",
"tiff",
- "zune-core",
- "zune-jpeg",
+ "zune-core 0.5.0",
+ "zune-jpeg 0.5.5",
+]
+
+[[package]]
+name = "impl-more"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2"
+
+[[package]]
+name = "include_dir"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd"
+dependencies = [
+ "include_dir_macros",
+]
+
+[[package]]
+name = "include_dir_macros"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75"
+dependencies = [
+ "proc-macro2",
+ "quote",
]
[[package]]
@@ -3304,13 +3780,14 @@ dependencies = [
[[package]]
name = "insta"
-version = "1.43.2"
+version = "1.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
+checksum = "1b66886d14d18d420ab5052cbff544fc5d34d0b2cdd35eb5976aaa10a4a472e5"
dependencies = [
"console",
"once_cell",
"similar",
+ "tempfile",
]
[[package]]
@@ -3335,17 +3812,6 @@ dependencies = [
"rustversion",
]
-[[package]]
-name = "io-uring"
-version = "0.7.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
-dependencies = [
- "bitflags 2.10.0",
- "cfg-if",
- "libc",
-]
-
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -3464,6 +3930,16 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
+[[package]]
+name = "jobserver"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
+dependencies = [
+ "getrandom 0.3.3",
+ "libc",
+]
+
[[package]]
name = "js-sys"
version = "0.3.77"
@@ -3474,6 +3950,16 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "kasuari"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fe90c1150662e858c7d5f945089b7517b0a80d8bf7ba4b1b5ffc984e7230a5b"
+dependencies = [
+ "hashbrown 0.16.0",
+ "thiserror 2.0.17",
+]
+
[[package]]
name = "keyring"
version = "3.6.3"
@@ -3545,15 +4031,21 @@ dependencies = [
[[package]]
name = "landlock"
-version = "0.4.2"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3d2ef408b88e913bfc6594f5e693d57676f6463ded7d8bf994175364320c706"
+checksum = "49fefd6652c57d68aaa32544a4c0e642929725bdc1fd929367cdeb673ab81088"
dependencies = [
"enumflags2",
"libc",
"thiserror 2.0.17",
]
+[[package]]
+name = "language-tags"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+
[[package]]
name = "lazy_static"
version = "1.5.0"
@@ -3562,9 +4054,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
-version = "0.2.175"
+version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
+checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]]
name = "libdbus-sys"
@@ -3613,6 +4105,18 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+[[package]]
+name = "litrs"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092"
+
+[[package]]
+name = "local-waker"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487"
+
[[package]]
name = "lock_api"
version = "0.4.13"
@@ -3661,6 +4165,15 @@ dependencies = [
"hashbrown 0.15.4",
]
+[[package]]
+name = "lru"
+version = "0.16.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593"
+dependencies = [
+ "hashbrown 0.16.0",
+]
+
[[package]]
name = "lru-slab"
version = "0.1.2"
@@ -3716,14 +4229,16 @@ name = "mcp_test_support"
version = "0.0.0"
dependencies = [
"anyhow",
- "assert_cmd",
"codex-core",
"codex-mcp-server",
+ "codex-utils-cargo-bin",
+ "core_test_support",
"mcp-types",
"os_info",
"pretty_assertions",
"serde",
"serde_json",
+ "shlex",
"tokio",
"wiremock",
]
@@ -4056,7 +4571,7 @@ dependencies = [
"base64",
"chrono",
"getrandom 0.2.16",
- "http",
+ "http 1.3.1",
"rand 0.8.5",
"reqwest",
"serde",
@@ -4204,9 +4719,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
-version = "0.9.109"
+version = "0.9.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571"
+checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
dependencies = [
"cc",
"libc",
@@ -4217,9 +4732,9 @@ dependencies = [
[[package]]
name = "opentelemetry"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aaf416e4cb72756655126f7dd7bb0af49c674f4c1b9903e80c009e0c37e552e6"
+checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0"
dependencies = [
"futures-core",
"futures-sink",
@@ -4231,9 +4746,9 @@ dependencies = [
[[package]]
name = "opentelemetry-appender-tracing"
-version = "0.30.1"
+version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e68f63eca5fad47e570e00e893094fc17be959c80c79a7d6ec1abdd5ae6ffc16"
+checksum = "ef6a1ac5ca3accf562b8c306fa8483c85f4390f768185ab775f242f7fe8fdcc2"
dependencies = [
"opentelemetry",
"tracing",
@@ -4243,24 +4758,24 @@ dependencies = [
[[package]]
name = "opentelemetry-http"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50f6639e842a97dbea8886e3439710ae463120091e2e064518ba8e716e6ac36d"
+checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d"
dependencies = [
"async-trait",
"bytes",
- "http",
+ "http 1.3.1",
"opentelemetry",
"reqwest",
]
[[package]]
name = "opentelemetry-otlp"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbee664a43e07615731afc539ca60c6d9f1a9425e25ca09c57bc36c87c55852b"
+checksum = "7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf"
dependencies = [
- "http",
+ "http 1.3.1",
"opentelemetry",
"opentelemetry-http",
"opentelemetry-proto",
@@ -4276,30 +4791,32 @@ dependencies = [
[[package]]
name = "opentelemetry-proto"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e046fd7660710fe5a05e8748e70d9058dc15c94ba914e7c4faa7c728f0e8ddc"
+checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f"
dependencies = [
"base64",
- "hex",
+ "const-hex",
"opentelemetry",
"opentelemetry_sdk",
"prost",
"serde",
+ "serde_json",
"tonic",
+ "tonic-prost",
]
[[package]]
name = "opentelemetry-semantic-conventions"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83d059a296a47436748557a353c5e6c5705b9470ef6c95cfc52c21a8814ddac2"
+checksum = "e62e29dfe041afb8ed2a6c9737ab57db4907285d999ef8ad3a59092a36bdc846"
[[package]]
name = "opentelemetry_sdk"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11f644aa9e5e31d11896e024305d7e3c98a88884d9f8919dbf37a9991bc47a4b"
+checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd"
dependencies = [
"futures-channel",
"futures-executor",
@@ -4307,7 +4824,6 @@ dependencies = [
"opentelemetry",
"percent-encoding",
"rand 0.9.2",
- "serde_json",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
@@ -4356,6 +4872,10 @@ name = "owo-colors"
version = "4.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e"
+dependencies = [
+ "supports-color 2.1.0",
+ "supports-color 3.0.2",
+]
[[package]]
name = "parking"
@@ -4392,6 +4912,12 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+[[package]]
+name = "pastey"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57d6c094ee800037dff99e02cab0eaf3142826586742a270ab3d7a62656bd27a"
+
[[package]]
name = "path-absolutize"
version = "3.1.1"
@@ -4416,6 +4942,15 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
+[[package]]
+name = "pem-rfc7468"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
+dependencies = [
+ "base64ct",
+]
+
[[package]]
name = "percent-encoding"
version = "2.3.1"
@@ -4644,7 +5179,7 @@ version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
dependencies = [
- "toml_edit",
+ "toml_edit 0.23.10+spec-1.0.0",
]
[[package]]
@@ -4658,23 +5193,38 @@ dependencies = [
[[package]]
name = "process-wrap"
-version = "8.2.1"
+version = "9.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1"
+checksum = "5e5fd83ab7fa55fd06f5e665e3fc52b8bca451c0486b8ea60ad649cd1c10a5da"
dependencies = [
"futures",
"indexmap 2.12.0",
"nix 0.30.1",
"tokio",
"tracing",
- "windows",
+ "windows 0.61.3",
+]
+
+[[package]]
+name = "proptest"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40"
+dependencies = [
+ "bitflags 2.10.0",
+ "num-traits",
+ "rand 0.9.2",
+ "rand_chacha 0.9.0",
+ "rand_xorshift",
+ "regex-syntax 0.8.5",
+ "unarray",
]
[[package]]
name = "prost"
-version = "0.13.5"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
+checksum = "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d"
dependencies = [
"bytes",
"prost-derive",
@@ -4682,9 +5232,9 @@ dependencies = [
[[package]]
name = "prost-derive"
-version = "0.13.5"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
+checksum = "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425"
dependencies = [
"anyhow",
"itertools 0.14.0",
@@ -4756,9 +5306,9 @@ dependencies = [
"pin-project-lite",
"quinn-proto",
"quinn-udp",
- "rustc-hash 2.1.1",
+ "rustc-hash",
"rustls",
- "socket2 0.6.0",
+ "socket2 0.6.1",
"thiserror 2.0.17",
"tokio",
"tracing",
@@ -4776,7 +5326,7 @@ dependencies = [
"lru-slab",
"rand 0.9.2",
"ring",
- "rustc-hash 2.1.1",
+ "rustc-hash",
"rustls",
"rustls-pki-types",
"slab",
@@ -4795,7 +5345,7 @@ dependencies = [
"cfg_aliases 0.2.1",
"libc",
"once_cell",
- "socket2 0.6.0",
+ "socket2 0.6.1",
"tracing",
"windows-sys 0.60.2",
]
@@ -4884,6 +5434,15 @@ dependencies = [
"getrandom 0.3.3",
]
+[[package]]
+name = "rand_xorshift"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a"
+dependencies = [
+ "rand_core 0.9.3",
+]
+
[[package]]
name = "ratatui"
version = "0.29.0"
@@ -4891,16 +5450,36 @@ source = "git+https://github.com/nornagon/ratatui?branch=nornagon-v0.29.0-patch#
dependencies = [
"bitflags 2.10.0",
"cassowary",
- "compact_str",
+ "compact_str 0.8.1",
"crossterm",
"indoc",
"instability",
"itertools 0.13.0",
- "lru",
+ "lru 0.12.5",
"paste",
"strum 0.26.3",
"unicode-segmentation",
- "unicode-truncate",
+ "unicode-truncate 1.1.0",
+ "unicode-width 0.2.1",
+]
+
+[[package]]
+name = "ratatui-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ef8dea09a92caaf73bff7adb70b76162e5937524058a7e5bff37869cbbec293"
+dependencies = [
+ "bitflags 2.10.0",
+ "compact_str 0.9.0",
+ "hashbrown 0.16.0",
+ "indoc",
+ "itertools 0.14.0",
+ "kasuari",
+ "lru 0.16.3",
+ "strum 0.27.2",
+ "thiserror 2.0.17",
+ "unicode-segmentation",
+ "unicode-truncate 2.0.0",
"unicode-width 0.2.1",
]
@@ -4966,9 +5545,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.11.1"
+version = "1.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
+checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
dependencies = [
"aho-corasick",
"memchr",
@@ -4978,9 +5557,9 @@ dependencies = [
[[package]]
name = "regex-automata"
-version = "0.4.9"
+version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
dependencies = [
"aho-corasick",
"memchr",
@@ -4989,9 +5568,9 @@ dependencies = [
[[package]]
name = "regex-lite"
-version = "0.1.7"
+version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "943f41321c63ef1c92fd763bfe054d2668f7f225a5c29f0105903dc2fc04ba30"
+checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da"
[[package]]
name = "regex-syntax"
@@ -5007,9 +5586,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
-version = "0.12.23"
+version = "0.12.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
+checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
dependencies = [
"base64",
"bytes",
@@ -5018,7 +5597,7 @@ dependencies = [
"futures-core",
"futures-util",
"h2",
- "http",
+ "http 1.3.1",
"http-body",
"http-body-util",
"hyper",
@@ -5070,19 +5649,20 @@ dependencies = [
[[package]]
name = "rmcp"
-version = "0.8.5"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5947688160b56fb6c827e3c20a72c90392a1d7e9dec74749197aa1780ac42ca"
+checksum = "528d42f8176e6e5e71ea69182b17d1d0a19a6b3b894b564678b74cd7cab13cfa"
dependencies = [
+ "async-trait",
"base64",
"bytes",
"chrono",
"futures",
- "http",
+ "http 1.3.1",
"http-body",
"http-body-util",
"oauth2",
- "paste",
+ "pastey",
"pin-project-lite",
"process-wrap",
"rand 0.9.2",
@@ -5104,11 +5684,11 @@ dependencies = [
[[package]]
name = "rmcp-macros"
-version = "0.8.5"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01263441d3f8635c628e33856c468b96ebbce1af2d3699ea712ca71432d4ee7a"
+checksum = "e3f81daaa494eb8e985c9462f7d6ce1ab05e5299f48aafd76cdd3d8b060e6f59"
dependencies = [
- "darling 0.21.3",
+ "darling 0.23.0",
"proc-macro2",
"quote",
"serde_json",
@@ -5121,12 +5701,6 @@ version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"
-[[package]]
-name = "rustc-hash"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
-
[[package]]
name = "rustc-hash"
version = "2.1.1"
@@ -5174,6 +5748,7 @@ version = "0.23.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1"
dependencies = [
+ "log",
"once_cell",
"ring",
"rustls-pki-types",
@@ -5464,13 +6039,14 @@ checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
[[package]]
name = "sentry"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5484316556650182f03b43d4c746ce0e3e48074a21e2f51244b648b6542e1066"
+checksum = "d9794f69ad475e76c057e326175d3088509649e3aed98473106b9fe94ba59424"
dependencies = [
"httpdate",
"native-tls",
"reqwest",
+ "sentry-actix",
"sentry-backtrace",
"sentry-contexts",
"sentry-core",
@@ -5481,23 +6057,35 @@ dependencies = [
"ureq",
]
+[[package]]
+name = "sentry-actix"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0fee202934063ace4f1d1d063113b8982293762628e563a2d2fba08fb20b110"
+dependencies = [
+ "actix-http",
+ "actix-web",
+ "bytes",
+ "futures-util",
+ "sentry-core",
+]
+
[[package]]
name = "sentry-backtrace"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40aa225bb41e2ec9d7c90886834367f560efc1af028f1c5478a6cce6a59c463a"
+checksum = "e81137ad53b8592bd0935459ad74c0376053c40084aa170451e74eeea8dbc6c3"
dependencies = [
"backtrace",
- "once_cell",
"regex",
"sentry-core",
]
[[package]]
name = "sentry-contexts"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a8dd746da3d16cb8c39751619cefd4fcdbd6df9610f3310fd646b55f6e39910"
+checksum = "cfb403c66cc2651a01b9bacda2e7c22cd51f7e8f56f206aa4310147eb3259282"
dependencies = [
"hostname",
"libc",
@@ -5509,33 +6097,32 @@ dependencies = [
[[package]]
name = "sentry-core"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "161283cfe8e99c8f6f236a402b9ccf726b201f365988b5bb637ebca0abbd4a30"
+checksum = "cfc409727ae90765ca8ea76fe6c949d6f159a11d02e130b357fa652ee9efcada"
dependencies = [
- "once_cell",
- "rand 0.8.5",
+ "rand 0.9.2",
"sentry-types",
"serde",
"serde_json",
+ "url",
]
[[package]]
name = "sentry-debug-images"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fc6b25e945fcaa5e97c43faee0267eebda9f18d4b09a251775d8fef1086238a"
+checksum = "06a2778a222fd90ebb01027c341a72f8e24b0c604c6126504a4fe34e5500e646"
dependencies = [
"findshlibs",
- "once_cell",
"sentry-core",
]
[[package]]
name = "sentry-panic"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc74f229c7186dd971a9491ffcbe7883544aa064d1589bd30b83fb856cd22d63"
+checksum = "3df79f4e1e72b2a8b75a0ebf49e78709ceb9b3f0b451f13adc92a0361b0aaabe"
dependencies = [
"sentry-backtrace",
"sentry-core",
@@ -5543,10 +6130,11 @@ dependencies = [
[[package]]
name = "sentry-tracing"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd3c5faf2103cd01eeda779ea439b68c4ee15adcdb16600836e97feafab362ec"
+checksum = "ff2046f527fd4b75e0b6ab3bd656c67dce42072f828dc4d03c206d15dca74a93"
dependencies = [
+ "bitflags 2.10.0",
"sentry-backtrace",
"sentry-core",
"tracing-core",
@@ -5555,16 +6143,16 @@ dependencies = [
[[package]]
name = "sentry-types"
-version = "0.34.0"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d68cdf6bc41b8ff3ae2a9c4671e97426dcdd154cc1d4b6b72813f285d6b163f"
+checksum = "c7b9b4e4c03a4d3643c18c78b8aa91d2cbee5da047d2fa0ca4bb29bc67e6c55c"
dependencies = [
"debugid",
"hex",
- "rand 0.8.5",
+ "rand 0.9.2",
"serde",
"serde_json",
- "thiserror 1.0.69",
+ "thiserror 2.0.17",
"time",
"url",
"uuid",
@@ -5670,9 +6258,9 @@ dependencies = [
[[package]]
name = "serde_with"
-version = "3.14.0"
+version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
+checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"base64",
"chrono",
@@ -5681,8 +6269,7 @@ dependencies = [
"indexmap 2.12.0",
"schemars 0.9.0",
"schemars 1.0.4",
- "serde",
- "serde_derive",
+ "serde_core",
"serde_json",
"serde_with_macros",
"time",
@@ -5690,16 +6277,29 @@ dependencies = [
[[package]]
name = "serde_with_macros"
-version = "3.14.0"
+version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
+checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c"
dependencies = [
- "darling 0.20.11",
+ "darling 0.21.3",
"proc-macro2",
"quote",
"syn 2.0.104",
]
+[[package]]
+name = "serde_yaml"
+version = "0.9.34+deprecated"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
+dependencies = [
+ "indexmap 2.12.0",
+ "itoa",
+ "ryu",
+ "serde",
+ "unsafe-libyaml",
+]
+
[[package]]
name = "serial2"
version = "0.2.31"
@@ -5879,12 +6479,12 @@ dependencies = [
[[package]]
name = "socket2"
-version = "0.6.0"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
+checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881"
dependencies = [
"libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
@@ -6046,6 +6646,9 @@ name = "strum"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
+dependencies = [
+ "strum_macros 0.27.2",
+]
[[package]]
name = "strum_macros"
@@ -6078,6 +6681,16 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+[[package]]
+name = "supports-color"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89"
+dependencies = [
+ "is-terminal",
+ "is_ci",
+]
+
[[package]]
name = "supports-color"
version = "3.0.2"
@@ -6243,9 +6856,9 @@ dependencies = [
[[package]]
name = "test-log"
-version = "0.2.18"
+version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b"
+checksum = "37d53ac171c92a39e4769491c4b4dde7022c60042254b5fc044ae409d34a24d4"
dependencies = [
"env_logger",
"test-log-macros",
@@ -6254,9 +6867,9 @@ dependencies = [
[[package]]
name = "test-log-macros"
-version = "0.2.18"
+version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36"
+checksum = "be35209fd0781c5401458ab66e4f98accf63553e8fae7425503e92fdd319783b"
dependencies = [
"proc-macro2",
"quote",
@@ -6343,22 +6956,7 @@ dependencies = [
"half",
"quick-error",
"weezl",
- "zune-jpeg",
-]
-
-[[package]]
-name = "tiktoken-rs"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a19830747d9034cd9da43a60eaa8e552dfda7712424aebf187b7a60126bae0d"
-dependencies = [
- "anyhow",
- "base64",
- "bstr",
- "fancy-regex",
- "lazy_static",
- "regex",
- "rustc-hash 1.1.0",
+ "zune-jpeg 0.4.19",
]
[[package]]
@@ -6442,29 +7040,26 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
-version = "1.47.1"
+version = "1.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
+checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
dependencies = [
- "backtrace",
"bytes",
- "io-uring",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
- "slab",
- "socket2 0.6.0",
+ "socket2 0.6.1",
"tokio-macros",
- "windows-sys 0.59.0",
+ "windows-sys 0.61.1",
]
[[package]]
name = "tokio-macros"
-version = "2.5.0"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
+checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
dependencies = [
"proc-macro2",
"quote",
@@ -6493,13 +7088,14 @@ dependencies = [
[[package]]
name = "tokio-stream"
-version = "0.1.17"
+version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
+checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
+ "tokio-util",
]
[[package]]
@@ -6526,9 +7122,19 @@ dependencies = [
"futures-sink",
"futures-util",
"pin-project-lite",
+ "slab",
"tokio",
]
+[[package]]
+name = "toml"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "toml"
version = "0.9.5"
@@ -6546,18 +7152,30 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "0.7.0"
+version = "0.7.5+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3"
+checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347"
dependencies = [
- "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.23.10+spec-1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269"
+dependencies = [
+ "indexmap 2.12.0",
+ "toml_datetime",
+ "toml_parser",
+ "winnow",
]
[[package]]
name = "toml_edit"
-version = "0.23.4"
+version = "0.24.0+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7211ff1b8f0d3adae1663b7da9ffe396eabe1ca25f0b0bee42b0da29a9ddce93"
+checksum = "8c740b185920170a6d9191122cafef7010bd6270a3824594bff6784c04d7f09e"
dependencies = [
"indexmap 2.12.0",
"toml_datetime",
@@ -6568,31 +7186,29 @@ dependencies = [
[[package]]
name = "toml_parser"
-version = "1.0.2"
+version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10"
+checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
dependencies = [
"winnow",
]
[[package]]
name = "toml_writer"
-version = "1.0.2"
+version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
+checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
[[package]]
name = "tonic"
-version = "0.13.1"
+version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9"
+checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203"
dependencies = [
"async-trait",
- "axum",
"base64",
"bytes",
- "h2",
- "http",
+ "http 1.3.1",
"http-body",
"http-body-util",
"hyper",
@@ -6600,9 +7216,10 @@ dependencies = [
"hyper-util",
"percent-encoding",
"pin-project",
- "prost",
- "socket2 0.5.10",
+ "rustls-native-certs",
+ "sync_wrapper",
"tokio",
+ "tokio-rustls",
"tokio-stream",
"tower",
"tower-layer",
@@ -6610,6 +7227,17 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "tonic-prost"
+version = "0.14.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67"
+dependencies = [
+ "bytes",
+ "prost",
+ "tonic",
+]
+
[[package]]
name = "tower"
version = "0.5.2"
@@ -6638,7 +7266,7 @@ dependencies = [
"bitflags 2.10.0",
"bytes",
"futures-util",
- "http",
+ "http 1.3.1",
"http-body",
"iri-string",
"pin-project-lite",
@@ -6661,9 +7289,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
-version = "0.1.41"
+version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
+checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"log",
"pin-project-lite",
@@ -6685,9 +7313,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.30"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
+checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
@@ -6696,9 +7324,9 @@ dependencies = [
[[package]]
name = "tracing-core"
-version = "0.1.34"
+version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
+checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
@@ -6725,11 +7353,30 @@ dependencies = [
"tracing-core",
]
+[[package]]
+name = "tracing-opentelemetry"
+version = "0.32.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e6e5658463dd88089aba75c7791e1d3120633b1bfde22478b28f625a9bb1b8e"
+dependencies = [
+ "js-sys",
+ "opentelemetry",
+ "opentelemetry_sdk",
+ "rustversion",
+ "smallvec",
+ "thiserror 2.0.17",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+ "web-time",
+]
+
[[package]]
name = "tracing-subscriber"
-version = "0.3.20"
+version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
+checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -6848,6 +7495,16 @@ dependencies = [
"termcolor",
]
+[[package]]
+name = "tui-scrollbar"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c42613099915b2e30e9f144670666e858e2538366f77742e1cf1c2f230efcacd"
+dependencies = [
+ "document-features",
+ "ratatui-core",
+]
+
[[package]]
name = "typenum"
version = "1.18.0"
@@ -6874,6 +7531,12 @@ dependencies = [
"libc",
]
+[[package]]
+name = "unarray"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+
[[package]]
name = "unicase"
version = "2.8.1"
@@ -6909,6 +7572,17 @@ dependencies = [
"unicode-width 0.1.14",
]
+[[package]]
+name = "unicode-truncate"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fbf03860ff438702f3910ca5f28f8dac63c1c11e7efb5012b8b175493606330"
+dependencies = [
+ "itertools 0.13.0",
+ "unicode-segmentation",
+ "unicode-width 0.2.1",
+]
+
[[package]]
name = "unicode-width"
version = "0.1.14"
@@ -6927,6 +7601,12 @@ version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+[[package]]
+name = "unsafe-libyaml"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
+
[[package]]
name = "untrusted"
version = "0.9.0"
@@ -6935,15 +7615,31 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "ureq"
-version = "2.12.1"
+version = "3.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d"
+checksum = "d39cb1dbab692d82a977c0392ffac19e188bd9186a9f32806f0aaa859d75585a"
dependencies = [
"base64",
+ "der",
"log",
"native-tls",
- "once_cell",
- "url",
+ "percent-encoding",
+ "rustls-pki-types",
+ "ureq-proto",
+ "utf-8",
+ "webpki-root-certs",
+]
+
+[[package]]
+name = "ureq-proto"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
+dependencies = [
+ "base64",
+ "http 1.3.1",
+ "httparse",
+ "log",
]
[[package]]
@@ -6964,6 +7660,12 @@ version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+[[package]]
+name = "utf-8"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+
[[package]]
name = "utf8_iter"
version = "1.0.4"
@@ -7247,9 +7949,9 @@ dependencies = [
[[package]]
name = "webbrowser"
-version = "1.0.5"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aaf4f3c0ba838e82b4e5ccc4157003fb8c324ee24c058470ffb82820becbde98"
+checksum = "00f1243ef785213e3a32fa0396093424a3a6ea566f9948497e5a2309261a4c97"
dependencies = [
"core-foundation 0.10.1",
"jni",
@@ -7261,6 +7963,15 @@ dependencies = [
"web-sys",
]
+[[package]]
+name = "webpki-root-certs"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee3e3b5f5e80bc89f30ce8d0343bf4e5f12341c51f3e26cbeecbc7c85443e85b"
+dependencies = [
+ "rustls-pki-types",
+]
+
[[package]]
name = "webpki-roots"
version = "1.0.2"
@@ -7290,9 +8001,9 @@ dependencies = [
[[package]]
name = "wildmatch"
-version = "2.5.0"
+version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39b7d07a236abaef6607536ccfaf19b396dbe3f5110ddb73d39f4562902ed382"
+checksum = "29333c3ea1ba8b17211763463ff24ee84e41c78224c16b001cd907e663a38c68"
[[package]]
name = "winapi"
@@ -7325,6 +8036,16 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+[[package]]
+name = "windows"
+version = "0.58.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
+dependencies = [
+ "windows-core 0.58.0",
+ "windows-targets 0.52.6",
+]
+
[[package]]
name = "windows"
version = "0.61.3"
@@ -7332,7 +8053,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections",
- "windows-core",
+ "windows-core 0.61.2",
"windows-future",
"windows-link 0.1.3",
"windows-numerics",
@@ -7344,7 +8065,20 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
- "windows-core",
+ "windows-core 0.61.2",
+]
+
+[[package]]
+name = "windows-core"
+version = "0.58.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
+dependencies = [
+ "windows-implement 0.58.0",
+ "windows-interface 0.58.0",
+ "windows-result 0.2.0",
+ "windows-strings 0.1.0",
+ "windows-targets 0.52.6",
]
[[package]]
@@ -7353,11 +8087,11 @@ version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
dependencies = [
- "windows-implement",
- "windows-interface",
+ "windows-implement 0.60.0",
+ "windows-interface 0.59.1",
"windows-link 0.1.3",
- "windows-result",
- "windows-strings",
+ "windows-result 0.3.4",
+ "windows-strings 0.4.2",
]
[[package]]
@@ -7366,11 +8100,22 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
- "windows-core",
+ "windows-core 0.61.2",
"windows-link 0.1.3",
"windows-threading",
]
+[[package]]
+name = "windows-implement"
+version = "0.58.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.104",
+]
+
[[package]]
name = "windows-implement"
version = "0.60.0"
@@ -7382,6 +8127,17 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "windows-interface"
+version = "0.58.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.104",
+]
+
[[package]]
name = "windows-interface"
version = "0.59.1"
@@ -7411,7 +8167,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
- "windows-core",
+ "windows-core 0.61.2",
"windows-link 0.1.3",
]
@@ -7422,8 +8178,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
dependencies = [
"windows-link 0.1.3",
- "windows-result",
- "windows-strings",
+ "windows-result 0.3.4",
+ "windows-strings 0.4.2",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
+dependencies = [
+ "windows-targets 0.52.6",
]
[[package]]
@@ -7435,6 +8200,16 @@ dependencies = [
"windows-link 0.1.3",
]
+[[package]]
+name = "windows-strings"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
+dependencies = [
+ "windows-result 0.2.0",
+ "windows-targets 0.52.6",
+]
+
[[package]]
name = "windows-strings"
version = "0.4.2"
@@ -7742,9 +8517,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
-version = "0.7.12"
+version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
+checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
dependencies = [
"memchr",
]
@@ -7758,12 +8533,27 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "winres"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c"
+dependencies = [
+ "toml 0.5.11",
+]
+
[[package]]
name = "winsafe"
version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
+[[package]]
+name = "winsplit"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ab703352da6a72f35c39a533526393725640575bb211f61987a2748323ad956"
+
[[package]]
name = "wiremock"
version = "0.6.5"
@@ -7774,7 +8564,7 @@ dependencies = [
"base64",
"deadpool",
"futures",
- "http",
+ "http 1.3.1",
"http-body-util",
"hyper",
"hyper-util",
@@ -8035,19 +8825,62 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "zstd"
+version = "0.13.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
+dependencies = [
+ "zstd-safe",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "7.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
+dependencies = [
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.16+zstd.1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
+dependencies = [
+ "cc",
+ "pkg-config",
+]
+
[[package]]
name = "zune-core"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"
+[[package]]
+name = "zune-core"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "111f7d9820f05fd715df3144e254d6fc02ee4088b0644c0ffd0efc9e6d9d2773"
+
[[package]]
name = "zune-jpeg"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a"
dependencies = [
- "zune-core",
+ "zune-core 0.4.12",
+]
+
+[[package]]
+name = "zune-jpeg"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6fb7703e32e9a07fb3f757360338b3a567a5054f21b5f52a666752e333d58e"
+dependencies = [
+ "zune-core 0.5.0",
]
[[package]]
diff --git a/codex-rs/Cargo.toml b/codex-rs/Cargo.toml
index b19bf7660..52c61027d 100644
--- a/codex-rs/Cargo.toml
+++ b/codex-rs/Cargo.toml
@@ -6,6 +6,7 @@ members = [
"app-server",
"app-server-protocol",
"app-server-test-client",
+ "debug-client",
"apply-patch",
"arg0",
"feedback",
@@ -16,8 +17,9 @@ members = [
"common",
"core",
"exec",
+ "exec-server",
"execpolicy",
- "execpolicy2",
+ "execpolicy-legacy",
"keyring-store",
"file-search",
"linux-sandbox",
@@ -33,6 +35,9 @@ members = [
"stdio-to-uds",
"otel",
"tui",
+ "tui2",
+ "utils/absolute-path",
+ "utils/cargo-bin",
"utils/git",
"utils/cache",
"utils/image",
@@ -40,22 +45,25 @@ members = [
"utils/pty",
"utils/readiness",
"utils/string",
- "utils/tokenizer",
+ "codex-client",
+ "codex-api",
]
resolver = "2"
[workspace.package]
-version = "0.0.0"
+version = "0.80.0"
# Track the edition for all workspace crates in one place. Individual
# crates can still override this value, but keeping it here means new
# crates created with `cargo new -w ...` automatically inherit the 2024
# edition.
edition = "2024"
+license = "Apache-2.0"
[workspace.dependencies]
# Internal
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
+codex-api = { path = "codex-api" }
codex-app-server = { path = "app-server" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-apply-patch = { path = "apply-patch" }
@@ -63,9 +71,11 @@ codex-arg0 = { path = "arg0" }
codex-async-utils = { path = "async-utils" }
codex-backend-client = { path = "backend-client" }
codex-chatgpt = { path = "chatgpt" }
+codex-client = { path = "codex-client" }
codex-common = { path = "common" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
+codex-execpolicy = { path = "execpolicy" }
codex-feedback = { path = "feedback" }
codex-file-search = { path = "file-search" }
codex-git = { path = "utils/git" }
@@ -82,15 +92,18 @@ codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
+codex-tui2 = { path = "tui2" }
+codex-utils-absolute-path = { path = "utils/absolute-path" }
codex-utils-cache = { path = "utils/cache" }
+codex-utils-cargo-bin = { path = "utils/cargo-bin" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
-codex-utils-tokenizer = { path = "utils/tokenizer" }
codex-windows-sandbox = { path = "windows-sandbox-rs" }
core_test_support = { path = "core/tests/common" }
+exec_server_test_support = { path = "exec-server/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }
@@ -99,7 +112,6 @@ allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = { version = "3", features = ["wayland-data-control"] }
-askama = "0.14"
assert_cmd = "2"
assert_matches = "1.5.0"
async-channel = "2.3.1"
@@ -108,6 +120,7 @@ async-trait = "0.1.89"
axum = { version = "0.8", default-features = false }
base64 = "0.22.1"
bytes = "1.10.1"
+chardetng = "0.1.17"
chrono = "0.4.42"
clap = "4"
clap_complete = "4"
@@ -119,9 +132,9 @@ diffy = "0.4.2"
dirs = "6"
dotenvy = "0.15.7"
dunce = "1.0.4"
+encoding_rs = "0.8.35"
env-flags = "0.1.1"
env_logger = "0.11.5"
-escargot = "0.5"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
http = "1.3.1"
@@ -129,28 +142,30 @@ icu_decimal = "2.1"
icu_locale_core = "2.1"
icu_provider = { version = "2.1", features = ["sync"] }
ignore = "0.4.23"
-image = { version = "^0.25.8", default-features = false }
+image = { version = "^0.25.9", default-features = false }
+include_dir = "0.7.4"
indexmap = "2.12.0"
-insta = "1.43.2"
+insta = "1.46.0"
itertools = "0.14.0"
keyring = { version = "3.6", default-features = false }
-landlock = "0.4.1"
+landlock = "0.4.4"
lazy_static = "1"
-libc = "0.2.175"
+libc = "0.2.177"
log = "0.4"
-lru = "0.12.5"
+lru = "0.16.3"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
notify = "8.2.0"
nucleo-matcher = "0.3.1"
-once_cell = "1"
+once_cell = "1.20.2"
openssl-sys = "*"
-opentelemetry = "0.30.0"
-opentelemetry-appender-tracing = "0.30.0"
-opentelemetry-otlp = "0.30.0"
-opentelemetry-semantic-conventions = "0.30.0"
-opentelemetry_sdk = "0.30.0"
+opentelemetry = "0.31.0"
+opentelemetry-appender-tracing = "0.31.0"
+opentelemetry-otlp = "0.31.0"
+opentelemetry-semantic-conventions = "0.31.0"
+opentelemetry_sdk = "0.31.0"
+tracing-opentelemetry = "0.32.0"
os_info = "3.12.0"
owo-colors = "4.2.0"
path-absolutize = "3.1.1"
@@ -161,48 +176,52 @@ pretty_assertions = "1.4.1"
pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
+ratatui-core = "0.1.0"
ratatui-macros = "0.6.0"
-regex-lite = "0.1.7"
+regex = "1.12.2"
+regex-lite = "0.1.8"
reqwest = "0.12"
-rmcp = { version = "0.8.5", default-features = false }
+rmcp = { version = "0.12.0", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
-sentry = "0.34.0"
+sentry = "0.46.0"
serde = "1"
serde_json = "1"
-serde_with = "3.14"
+serde_with = "3.16"
+serde_yaml = "0.9"
serial_test = "3.2.0"
sha1 = "0.10.6"
sha2 = "0.10"
shlex = "1.3.0"
similar = "2.7.0"
+socket2 = "0.6.1"
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
-test-log = "0.2.18"
+test-log = "0.2.19"
textwrap = "0.16.2"
thiserror = "2.0.17"
-tiktoken-rs = "0.9"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
-tokio-stream = "0.1.17"
+tokio-stream = "0.1.18"
tokio-test = "0.4"
tokio-util = "0.7.16"
toml = "0.9.5"
-toml_edit = "0.23.4"
-tonic = "0.13.1"
-tracing = "0.1.41"
+toml_edit = "0.24.0"
+tracing = "0.1.43"
tracing-appender = "0.2.3"
-tracing-subscriber = "0.3.20"
+tracing-subscriber = "0.3.22"
tracing-test = "0.2.5"
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25"
+zstd = "0.13"
tree-sitter-highlight = "0.25.10"
ts-rs = "11"
+tui-scrollbar = "0.2.1"
uds_windows = "1.1.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
@@ -213,7 +232,7 @@ vt100 = "0.16.2"
walkdir = "2.5.0"
webbrowser = "1.0"
which = "6"
-wildmatch = "2.5.0"
+wildmatch = "2.6.1"
wiremock = "0.6"
zeroize = "1.8.2"
@@ -259,12 +278,7 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
-ignored = [
- "icu_provider",
- "openssl-sys",
- "codex-utils-readiness",
- "codex-utils-tokenizer",
-]
+ignored = ["icu_provider", "openssl-sys", "codex-utils-readiness"]
[profile.release]
lto = "fat"
diff --git a/codex-rs/README.md b/codex-rs/README.md
index 385b4c62e..cbe1fe377 100644
--- a/codex-rs/README.md
+++ b/codex-rs/README.md
@@ -15,8 +15,8 @@ You can also install via Homebrew (`brew install --cask codex`) or download a pl
## Documentation quickstart
-- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
-- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).
+- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
+- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).
## What's new in the Rust CLI
@@ -30,7 +30,7 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses
#### MCP client
-Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.
+Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.
#### MCP server (experimental)
@@ -46,7 +46,7 @@ Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.t
### Notifications
-You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.
+You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.
### `codex exec` to run Codex programmatically/non-interactively
diff --git a/codex-rs/ansi-escape/Cargo.toml b/codex-rs/ansi-escape/Cargo.toml
index 4107a7275..a10dbf913 100644
--- a/codex-rs/ansi-escape/Cargo.toml
+++ b/codex-rs/ansi-escape/Cargo.toml
@@ -1,7 +1,8 @@
[package]
-edition = "2024"
name = "codex-ansi-escape"
-version = { workspace = true }
+version.workspace = true
+edition.workspace = true
+license.workspace = true
[lib]
name = "codex_ansi_escape"
diff --git a/codex-rs/app-server-protocol/Cargo.toml b/codex-rs/app-server-protocol/Cargo.toml
index 4d1afadaa..1c21bd6ea 100644
--- a/codex-rs/app-server-protocol/Cargo.toml
+++ b/codex-rs/app-server-protocol/Cargo.toml
@@ -1,7 +1,8 @@
[package]
-edition = "2024"
name = "codex-app-server-protocol"
-version = { workspace = true }
+version.workspace = true
+edition.workspace = true
+license.workspace = true
[lib]
name = "codex_app_server_protocol"
@@ -14,11 +15,13 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
+codex-utils-absolute-path = { workspace = true }
mcp-types = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
+thiserror = { workspace = true }
ts-rs = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }
diff --git a/codex-rs/app-server-protocol/src/export.rs b/codex-rs/app-server-protocol/src/export.rs
index 11296e8e5..a60c1be62 100644
--- a/codex-rs/app-server-protocol/src/export.rs
+++ b/codex-rs/app-server-protocol/src/export.rs
@@ -31,6 +31,7 @@ use std::process::Command;
use ts_rs::TS;
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
+const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
#[derive(Clone)]
pub struct GeneratedSchema {
@@ -61,7 +62,32 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
Ok(())
}
+#[derive(Clone, Copy, Debug)]
+pub struct GenerateTsOptions {
+ pub generate_indices: bool,
+ pub ensure_headers: bool,
+ pub run_prettier: bool,
+}
+
+impl Default for GenerateTsOptions {
+ fn default() -> Self {
+ Self {
+ generate_indices: true,
+ ensure_headers: true,
+ run_prettier: true,
+ }
+ }
+}
+
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
+ generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
+}
+
+pub fn generate_ts_with_options(
+ out_dir: &Path,
+ prettier: Option<&Path>,
+ options: GenerateTsOptions,
+) -> Result<()> {
let v2_out_dir = out_dir.join("v2");
ensure_dir(out_dir)?;
ensure_dir(&v2_out_dir)?;
@@ -74,17 +100,28 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
export_server_responses(out_dir)?;
ServerNotification::export_all_to(out_dir)?;
- generate_index_ts(out_dir)?;
- generate_index_ts(&v2_out_dir)?;
+ if options.generate_indices {
+ generate_index_ts(out_dir)?;
+ generate_index_ts(&v2_out_dir)?;
+ }
// Ensure our header is present on all TS files (root + subdirs like v2/).
- let ts_files = ts_files_in_recursive(out_dir)?;
- for file in &ts_files {
- prepend_header_if_missing(file)?;
+ let mut ts_files = Vec::new();
+ let should_collect_ts_files =
+ options.ensure_headers || (options.run_prettier && prettier.is_some());
+ if should_collect_ts_files {
+ ts_files = ts_files_in_recursive(out_dir)?;
+ }
+
+ if options.ensure_headers {
+ for file in &ts_files {
+ prepend_header_if_missing(file)?;
+ }
}
// Optionally run Prettier on all generated TS files.
- if let Some(prettier_bin) = prettier
+ if options.run_prettier
+ && let Some(prettier_bin) = prettier
&& !ts_files.is_empty()
{
let status = Command::new(prettier_bin)
@@ -148,7 +185,6 @@ fn build_schema_bundle(schemas: Vec) -> Result {
"ServerNotification",
"ServerRequest",
];
- const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
let namespaced_types = collect_namespaced_types(&schemas);
let mut definitions = Map::new();
@@ -268,8 +304,11 @@ where
out_dir.join(format!("{file_stem}.json"))
};
- write_pretty_json(out_path, &schema_value)
- .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
+ if !IGNORED_DEFINITIONS.contains(&logical_name) {
+ write_pretty_json(out_path, &schema_value)
+ .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
+ }
+
let namespace = match raw_namespace {
Some("v1") | None => None,
Some(ns) => Some(ns.to_string()),
@@ -723,7 +762,13 @@ mod tests {
let _guard = TempDirGuard(output_dir.clone());
- generate_ts(&output_dir, None)?;
+ // Avoid doing more work than necessary to keep the test from timing out.
+ let options = GenerateTsOptions {
+ generate_indices: false,
+ ensure_headers: false,
+ run_prettier: false,
+ };
+ generate_ts_with_options(&output_dir, None, options)?;
let mut undefined_offenders = Vec::new();
let mut optional_nullable_offenders = BTreeSet::new();
diff --git a/codex-rs/app-server-protocol/src/lib.rs b/codex-rs/app-server-protocol/src/lib.rs
index 9c02ea924..06102083f 100644
--- a/codex-rs/app-server-protocol/src/lib.rs
+++ b/codex-rs/app-server-protocol/src/lib.rs
@@ -7,5 +7,6 @@ pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
+pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
diff --git a/codex-rs/app-server-protocol/src/protocol/common.rs b/codex-rs/app-server-protocol/src/protocol/common.rs
index db9bed111..16ee3b982 100644
--- a/codex-rs/app-server-protocol/src/protocol/common.rs
+++ b/codex-rs/app-server-protocol/src/protocol/common.rs
@@ -109,17 +109,29 @@ client_request_definitions! {
params: v2::ThreadResumeParams,
response: v2::ThreadResumeResponse,
},
+ ThreadFork => "thread/fork" {
+ params: v2::ThreadForkParams,
+ response: v2::ThreadForkResponse,
+ },
ThreadArchive => "thread/archive" {
params: v2::ThreadArchiveParams,
response: v2::ThreadArchiveResponse,
},
+ ThreadRollback => "thread/rollback" {
+ params: v2::ThreadRollbackParams,
+ response: v2::ThreadRollbackResponse,
+ },
ThreadList => "thread/list" {
params: v2::ThreadListParams,
response: v2::ThreadListResponse,
},
- ThreadCompact => "thread/compact" {
- params: v2::ThreadCompactParams,
- response: v2::ThreadCompactResponse,
+ ThreadLoadedList => "thread/loaded/list" {
+ params: v2::ThreadLoadedListParams,
+ response: v2::ThreadLoadedListResponse,
+ },
+ SkillsList => "skills/list" {
+ params: v2::SkillsListParams,
+ response: v2::SkillsListResponse,
},
TurnStart => "turn/start" {
params: v2::TurnStartParams,
@@ -129,12 +141,26 @@ client_request_definitions! {
params: v2::TurnInterruptParams,
response: v2::TurnInterruptResponse,
},
+ ReviewStart => "review/start" {
+ params: v2::ReviewStartParams,
+ response: v2::ReviewStartResponse,
+ },
ModelList => "model/list" {
params: v2::ModelListParams,
response: v2::ModelListResponse,
},
+ McpServerOauthLogin => "mcpServer/oauth/login" {
+ params: v2::McpServerOauthLoginParams,
+ response: v2::McpServerOauthLoginResponse,
+ },
+
+ McpServerStatusList => "mcpServerStatus/list" {
+ params: v2::ListMcpServerStatusParams,
+ response: v2::ListMcpServerStatusResponse,
+ },
+
LoginAccount => "account/login/start" {
params: v2::LoginAccountParams,
response: v2::LoginAccountResponse,
@@ -160,6 +186,30 @@ client_request_definitions! {
response: v2::FeedbackUploadResponse,
},
+ /// Execute a command (argv vector) under the server's sandbox.
+ OneOffCommandExec => "command/exec" {
+ params: v2::CommandExecParams,
+ response: v2::CommandExecResponse,
+ },
+
+ ConfigRead => "config/read" {
+ params: v2::ConfigReadParams,
+ response: v2::ConfigReadResponse,
+ },
+ ConfigValueWrite => "config/value/write" {
+ params: v2::ConfigValueWriteParams,
+ response: v2::ConfigWriteResponse,
+ },
+ ConfigBatchWrite => "config/batchWrite" {
+ params: v2::ConfigBatchWriteParams,
+ response: v2::ConfigWriteResponse,
+ },
+
+ ConfigRequirementsRead => "configRequirements/read" {
+ params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
+ response: v2::ConfigRequirementsReadResponse,
+ },
+
GetAccount => "account/read" {
params: v2::GetAccountParams,
response: v2::GetAccountResponse,
@@ -184,6 +234,11 @@ client_request_definitions! {
params: v1::ResumeConversationParams,
response: v1::ResumeConversationResponse,
},
+ /// Fork a recorded Codex conversation into a new session.
+ ForkConversation {
+ params: v1::ForkConversationParams,
+ response: v1::ForkConversationResponse,
+ },
ArchiveConversation {
params: v1::ArchiveConversationParams,
response: v1::ArchiveConversationResponse,
@@ -374,7 +429,7 @@ macro_rules! server_notification_definitions {
impl TryFrom for ServerNotification {
type Error = serde_json::Error;
- fn try_from(value: JSONRPCNotification) -> Result {
+ fn try_from(value: JSONRPCNotification) -> Result {
serde_json::from_value(serde_json::to_value(value)?)
}
}
@@ -434,6 +489,13 @@ server_request_definitions! {
response: v2::CommandExecutionRequestApprovalResponse,
},
+ /// Sent when approval is requested for a specific file change.
+ /// This request is used for Turns started via turn/start.
+ FileChangeRequestApproval => "item/fileChange/requestApproval" {
+ params: v2::FileChangeRequestApprovalParams,
+ response: v2::FileChangeRequestApprovalResponse,
+ },
+
/// DEPRECATED APIs below
/// Request to approve a patch.
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
@@ -476,19 +538,33 @@ pub struct FuzzyFileSearchResponse {
server_notification_definitions! {
/// NEW NOTIFICATIONS
+ Error => "error" (v2::ErrorNotification),
ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
+ ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
TurnStarted => "turn/started" (v2::TurnStartedNotification),
TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
+ TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
+ TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
ItemStarted => "item/started" (v2::ItemStartedNotification),
ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
+ /// This event is internal-only. Used by Codex Cloud.
+ RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
+ TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
+ FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
+ McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
+ ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
+ DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
+
+ /// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
+ WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
#[serde(rename = "account/login/completed")]
#[ts(rename = "account/login/completed")]
@@ -511,7 +587,7 @@ client_notification_definitions! {
mod tests {
use super::*;
use anyhow::Result;
- use codex_protocol::ConversationId;
+ use codex_protocol::ThreadId;
use codex_protocol::account::PlanType;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
@@ -524,7 +600,7 @@ mod tests {
let request = ClientRequest::NewConversation {
request_id: RequestId::Integer(42),
params: v1::NewConversationParams {
- model: Some("gpt-5.1-codex".to_string()),
+ model: Some("gpt-5.1-codex-max".to_string()),
model_provider: None,
profile: None,
cwd: None,
@@ -542,7 +618,7 @@ mod tests {
"method": "newConversation",
"id": 42,
"params": {
- "model": "gpt-5.1-codex",
+ "model": "gpt-5.1-codex-max",
"modelProvider": null,
"profile": null,
"cwd": null,
@@ -560,7 +636,7 @@ mod tests {
#[test]
fn conversation_id_serializes_as_plain_string() -> Result<()> {
- let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
+ let id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
assert_eq!(
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
@@ -571,11 +647,10 @@ mod tests {
#[test]
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
- let id: ConversationId =
- serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
+ let id: ThreadId = serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
assert_eq!(
- ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
+ ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
id,
);
Ok(())
@@ -596,14 +671,13 @@ mod tests {
#[test]
fn serialize_server_request() -> Result<()> {
- let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
+ let conversation_id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
let params = v1::ExecCommandApprovalParams {
conversation_id,
call_id: "call-42".to_string(),
command: vec!["echo".to_string(), "hello".to_string()],
cwd: PathBuf::from("/tmp"),
reason: Some("because tests".to_string()),
- risk: None,
parsed_cmd: vec![ParsedCommand::Unknown {
cmd: "echo hello".to_string(),
}],
@@ -623,7 +697,6 @@ mod tests {
"command": ["echo", "hello"],
"cwd": "/tmp",
"reason": "because tests",
- "risk": null,
"parsedCmd": [
{
"type": "unknown",
@@ -656,6 +729,22 @@ mod tests {
Ok(())
}
+ #[test]
+ fn serialize_config_requirements_read() -> Result<()> {
+ let request = ClientRequest::ConfigRequirementsRead {
+ request_id: RequestId::Integer(1),
+ params: None,
+ };
+ assert_eq!(
+ json!({
+ "method": "configRequirements/read",
+ "id": 1,
+ }),
+ serde_json::to_value(&request)?,
+ );
+ Ok(())
+ }
+
#[test]
fn serialize_account_login_api_key() -> Result<()> {
let request = ClientRequest::LoginAccount {
diff --git a/codex-rs/app-server-protocol/src/protocol/mappers.rs b/codex-rs/app-server-protocol/src/protocol/mappers.rs
new file mode 100644
index 000000000..f708c1fa8
--- /dev/null
+++ b/codex-rs/app-server-protocol/src/protocol/mappers.rs
@@ -0,0 +1,15 @@
+use crate::protocol::v1;
+use crate::protocol::v2;
+
+impl From<v1::ExecOneOffCommandParams> for v2::CommandExecParams {
+ fn from(value: v1::ExecOneOffCommandParams) -> Self {
+ Self {
+ command: value.command,
+ timeout_ms: value
+ .timeout_ms
+ .map(|timeout| i64::try_from(timeout).unwrap_or(60_000)),
+ cwd: value.cwd,
+ sandbox_policy: value.sandbox_policy.map(std::convert::Into::into),
+ }
+ }
+}
diff --git a/codex-rs/app-server-protocol/src/protocol/mod.rs b/codex-rs/app-server-protocol/src/protocol/mod.rs
index 11edf04cc..e26933243 100644
--- a/codex-rs/app-server-protocol/src/protocol/mod.rs
+++ b/codex-rs/app-server-protocol/src/protocol/mod.rs
@@ -2,5 +2,7 @@
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
pub mod common;
+mod mappers;
+pub mod thread_history;
pub mod v1;
pub mod v2;
diff --git a/codex-rs/app-server-protocol/src/protocol/thread_history.rs b/codex-rs/app-server-protocol/src/protocol/thread_history.rs
new file mode 100644
index 000000000..6fa6dfabb
--- /dev/null
+++ b/codex-rs/app-server-protocol/src/protocol/thread_history.rs
@@ -0,0 +1,524 @@
+use crate::protocol::v2::ThreadItem;
+use crate::protocol::v2::Turn;
+use crate::protocol::v2::TurnError;
+use crate::protocol::v2::TurnStatus;
+use crate::protocol::v2::UserInput;
+use codex_protocol::protocol::AgentReasoningEvent;
+use codex_protocol::protocol::AgentReasoningRawContentEvent;
+use codex_protocol::protocol::EventMsg;
+use codex_protocol::protocol::ThreadRolledBackEvent;
+use codex_protocol::protocol::TurnAbortedEvent;
+use codex_protocol::protocol::UserMessageEvent;
+
+/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
+///
+/// The purpose of this is to convert the EventMsgs persisted in a rollout file
+/// into a sequence of Turns and ThreadItems, which allows the client to render
+/// the historical messages when resuming a thread.
+pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
+ let mut builder = ThreadHistoryBuilder::new();
+ for event in events {
+ builder.handle_event(event);
+ }
+ builder.finish()
+}
+
+struct ThreadHistoryBuilder {
+ turns: Vec<Turn>,
+ current_turn: Option<PendingTurn>,
+ next_turn_index: i64,
+ next_item_index: i64,
+}
+
+impl ThreadHistoryBuilder {
+ fn new() -> Self {
+ Self {
+ turns: Vec::new(),
+ current_turn: None,
+ next_turn_index: 1,
+ next_item_index: 1,
+ }
+ }
+
+ fn finish(mut self) -> Vec<Turn> {
+ self.finish_current_turn();
+ self.turns
+ }
+
+ /// This function should handle all EventMsg variants that can be persisted in a rollout file.
+ /// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
+ fn handle_event(&mut self, event: &EventMsg) {
+ match event {
+ EventMsg::UserMessage(payload) => self.handle_user_message(payload),
+ EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
+ EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
+ EventMsg::AgentReasoningRawContent(payload) => {
+ self.handle_agent_reasoning_raw_content(payload)
+ }
+ EventMsg::TokenCount(_) => {}
+ EventMsg::EnteredReviewMode(_) => {}
+ EventMsg::ExitedReviewMode(_) => {}
+ EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
+ EventMsg::UndoCompleted(_) => {}
+ EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
+ _ => {}
+ }
+ }
+
+ fn handle_user_message(&mut self, payload: &UserMessageEvent) {
+ self.finish_current_turn();
+ let mut turn = self.new_turn();
+ let id = self.next_item_id();
+ let content = self.build_user_inputs(payload);
+ turn.items.push(ThreadItem::UserMessage { id, content });
+ self.current_turn = Some(turn);
+ }
+
+ fn handle_agent_message(&mut self, text: String) {
+ if text.is_empty() {
+ return;
+ }
+
+ let id = self.next_item_id();
+ self.ensure_turn()
+ .items
+ .push(ThreadItem::AgentMessage { id, text });
+ }
+
+ fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
+ if payload.text.is_empty() {
+ return;
+ }
+
+ // If the last item is a reasoning item, add the new text to the summary.
+ if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
+ summary.push(payload.text.clone());
+ return;
+ }
+
+ // Otherwise, create a new reasoning item.
+ let id = self.next_item_id();
+ self.ensure_turn().items.push(ThreadItem::Reasoning {
+ id,
+ summary: vec![payload.text.clone()],
+ content: Vec::new(),
+ });
+ }
+
+ fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
+ if payload.text.is_empty() {
+ return;
+ }
+
+ // If the last item is a reasoning item, add the new text to the content.
+ if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
+ content.push(payload.text.clone());
+ return;
+ }
+
+ // Otherwise, create a new reasoning item.
+ let id = self.next_item_id();
+ self.ensure_turn().items.push(ThreadItem::Reasoning {
+ id,
+ summary: Vec::new(),
+ content: vec![payload.text.clone()],
+ });
+ }
+
+ fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
+ let Some(turn) = self.current_turn.as_mut() else {
+ return;
+ };
+ turn.status = TurnStatus::Interrupted;
+ }
+
+ fn handle_thread_rollback(&mut self, payload: &ThreadRolledBackEvent) {
+ self.finish_current_turn();
+
+ let n = usize::try_from(payload.num_turns).unwrap_or(usize::MAX);
+ if n >= self.turns.len() {
+ self.turns.clear();
+ } else {
+ self.turns.truncate(self.turns.len().saturating_sub(n));
+ }
+
+ // Re-number subsequent synthetic ids so the pruned history is consistent.
+ self.next_turn_index =
+ i64::try_from(self.turns.len().saturating_add(1)).unwrap_or(i64::MAX);
+ let item_count: usize = self.turns.iter().map(|t| t.items.len()).sum();
+ self.next_item_index = i64::try_from(item_count.saturating_add(1)).unwrap_or(i64::MAX);
+ }
+
+ fn finish_current_turn(&mut self) {
+ if let Some(turn) = self.current_turn.take() {
+ if turn.items.is_empty() {
+ return;
+ }
+ self.turns.push(turn.into());
+ }
+ }
+
+ fn new_turn(&mut self) -> PendingTurn {
+ PendingTurn {
+ id: self.next_turn_id(),
+ items: Vec::new(),
+ error: None,
+ status: TurnStatus::Completed,
+ }
+ }
+
+ fn ensure_turn(&mut self) -> &mut PendingTurn {
+ if self.current_turn.is_none() {
+ let turn = self.new_turn();
+ return self.current_turn.insert(turn);
+ }
+
+ if let Some(turn) = self.current_turn.as_mut() {
+ return turn;
+ }
+
+ unreachable!("current turn must exist after initialization");
+ }
+
+ fn next_turn_id(&mut self) -> String {
+ let id = format!("turn-{}", self.next_turn_index);
+ self.next_turn_index += 1;
+ id
+ }
+
+ fn next_item_id(&mut self) -> String {
+ let id = format!("item-{}", self.next_item_index);
+ self.next_item_index += 1;
+ id
+ }
+
+ fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
+ let mut content = Vec::new();
+ if !payload.message.trim().is_empty() {
+ content.push(UserInput::Text {
+ text: payload.message.clone(),
+ });
+ }
+ if let Some(images) = &payload.images {
+ for image in images {
+ content.push(UserInput::Image { url: image.clone() });
+ }
+ }
+ content
+ }
+}
+
+struct PendingTurn {
+ id: String,
+ items: Vec<ThreadItem>,
+ error: Option<TurnError>,
+ status: TurnStatus,
+}
+
+impl From<PendingTurn> for Turn {
+ fn from(value: PendingTurn) -> Self {
+ Self {
+ id: value.id,
+ items: value.items,
+ error: value.error,
+ status: value.status,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use codex_protocol::protocol::AgentMessageEvent;
+ use codex_protocol::protocol::AgentReasoningEvent;
+ use codex_protocol::protocol::AgentReasoningRawContentEvent;
+ use codex_protocol::protocol::ThreadRolledBackEvent;
+ use codex_protocol::protocol::TurnAbortReason;
+ use codex_protocol::protocol::TurnAbortedEvent;
+ use codex_protocol::protocol::UserMessageEvent;
+ use pretty_assertions::assert_eq;
+
+ #[test]
+ fn builds_multiple_turns_with_reasoning_items() {
+ let events = vec![
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "First turn".into(),
+ images: Some(vec!["https://example.com/one.png".into()]),
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "Hi there".into(),
+ }),
+ EventMsg::AgentReasoning(AgentReasoningEvent {
+ text: "thinking".into(),
+ }),
+ EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
+ text: "full reasoning".into(),
+ }),
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Second turn".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "Reply two".into(),
+ }),
+ ];
+
+ let turns = build_turns_from_event_msgs(&events);
+ assert_eq!(turns.len(), 2);
+
+ let first = &turns[0];
+ assert_eq!(first.id, "turn-1");
+ assert_eq!(first.status, TurnStatus::Completed);
+ assert_eq!(first.items.len(), 3);
+ assert_eq!(
+ first.items[0],
+ ThreadItem::UserMessage {
+ id: "item-1".into(),
+ content: vec![
+ UserInput::Text {
+ text: "First turn".into(),
+ },
+ UserInput::Image {
+ url: "https://example.com/one.png".into(),
+ }
+ ],
+ }
+ );
+ assert_eq!(
+ first.items[1],
+ ThreadItem::AgentMessage {
+ id: "item-2".into(),
+ text: "Hi there".into(),
+ }
+ );
+ assert_eq!(
+ first.items[2],
+ ThreadItem::Reasoning {
+ id: "item-3".into(),
+ summary: vec!["thinking".into()],
+ content: vec!["full reasoning".into()],
+ }
+ );
+
+ let second = &turns[1];
+ assert_eq!(second.id, "turn-2");
+ assert_eq!(second.items.len(), 2);
+ assert_eq!(
+ second.items[0],
+ ThreadItem::UserMessage {
+ id: "item-4".into(),
+ content: vec![UserInput::Text {
+ text: "Second turn".into()
+ }],
+ }
+ );
+ assert_eq!(
+ second.items[1],
+ ThreadItem::AgentMessage {
+ id: "item-5".into(),
+ text: "Reply two".into(),
+ }
+ );
+ }
+
+ #[test]
+ fn splits_reasoning_when_interleaved() {
+ let events = vec![
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Turn start".into(),
+ images: None,
+ }),
+ EventMsg::AgentReasoning(AgentReasoningEvent {
+ text: "first summary".into(),
+ }),
+ EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
+ text: "first content".into(),
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "interlude".into(),
+ }),
+ EventMsg::AgentReasoning(AgentReasoningEvent {
+ text: "second summary".into(),
+ }),
+ ];
+
+ let turns = build_turns_from_event_msgs(&events);
+ assert_eq!(turns.len(), 1);
+ let turn = &turns[0];
+ assert_eq!(turn.items.len(), 4);
+
+ assert_eq!(
+ turn.items[1],
+ ThreadItem::Reasoning {
+ id: "item-2".into(),
+ summary: vec!["first summary".into()],
+ content: vec!["first content".into()],
+ }
+ );
+ assert_eq!(
+ turn.items[3],
+ ThreadItem::Reasoning {
+ id: "item-4".into(),
+ summary: vec!["second summary".into()],
+ content: Vec::new(),
+ }
+ );
+ }
+
+ #[test]
+ fn marks_turn_as_interrupted_when_aborted() {
+ let events = vec![
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Please do the thing".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "Working...".into(),
+ }),
+ EventMsg::TurnAborted(TurnAbortedEvent {
+ reason: TurnAbortReason::Replaced,
+ }),
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Let's try again".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "Second attempt complete.".into(),
+ }),
+ ];
+
+ let turns = build_turns_from_event_msgs(&events);
+ assert_eq!(turns.len(), 2);
+
+ let first_turn = &turns[0];
+ assert_eq!(first_turn.status, TurnStatus::Interrupted);
+ assert_eq!(first_turn.items.len(), 2);
+ assert_eq!(
+ first_turn.items[0],
+ ThreadItem::UserMessage {
+ id: "item-1".into(),
+ content: vec![UserInput::Text {
+ text: "Please do the thing".into()
+ }],
+ }
+ );
+ assert_eq!(
+ first_turn.items[1],
+ ThreadItem::AgentMessage {
+ id: "item-2".into(),
+ text: "Working...".into(),
+ }
+ );
+
+ let second_turn = &turns[1];
+ assert_eq!(second_turn.status, TurnStatus::Completed);
+ assert_eq!(second_turn.items.len(), 2);
+ assert_eq!(
+ second_turn.items[0],
+ ThreadItem::UserMessage {
+ id: "item-3".into(),
+ content: vec![UserInput::Text {
+ text: "Let's try again".into()
+ }],
+ }
+ );
+ assert_eq!(
+ second_turn.items[1],
+ ThreadItem::AgentMessage {
+ id: "item-4".into(),
+ text: "Second attempt complete.".into(),
+ }
+ );
+ }
+
+ #[test]
+ fn drops_last_turns_on_thread_rollback() {
+ let events = vec![
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "First".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "A1".into(),
+ }),
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Second".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "A2".into(),
+ }),
+ EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Third".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "A3".into(),
+ }),
+ ];
+
+ let turns = build_turns_from_event_msgs(&events);
+ let expected = vec![
+ Turn {
+ id: "turn-1".into(),
+ status: TurnStatus::Completed,
+ error: None,
+ items: vec![
+ ThreadItem::UserMessage {
+ id: "item-1".into(),
+ content: vec![UserInput::Text {
+ text: "First".into(),
+ }],
+ },
+ ThreadItem::AgentMessage {
+ id: "item-2".into(),
+ text: "A1".into(),
+ },
+ ],
+ },
+ Turn {
+ id: "turn-2".into(),
+ status: TurnStatus::Completed,
+ error: None,
+ items: vec![
+ ThreadItem::UserMessage {
+ id: "item-3".into(),
+ content: vec![UserInput::Text {
+ text: "Third".into(),
+ }],
+ },
+ ThreadItem::AgentMessage {
+ id: "item-4".into(),
+ text: "A3".into(),
+ },
+ ],
+ },
+ ];
+ assert_eq!(turns, expected);
+ }
+
+ #[test]
+ fn thread_rollback_clears_all_turns_when_num_turns_exceeds_history() {
+ let events = vec![
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "One".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "A1".into(),
+ }),
+ EventMsg::UserMessage(UserMessageEvent {
+ message: "Two".into(),
+ images: None,
+ }),
+ EventMsg::AgentMessage(AgentMessageEvent {
+ message: "A2".into(),
+ }),
+ EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
+ ];
+
+ let turns = build_turns_from_event_msgs(&events);
+ assert_eq!(turns, Vec::<Turn>::new());
+ }
+}
diff --git a/codex-rs/app-server-protocol/src/protocol/v1.rs b/codex-rs/app-server-protocol/src/protocol/v1.rs
index 54f80c9fd..ecc9d7c07 100644
--- a/codex-rs/app-server-protocol/src/protocol/v1.rs
+++ b/codex-rs/app-server-protocol/src/protocol/v1.rs
@@ -1,22 +1,22 @@
use std::collections::HashMap;
use std::path::PathBuf;
-use codex_protocol::ConversationId;
+use codex_protocol::ThreadId;
use codex_protocol::config_types::ForcedLoginMethod;
-use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
+use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
-use codex_protocol::protocol::SandboxCommandAssessment;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TurnAbortReason;
+use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
@@ -68,7 +68,7 @@ pub struct NewConversationParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
pub model: String,
pub reasoning_effort: Option,
pub rollout_path: PathBuf,
@@ -77,7 +77,16 @@ pub struct NewConversationResponse {
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
+ pub model: String,
+ pub initial_messages: Option>,
+ pub rollout_path: PathBuf,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+pub struct ForkConversationResponse {
+ pub conversation_id: ThreadId,
pub model: String,
pub initial_messages: Option>,
pub rollout_path: PathBuf,
@@ -90,9 +99,9 @@ pub enum GetConversationSummaryParams {
#[serde(rename = "rolloutPath")]
rollout_path: PathBuf,
},
- ConversationId {
+ ThreadId {
#[serde(rename = "conversationId")]
- conversation_id: ConversationId,
+ conversation_id: ThreadId,
},
}
@@ -113,7 +122,7 @@ pub struct ListConversationsParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
pub path: PathBuf,
pub preview: String,
pub timestamp: Option,
@@ -143,11 +152,19 @@ pub struct ListConversationsResponse {
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
pub path: Option,
- pub conversation_id: Option,
+ pub conversation_id: Option,
pub history: Option>,
pub overrides: Option,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+pub struct ForkConversationParams {
+ pub path: Option,
+ pub conversation_id: Option,
+ pub overrides: Option,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
@@ -158,7 +175,7 @@ pub struct AddConversationSubscriptionResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
pub rollout_path: PathBuf,
}
@@ -198,7 +215,7 @@ pub struct GitDiffToRemoteResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
/// and [codex_core::protocol::PatchApplyEndEvent].
pub call_id: String,
@@ -219,14 +236,13 @@ pub struct ApplyPatchApprovalResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
/// and [codex_core::protocol::ExecCommandEndEvent].
pub call_id: String,
pub command: Vec,
pub cwd: PathBuf,
pub reason: Option,
- pub risk: Option,
pub parsed_cmd: Vec,
}
@@ -361,7 +377,7 @@ pub struct Tools {
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
#[serde(default)]
- pub writable_roots: Vec,
+ pub writable_roots: Vec,
pub network_access: Option,
pub exclude_tmpdir_env_var: Option,
pub exclude_slash_tmp: Option,
@@ -370,14 +386,14 @@ pub struct SandboxSettings {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
pub items: Vec,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
pub items: Vec,
pub cwd: PathBuf,
pub approval_policy: AskForApproval,
@@ -385,6 +401,8 @@ pub struct SendUserTurnParams {
pub model: String,
pub effort: Option,
pub summary: ReasoningSummary,
+ /// Optional JSON Schema used to constrain the final assistant message for this turn.
+ pub output_schema: Option,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -394,7 +412,7 @@ pub struct SendUserTurnResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
}
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
@@ -410,7 +428,7 @@ pub struct SendUserMessageResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
- pub conversation_id: ConversationId,
+ pub conversation_id: ThreadId,
#[serde(default)]
pub experimental_raw_events: bool,
}
@@ -444,7 +462,7 @@ pub struct LoginChatGptCompleteNotification {
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
- pub session_id: ConversationId,
+ pub session_id: ThreadId,
pub model: String,
pub reasoning_effort: Option,
pub history_log_id: u64,
diff --git a/codex-rs/app-server-protocol/src/protocol/v2.rs b/codex-rs/app-server-protocol/src/protocol/v2.rs
index a2b9cee3f..348df069f 100644
--- a/codex-rs/app-server-protocol/src/protocol/v2.rs
+++ b/codex-rs/app-server-protocol/src/protocol/v2.rs
@@ -2,23 +2,42 @@ use std::collections::HashMap;
use std::path::PathBuf;
use crate::protocol::common::AuthMode;
-use codex_protocol::ConversationId;
use codex_protocol::account::PlanType;
-use codex_protocol::approvals::SandboxCommandAssessment as CoreSandboxCommandAssessment;
-use codex_protocol::config_types::ReasoningEffort;
+use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
+use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
+use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
+use codex_protocol::config_types::Verbosity;
use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
use codex_protocol::items::TurnItem as CoreTurnItem;
use codex_protocol::models::ResponseItem;
+use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand as CoreParsedCommand;
+use codex_protocol::plan_tool::PlanItemArg as CorePlanItemArg;
+use codex_protocol::plan_tool::StepStatus as CorePlanStepStatus;
+use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
+use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
+use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
+use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
+use codex_protocol::protocol::SessionSource as CoreSessionSource;
+use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
+use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
+use codex_protocol::protocol::SkillScope as CoreSkillScope;
+use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
+use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
use codex_protocol::user_input::UserInput as CoreUserInput;
+use codex_utils_absolute_path::AbsolutePathBuf;
use mcp_types::ContentBlock as McpContentBlock;
+use mcp_types::Resource as McpResource;
+use mcp_types::ResourceTemplate as McpResourceTemplate;
+use mcp_types::Tool as McpTool;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;
+use thiserror::Error;
use ts_rs::TS;
// Macro to declare a camelCased API v2 enum mirroring a core enum which
@@ -46,35 +65,483 @@ macro_rules! v2_enum_from_core {
};
}
-v2_enum_from_core!(
- pub enum AskForApproval from codex_protocol::protocol::AskForApproval {
- UnlessTrusted, OnFailure, OnRequest, Never
+/// This translation layer makes sure that we expose Codex error codes in camel case.
+///
+/// When an upstream HTTP status is available (for example, from the Responses API or a provider),
+/// it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum CodexErrorInfo {
+ ContextWindowExceeded,
+ UsageLimitExceeded,
+ HttpConnectionFailed {
+ #[serde(rename = "httpStatusCode")]
+ #[ts(rename = "httpStatusCode")]
+ http_status_code: Option,
+ },
+ /// Failed to connect to the response SSE stream.
+ ResponseStreamConnectionFailed {
+ #[serde(rename = "httpStatusCode")]
+ #[ts(rename = "httpStatusCode")]
+ http_status_code: Option,
+ },
+ InternalServerError,
+ Unauthorized,
+ BadRequest,
+ ThreadRollbackFailed,
+ SandboxError,
+ /// The response SSE stream disconnected in the middle of a turn before completion.
+ ResponseStreamDisconnected {
+ #[serde(rename = "httpStatusCode")]
+ #[ts(rename = "httpStatusCode")]
+ http_status_code: Option,
+ },
+ /// Reached the retry limit for responses.
+ ResponseTooManyFailedAttempts {
+ #[serde(rename = "httpStatusCode")]
+ #[ts(rename = "httpStatusCode")]
+ http_status_code: Option,
+ },
+ Other,
+}
+
+impl From for CodexErrorInfo {
+ fn from(value: CoreCodexErrorInfo) -> Self {
+ match value {
+ CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
+ CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
+ CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
+ CodexErrorInfo::HttpConnectionFailed { http_status_code }
+ }
+ CoreCodexErrorInfo::ResponseStreamConnectionFailed { http_status_code } => {
+ CodexErrorInfo::ResponseStreamConnectionFailed { http_status_code }
+ }
+ CoreCodexErrorInfo::InternalServerError => CodexErrorInfo::InternalServerError,
+ CoreCodexErrorInfo::Unauthorized => CodexErrorInfo::Unauthorized,
+ CoreCodexErrorInfo::BadRequest => CodexErrorInfo::BadRequest,
+ CoreCodexErrorInfo::ThreadRollbackFailed => CodexErrorInfo::ThreadRollbackFailed,
+ CoreCodexErrorInfo::SandboxError => CodexErrorInfo::SandboxError,
+ CoreCodexErrorInfo::ResponseStreamDisconnected { http_status_code } => {
+ CodexErrorInfo::ResponseStreamDisconnected { http_status_code }
+ }
+ CoreCodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code } => {
+ CodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code }
+ }
+ CoreCodexErrorInfo::Other => CodexErrorInfo::Other,
+ }
}
-);
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "kebab-case")]
+#[ts(rename_all = "kebab-case", export_to = "v2/")]
+pub enum AskForApproval {
+ #[serde(rename = "untrusted")]
+ #[ts(rename = "untrusted")]
+ UnlessTrusted,
+ OnFailure,
+ OnRequest,
+ Never,
+}
+
+impl AskForApproval {
+ pub fn to_core(self) -> CoreAskForApproval {
+ match self {
+ AskForApproval::UnlessTrusted => CoreAskForApproval::UnlessTrusted,
+ AskForApproval::OnFailure => CoreAskForApproval::OnFailure,
+ AskForApproval::OnRequest => CoreAskForApproval::OnRequest,
+ AskForApproval::Never => CoreAskForApproval::Never,
+ }
+ }
+}
+
+impl From for AskForApproval {
+ fn from(value: CoreAskForApproval) -> Self {
+ match value {
+ CoreAskForApproval::UnlessTrusted => AskForApproval::UnlessTrusted,
+ CoreAskForApproval::OnFailure => AskForApproval::OnFailure,
+ CoreAskForApproval::OnRequest => AskForApproval::OnRequest,
+ CoreAskForApproval::Never => AskForApproval::Never,
+ }
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "kebab-case")]
+#[ts(rename_all = "kebab-case", export_to = "v2/")]
+pub enum SandboxMode {
+ ReadOnly,
+ WorkspaceWrite,
+ DangerFullAccess,
+}
+
+impl SandboxMode {
+ pub fn to_core(self) -> CoreSandboxMode {
+ match self {
+ SandboxMode::ReadOnly => CoreSandboxMode::ReadOnly,
+ SandboxMode::WorkspaceWrite => CoreSandboxMode::WorkspaceWrite,
+ SandboxMode::DangerFullAccess => CoreSandboxMode::DangerFullAccess,
+ }
+ }
+}
+
+impl From for SandboxMode {
+ fn from(value: CoreSandboxMode) -> Self {
+ match value {
+ CoreSandboxMode::ReadOnly => SandboxMode::ReadOnly,
+ CoreSandboxMode::WorkspaceWrite => SandboxMode::WorkspaceWrite,
+ CoreSandboxMode::DangerFullAccess => SandboxMode::DangerFullAccess,
+ }
+ }
+}
v2_enum_from_core!(
- pub enum SandboxMode from codex_protocol::config_types::SandboxMode {
- ReadOnly, WorkspaceWrite, DangerFullAccess
+ pub enum ReviewDelivery from codex_protocol::protocol::ReviewDelivery {
+ Inline, Detached
}
);
v2_enum_from_core!(
- pub enum CommandRiskLevel from codex_protocol::approvals::SandboxRiskLevel {
- Low,
- Medium,
- High
+ pub enum McpAuthStatus from codex_protocol::protocol::McpAuthStatus {
+ Unsupported,
+ NotLoggedIn,
+ BearerToken,
+ OAuth
}
);
+// TODO(mbolin): Support in-repo layer.
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(tag = "type", rename_all = "camelCase")]
+#[ts(tag = "type")]
+#[ts(export_to = "v2/")]
+pub enum ConfigLayerSource {
+ /// Managed preferences layer delivered by MDM (macOS only).
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ Mdm {
+ domain: String,
+ key: String,
+ },
+
+ /// Managed config layer from a file (usually `managed_config.toml`).
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ System {
+ /// This is the path to the system config.toml file, though it is not
+ /// guaranteed to exist.
+ file: AbsolutePathBuf,
+ },
+
+ /// User config layer from $CODEX_HOME/config.toml. This layer is special
+ /// in that it is expected to be:
+ /// - writable by the user
+ /// - generally outside the workspace directory
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ User {
+ /// This is the path to the user's config.toml file, though it is not
+ /// guaranteed to exist.
+ file: AbsolutePathBuf,
+ },
+
+ /// Path to a .codex/ folder within a project. There could be multiple of
+ /// these between `cwd` and the project/repo root.
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ Project {
+ dot_codex_folder: AbsolutePathBuf,
+ },
+
+ /// Session-layer overrides supplied via `-c`/`--config`.
+ SessionFlags,
+
+ /// `managed_config.toml` was designed to be a config that was loaded
+ /// as the last layer on top of everything else. This scheme did not quite
+ /// work out as intended, but we keep this variant as a "best effort" while
+ /// we phase out `managed_config.toml` in favor of `requirements.toml`.
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ LegacyManagedConfigTomlFromFile {
+ file: AbsolutePathBuf,
+ },
+
+ LegacyManagedConfigTomlFromMdm,
+}
+
+impl ConfigLayerSource {
+ /// A setting from a layer with a higher precedence will override a setting
+ /// from a layer with a lower precedence.
+ pub fn precedence(&self) -> i16 {
+ match self {
+ ConfigLayerSource::Mdm { .. } => 0,
+ ConfigLayerSource::System { .. } => 10,
+ ConfigLayerSource::User { .. } => 20,
+ ConfigLayerSource::Project { .. } => 25,
+ ConfigLayerSource::SessionFlags => 30,
+ ConfigLayerSource::LegacyManagedConfigTomlFromFile { .. } => 40,
+ ConfigLayerSource::LegacyManagedConfigTomlFromMdm => 50,
+ }
+ }
+}
+
+/// Compares [ConfigLayerSource] by precedence, so `A < B` means settings from
+/// layer `A` will be overridden by settings from layer `B`.
+impl PartialOrd for ConfigLayerSource {
+ fn partial_cmp(&self, other: &Self) -> Option {
+ Some(self.precedence().cmp(&other.precedence()))
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub struct SandboxWorkspaceWrite {
+ #[serde(default)]
+ pub writable_roots: Vec,
+ #[serde(default)]
+ pub network_access: bool,
+ #[serde(default)]
+ pub exclude_tmpdir_env_var: bool,
+ #[serde(default)]
+ pub exclude_slash_tmp: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub struct ToolsV2 {
+ #[serde(alias = "web_search_request")]
+ pub web_search: Option,
+ pub view_image: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub struct ProfileV2 {
+ pub model: Option,
+ pub model_provider: Option,
+ pub approval_policy: Option,
+ pub model_reasoning_effort: Option,
+ pub model_reasoning_summary: Option,
+ pub model_verbosity: Option,
+ pub chatgpt_base_url: Option,
+ #[serde(default, flatten)]
+ pub additional: HashMap,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub struct AnalyticsConfig {
+ pub enabled: Option,
+ #[serde(default, flatten)]
+ pub additional: HashMap,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub struct Config {
+ pub model: Option,
+ pub review_model: Option,
+ pub model_context_window: Option,
+ pub model_auto_compact_token_limit: Option,
+ pub model_provider: Option,
+ pub approval_policy: Option,
+ pub sandbox_mode: Option,
+ pub sandbox_workspace_write: Option,
+ pub forced_chatgpt_workspace_id: Option,
+ pub forced_login_method: Option,
+ pub tools: Option,
+ pub profile: Option,
+ #[serde(default)]
+ pub profiles: HashMap,
+ pub instructions: Option,
+ pub developer_instructions: Option,
+ pub compact_prompt: Option,
+ pub model_reasoning_effort: Option,
+ pub model_reasoning_summary: Option,
+ pub model_verbosity: Option,
+ pub analytics: Option,
+ #[serde(default, flatten)]
+ pub additional: HashMap,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigLayerMetadata {
+ pub name: ConfigLayerSource,
+ pub version: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigLayer {
+ pub name: ConfigLayerSource,
+ pub version: String,
+ pub config: JsonValue,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum MergeStrategy {
+ Replace,
+ Upsert,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum WriteStatus {
+ Ok,
+ OkOverridden,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct OverriddenMetadata {
+ pub message: String,
+ pub overriding_layer: ConfigLayerMetadata,
+ pub effective_value: JsonValue,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigWriteResponse {
+ pub status: WriteStatus,
+ pub version: String,
+ /// Canonical path to the config file that was written.
+ pub file_path: AbsolutePathBuf,
+ pub overridden_metadata: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum ConfigWriteErrorCode {
+ ConfigLayerReadonly,
+ ConfigVersionConflict,
+ ConfigValidationError,
+ ConfigPathNotFound,
+ ConfigSchemaUnknownKey,
+ UserLayerNotFound,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigReadParams {
+ #[serde(default)]
+ pub include_layers: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigReadResponse {
+ pub config: Config,
+ pub origins: HashMap,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub layers: Option>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigRequirements {
+ pub allowed_approval_policies: Option>,
+ pub allowed_sandbox_modes: Option>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigRequirementsReadResponse {
+ /// Null if no requirements are configured (e.g. no requirements.toml/MDM entries).
+ pub requirements: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigValueWriteParams {
+ pub key_path: String,
+ pub value: JsonValue,
+ pub merge_strategy: MergeStrategy,
+ /// Path to the config file to write; defaults to the user's `config.toml` when omitted.
+ pub file_path: Option,
+ pub expected_version: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigBatchWriteParams {
+ pub edits: Vec,
+ /// Path to the config file to write; defaults to the user's `config.toml` when omitted.
+ pub file_path: Option,
+ pub expected_version: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ConfigEdit {
+ pub key_path: String,
+ pub value: JsonValue,
+ pub merge_strategy: MergeStrategy,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub enum ApprovalDecision {
+pub enum CommandExecutionApprovalDecision {
+ /// User approved the command.
Accept,
+ /// User approved the command and future identical commands should run without prompting.
+ AcceptForSession,
+ /// User approved the command, and wants to apply the proposed execpolicy amendment so future
+ /// matching commands can run without prompting.
+ AcceptWithExecpolicyAmendment {
+ execpolicy_amendment: ExecPolicyAmendment,
+ },
+ /// User denied the command. The agent will continue the turn.
Decline,
+ /// User denied the command. The turn will also be immediately interrupted.
Cancel,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum FileChangeApprovalDecision {
+ /// User approved the file changes.
+ Accept,
+ /// User approved the file changes and future changes to the same files should run without prompting.
+ AcceptForSession,
+ /// User denied the file changes. The agent will continue the turn.
+ Decline,
+ /// User denied the file changes. The turn will also be immediately interrupted.
+ Cancel,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum NetworkAccess {
+ #[default]
+ Restricted,
+ Enabled,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
@@ -84,9 +551,15 @@ pub enum SandboxPolicy {
ReadOnly,
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
+ ExternalSandbox {
+ #[serde(default)]
+ network_access: NetworkAccess,
+ },
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
WorkspaceWrite {
#[serde(default)]
- writable_roots: Vec,
+ writable_roots: Vec,
#[serde(default)]
network_access: bool,
#[serde(default)]
@@ -103,6 +576,14 @@ impl SandboxPolicy {
codex_protocol::protocol::SandboxPolicy::DangerFullAccess
}
SandboxPolicy::ReadOnly => codex_protocol::protocol::SandboxPolicy::ReadOnly,
+ SandboxPolicy::ExternalSandbox { network_access } => {
+ codex_protocol::protocol::SandboxPolicy::ExternalSandbox {
+ network_access: match network_access {
+ NetworkAccess::Restricted => CoreNetworkAccess::Restricted,
+ NetworkAccess::Enabled => CoreNetworkAccess::Enabled,
+ },
+ }
+ }
SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
@@ -125,6 +606,14 @@ impl From for SandboxPolicy {
SandboxPolicy::DangerFullAccess
}
codex_protocol::protocol::SandboxPolicy::ReadOnly => SandboxPolicy::ReadOnly,
+ codex_protocol::protocol::SandboxPolicy::ExternalSandbox { network_access } => {
+ SandboxPolicy::ExternalSandbox {
+ network_access: match network_access {
+ CoreNetworkAccess::Restricted => NetworkAccess::Restricted,
+ CoreNetworkAccess::Enabled => NetworkAccess::Enabled,
+ },
+ }
+ }
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
@@ -140,28 +629,23 @@ impl From for SandboxPolicy {
}
}
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub struct SandboxCommandAssessment {
- pub description: String,
- pub risk_level: CommandRiskLevel,
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(transparent)]
+#[ts(type = "Array", export_to = "v2/")]
+pub struct ExecPolicyAmendment {
+ pub command: Vec,
}
-impl SandboxCommandAssessment {
- pub fn into_core(self) -> CoreSandboxCommandAssessment {
- CoreSandboxCommandAssessment {
- description: self.description,
- risk_level: self.risk_level.to_core(),
- }
+impl ExecPolicyAmendment {
+ pub fn into_core(self) -> CoreExecPolicyAmendment {
+ CoreExecPolicyAmendment::new(self.command)
}
}
-impl From for SandboxCommandAssessment {
- fn from(value: CoreSandboxCommandAssessment) -> Self {
+impl From for ExecPolicyAmendment {
+ fn from(value: CoreExecPolicyAmendment) -> Self {
Self {
- description: value.description,
- risk_level: CommandRiskLevel::from(value.risk_level),
+ command: value.command().to_vec(),
}
}
}
@@ -190,6 +674,56 @@ pub enum CommandAction {
},
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(rename_all = "camelCase", export_to = "v2/")]
+#[derive(Default)]
+pub enum SessionSource {
+ Cli,
+ #[serde(rename = "vscode")]
+ #[ts(rename = "vscode")]
+ #[default]
+ VsCode,
+ Exec,
+ AppServer,
+ #[serde(other)]
+ Unknown,
+}
+
+impl From for SessionSource {
+ fn from(value: CoreSessionSource) -> Self {
+ match value {
+ CoreSessionSource::Cli => SessionSource::Cli,
+ CoreSessionSource::VSCode => SessionSource::VsCode,
+ CoreSessionSource::Exec => SessionSource::Exec,
+ CoreSessionSource::Mcp => SessionSource::AppServer,
+ CoreSessionSource::SubAgent(_) => SessionSource::Unknown,
+ CoreSessionSource::Unknown => SessionSource::Unknown,
+ }
+ }
+}
+
+impl From for CoreSessionSource {
+ fn from(value: SessionSource) -> Self {
+ match value {
+ SessionSource::Cli => CoreSessionSource::Cli,
+ SessionSource::VsCode => CoreSessionSource::VSCode,
+ SessionSource::Exec => CoreSessionSource::Exec,
+ SessionSource::AppServer => CoreSessionSource::Mcp,
+ SessionSource::Unknown => CoreSessionSource::Unknown,
+ }
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct GitInfo {
+ pub sha: Option,
+ pub branch: Option,
+ pub origin_url: Option,
+}
+
impl CommandAction {
pub fn into_core(self) -> CoreParsedCommand {
match self {
@@ -289,10 +823,21 @@ pub struct CancelLoginAccountParams {
pub login_id: String,
}
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
+#[ts(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct CancelLoginAccountResponse {}
+pub enum CancelLoginAccountStatus {
+ Canceled,
+ NotFound,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct CancelLoginAccountResponse {
+ pub status: CancelLoginAccountStatus,
+}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
@@ -364,13 +909,64 @@ pub struct ModelListResponse {
pub next_cursor: Option,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ListMcpServerStatusParams {
+ /// Opaque pagination cursor returned by a previous call.
+ pub cursor: Option,
+ /// Optional page size; defaults to a server-defined value.
+ pub limit: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct McpServerStatus {
+ pub name: String,
+ pub tools: std::collections::HashMap,
+ pub resources: Vec,
+ pub resource_templates: Vec,
+ pub auth_status: McpAuthStatus,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ListMcpServerStatusResponse {
+ pub data: Vec,
+ /// Opaque cursor to pass to the next call to continue after the last item.
+ /// If None, there are no more items to return.
+ pub next_cursor: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct McpServerOauthLoginParams {
+ pub name: String,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ #[ts(optional)]
+ pub scopes: Option>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ #[ts(optional)]
+ pub timeout_secs: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct McpServerOauthLoginResponse {
+ pub authorization_url: String,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FeedbackUploadParams {
pub classification: String,
pub reason: Option,
- pub conversation_id: Option,
+ pub thread_id: Option,
pub include_logs: bool,
}
@@ -381,6 +977,26 @@ pub struct FeedbackUploadResponse {
pub thread_id: String,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct CommandExecParams {
+ pub command: Vec,
+ #[ts(type = "number | null")]
+ pub timeout_ms: Option,
+ pub cwd: Option,
+ pub sandbox_policy: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct CommandExecResponse {
+ pub exit_code: i32,
+ pub stdout: String,
+ pub stderr: String,
+}
+
// === Threads, Turns, and Items ===
// Thread APIs
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
@@ -395,6 +1011,12 @@ pub struct ThreadStartParams {
pub config: Option>,
pub base_instructions: Option,
pub developer_instructions: Option,
+ /// If true, opt into emitting raw response items on the event stream.
+ ///
+ /// This is for internal use only (e.g. Codex Cloud).
+ /// (TODO): Figure out a better way to categorize internal / experimental events & protocols.
+ #[serde(default)]
+ pub experimental_raw_events: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -402,6 +1024,12 @@ pub struct ThreadStartParams {
#[ts(export_to = "v2/")]
pub struct ThreadStartResponse {
pub thread: Thread,
+ pub model: String,
+ pub model_provider: String,
+ pub cwd: PathBuf,
+ pub approval_policy: AskForApproval,
+ pub sandbox: SandboxPolicy,
+ pub reasoning_effort: Option,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
@@ -444,6 +1072,53 @@ pub struct ThreadResumeParams {
#[ts(export_to = "v2/")]
pub struct ThreadResumeResponse {
pub thread: Thread,
+ pub model: String,
+ pub model_provider: String,
+ pub cwd: PathBuf,
+ pub approval_policy: AskForApproval,
+ pub sandbox: SandboxPolicy,
+ pub reasoning_effort: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+/// There are two ways to fork a thread:
+/// 1. By thread_id: load the thread from disk by thread_id and fork it into a new thread.
+/// 2. By path: load the thread from disk by path and fork it into a new thread.
+///
+/// If using path, the thread_id param will be ignored.
+///
+/// Prefer using thread_id whenever possible.
+pub struct ThreadForkParams {
+ pub thread_id: String,
+
+ /// [UNSTABLE] Specify the rollout path to fork from.
+ /// If specified, the thread_id param will be ignored.
+ pub path: Option,
+
+ /// Configuration overrides for the forked thread, if any.
+ pub model: Option,
+ pub model_provider: Option,
+ pub cwd: Option,
+ pub approval_policy: Option,
+ pub sandbox: Option,
+ pub config: Option>,
+ pub base_instructions: Option,
+ pub developer_instructions: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadForkResponse {
+ pub thread: Thread,
+ pub model: String,
+ pub model_provider: String,
+ pub cwd: PathBuf,
+ pub approval_policy: AskForApproval,
+ pub sandbox: SandboxPolicy,
+ pub reasoning_effort: Option,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -458,6 +1133,30 @@ pub struct ThreadArchiveParams {
#[ts(export_to = "v2/")]
pub struct ThreadArchiveResponse {}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadRollbackParams {
+ pub thread_id: String,
+ /// The number of turns to drop from the end of the thread. Must be >= 1.
+ ///
+ /// This only modifies the thread's history and does not revert local file changes
+ /// that have been made by the agent. Clients are responsible for reverting these changes.
+ pub num_turns: u32,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadRollbackResponse {
+ /// The updated thread after applying the rollback, with `turns` populated.
+ ///
+ /// The ThreadItems stored in each Turn are lossy since we explicitly do not
+ /// persist all agent interactions, such as command executions. This is the same
+ /// behavior as `thread/resume`.
+ pub thread: Thread,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -474,24 +1173,126 @@ pub struct ThreadListParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct ThreadListResponse {
- pub data: Vec,
- /// Opaque cursor to pass to the next call to continue after the last item.
- /// if None, there are no more items to return.
- pub next_cursor: Option,
+pub struct ThreadListResponse {
+ pub data: Vec,
+ /// Opaque cursor to pass to the next call to continue after the last item.
+ /// if None, there are no more items to return.
+ pub next_cursor: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadLoadedListParams {
+ /// Opaque pagination cursor returned by a previous call.
+ pub cursor: Option,
+ /// Optional page size; defaults to no limit.
+ pub limit: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadLoadedListResponse {
+ /// Thread ids for sessions currently loaded in memory.
+ pub data: Vec,
+ /// Opaque cursor to pass to the next call to continue after the last item.
+ /// if None, there are no more items to return.
+ pub next_cursor: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct SkillsListParams {
+ /// When empty, defaults to the current session working directory.
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
+ pub cwds: Vec,
+
+ /// When true, bypass the skills cache and re-scan skills from disk.
+ #[serde(default, skip_serializing_if = "std::ops::Not::not")]
+ pub force_reload: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct SkillsListResponse {
+ pub data: Vec<SkillsListEntry>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "snake_case")]
+#[ts(rename_all = "snake_case")]
+#[ts(export_to = "v2/")]
+pub enum SkillScope {
+ User,
+ Repo,
+ System,
+ Admin,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct SkillMetadata {
+ pub name: String,
+ pub description: String,
+ #[ts(optional)]
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub short_description: Option,
+ pub path: PathBuf,
+ pub scope: SkillScope,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct ThreadCompactParams {
- pub thread_id: String,
+pub struct SkillErrorInfo {
+ pub path: PathBuf,
+ pub message: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct ThreadCompactResponse {}
+pub struct SkillsListEntry {
+ pub cwd: PathBuf,
+ pub skills: Vec,
+ pub errors: Vec,
+}
+
+impl From<CoreSkillMetadata> for SkillMetadata {
+ fn from(value: CoreSkillMetadata) -> Self {
+ Self {
+ name: value.name,
+ description: value.description,
+ short_description: value.short_description,
+ path: value.path,
+ scope: value.scope.into(),
+ }
+ }
+}
+
+impl From<CoreSkillScope> for SkillScope {
+ fn from(value: CoreSkillScope) -> Self {
+ match value {
+ CoreSkillScope::User => Self::User,
+ CoreSkillScope::Repo => Self::Repo,
+ CoreSkillScope::System => Self::System,
+ CoreSkillScope::Admin => Self::Admin,
+ }
+ }
+}
+
+impl From<CoreSkillErrorInfo> for SkillErrorInfo {
+ fn from(value: CoreSkillErrorInfo) -> Self {
+ Self {
+ path: value.path,
+ message: value.message,
+ }
+ }
+}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
@@ -500,11 +1301,25 @@ pub struct Thread {
pub id: String,
/// Usually the first user message in the thread, if available.
pub preview: String,
+ /// Model provider used for this thread (for example, 'openai').
pub model_provider: String,
/// Unix timestamp (in seconds) when the thread was created.
+ #[ts(type = "number")]
pub created_at: i64,
/// [UNSTABLE] Path to the thread on disk.
pub path: PathBuf,
+ /// Working directory captured for the thread.
+ pub cwd: PathBuf,
+ /// Version of the CLI that created the thread.
+ pub cli_version: String,
+ /// Origin of the thread (CLI, VSCode, codex exec, codex app-server, etc.).
+ pub source: SessionSource,
+ /// Optional Git metadata captured when the thread was created.
+ pub git_info: Option,
+ /// Only populated on `thread/resume`, `thread/rollback`, `thread/fork` responses.
+ /// For all other responses and notifications returning a Thread,
+ /// the turns field will be an empty list.
+ pub turns: Vec,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -514,21 +1329,99 @@ pub struct AccountUpdatedNotification {
pub auth_mode: Option,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadTokenUsageUpdatedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub token_usage: ThreadTokenUsage,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ThreadTokenUsage {
+ pub total: TokenUsageBreakdown,
+ pub last: TokenUsageBreakdown,
+ // TODO(aibrahim): make this not optional
+ #[ts(type = "number | null")]
+ pub model_context_window: Option,
+}
+
+impl From<CoreTokenUsageInfo> for ThreadTokenUsage {
+ fn from(value: CoreTokenUsageInfo) -> Self {
+ Self {
+ total: value.total_token_usage.into(),
+ last: value.last_token_usage.into(),
+ model_context_window: value.model_context_window,
+ }
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TokenUsageBreakdown {
+ #[ts(type = "number")]
+ pub total_tokens: i64,
+ #[ts(type = "number")]
+ pub input_tokens: i64,
+ #[ts(type = "number")]
+ pub cached_input_tokens: i64,
+ #[ts(type = "number")]
+ pub output_tokens: i64,
+ #[ts(type = "number")]
+ pub reasoning_output_tokens: i64,
+}
+
+impl From<CoreTokenUsage> for TokenUsageBreakdown {
+ fn from(value: CoreTokenUsage) -> Self {
+ Self {
+ total_tokens: value.total_tokens,
+ input_tokens: value.input_tokens,
+ cached_input_tokens: value.cached_input_tokens,
+ output_tokens: value.output_tokens,
+ reasoning_output_tokens: value.reasoning_output_tokens,
+ }
+ }
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Turn {
pub id: String,
+ /// Only populated on a `thread/resume` or `thread/fork` response.
+ /// For all other responses and notifications returning a Turn,
+ /// the items field will be an empty list.
pub items: Vec,
pub status: TurnStatus,
+ /// Only populated when the Turn's status is failed.
pub error: Option,
}
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Error)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
+#[error("{message}")]
pub struct TurnError {
pub message: String,
+ pub codex_error_info: Option,
+ #[serde(default)]
+ pub additional_details: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ErrorNotification {
+ pub error: TurnError,
+ // Set to true if the error is transient and the app-server process will automatically retry.
+ // If true, this will not interrupt a turn.
+ pub will_retry: bool,
+ pub thread_id: String,
+ pub turn_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -560,6 +1453,60 @@ pub struct TurnStartParams {
pub effort: Option,
/// Override the reasoning summary for this turn and subsequent turns.
pub summary: Option,
+ /// Optional JSON Schema used to constrain the final assistant message for this turn.
+ pub output_schema: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ReviewStartParams {
+ pub thread_id: String,
+ pub target: ReviewTarget,
+
+ /// Where to run the review: inline (default) on the current thread or
+ /// detached on a new thread (returned in `reviewThreadId`).
+ #[serde(default)]
+ pub delivery: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ReviewStartResponse {
+ pub turn: Turn,
+ /// Identifies the thread where the review runs.
+ ///
+ /// For inline reviews, this is the original thread id.
+ /// For detached reviews, this is the id of the new review thread.
+ pub review_thread_id: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(tag = "type", rename_all = "camelCase")]
+#[ts(tag = "type", export_to = "v2/")]
+pub enum ReviewTarget {
+ /// Review the working tree: staged, unstaged, and untracked files.
+ UncommittedChanges,
+
+ /// Review changes between the current branch and the given base branch.
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ BaseBranch { branch: String },
+
+ /// Review the changes introduced by a specific commit.
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ Commit {
+ sha: String,
+ /// Optional human-readable label (e.g., commit subject) for UIs.
+ title: Option,
+ },
+
+ /// Arbitrary instructions, equivalent to the old free-form prompt.
+ #[serde(rename_all = "camelCase")]
+ #[ts(rename_all = "camelCase")]
+ Custom { instructions: String },
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -591,6 +1538,7 @@ pub enum UserInput {
Text { text: String },
Image { url: String },
LocalImage { path: PathBuf },
+ Skill { name: String, path: PathBuf },
}
impl UserInput {
@@ -599,6 +1547,7 @@ impl UserInput {
UserInput::Text { text } => CoreUserInput::Text { text },
UserInput::Image { url } => CoreUserInput::Image { image_url: url },
UserInput::LocalImage { path } => CoreUserInput::LocalImage { path },
+ UserInput::Skill { name, path } => CoreUserInput::Skill { name, path },
}
}
}
@@ -609,6 +1558,7 @@ impl From for UserInput {
CoreUserInput::Text { text } => UserInput::Text { text },
CoreUserInput::Image { image_url } => UserInput::Image { url: image_url },
CoreUserInput::LocalImage { path } => UserInput::LocalImage { path },
+ CoreUserInput::Skill { name, path } => UserInput::Skill { name, path },
_ => unreachable!("unsupported user input variant"),
}
}
@@ -642,6 +1592,8 @@ pub enum ThreadItem {
command: String,
/// The command's working directory.
cwd: PathBuf,
+ /// Identifier for the underlying PTY process (when available).
+ process_id: Option,
status: CommandExecutionStatus,
/// A best-effort parsing of the command to understand the action(s) it will perform.
/// This returns a list of CommandAction objects because a single shell command may
@@ -652,6 +1604,7 @@ pub enum ThreadItem {
/// The command's exit code.
exit_code: Option,
/// The duration of the command execution in milliseconds.
+ #[ts(type = "number | null")]
duration_ms: Option,
},
#[serde(rename_all = "camelCase")]
@@ -671,19 +1624,22 @@ pub enum ThreadItem {
arguments: JsonValue,
result: Option,
error: Option,
+ /// The duration of the MCP tool call in milliseconds.
+ #[ts(type = "number | null")]
+ duration_ms: Option,
},
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
WebSearch { id: String, query: String },
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
- TodoList { id: String, items: Vec },
+ ImageView { id: String, path: String },
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
- ImageView { id: String, path: String },
+ EnteredReviewMode { id: String, review: String },
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
- CodeReview { id: String, review: String },
+ ExitedReviewMode { id: String, review: String },
}
impl From for ThreadItem {
@@ -723,6 +1679,7 @@ pub enum CommandExecutionStatus {
InProgress,
Completed,
Failed,
+ Declined,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -735,20 +1692,23 @@ pub struct FileUpdateChange {
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
+#[serde(tag = "type", rename_all = "camelCase")]
+#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum PatchChangeKind {
Add,
Delete,
- Update,
+ Update { move_path: Option },
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum PatchApplyStatus {
+ InProgress,
Completed,
Failed,
+ Declined,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -775,15 +1735,6 @@ pub struct McpToolCallError {
pub message: String,
}
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub struct TodoItem {
- pub id: String,
- pub text: String,
- pub completed: bool,
-}
-
// === Server Notifications ===
// Thread/Turn lifecycle notifications and item progress events
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -797,6 +1748,7 @@ pub struct ThreadStartedNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartedNotification {
+ pub thread_id: String,
pub turn: Turn,
}
@@ -813,9 +1765,65 @@ pub struct Usage {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnCompletedNotification {
+ pub thread_id: String,
pub turn: Turn,
- // TODO: should usage be stored on the Turn object, and we return that instead?
- pub usage: Usage,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+/// Notification that the turn-level unified diff has changed.
+/// Contains the latest aggregated diff across all file changes in the turn.
+pub struct TurnDiffUpdatedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub diff: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TurnPlanUpdatedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub explanation: Option,
+ pub plan: Vec,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TurnPlanStep {
+ pub step: String,
+ pub status: TurnPlanStepStatus,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum TurnPlanStepStatus {
+ Pending,
+ InProgress,
+ Completed,
+}
+
+impl From<CorePlanItemArg> for TurnPlanStep {
+ fn from(value: CorePlanItemArg) -> Self {
+ Self {
+ step: value.step,
+ status: value.status.into(),
+ }
+ }
+}
+
+impl From<CorePlanStepStatus> for TurnPlanStepStatus {
+ fn from(value: CorePlanStepStatus) -> Self {
+ match value {
+ CorePlanStepStatus::Pending => Self::Pending,
+ CorePlanStepStatus::InProgress => Self::InProgress,
+ CorePlanStepStatus::Completed => Self::Completed,
+ }
+ }
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -823,6 +1831,8 @@ pub struct TurnCompletedNotification {
#[ts(export_to = "v2/")]
pub struct ItemStartedNotification {
pub item: ThreadItem,
+ pub thread_id: String,
+ pub turn_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -830,6 +1840,17 @@ pub struct ItemStartedNotification {
#[ts(export_to = "v2/")]
pub struct ItemCompletedNotification {
pub item: ThreadItem,
+ pub thread_id: String,
+ pub turn_id: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct RawResponseItemCompletedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub item: ResponseItem,
}
// Item-specific progress notifications
@@ -837,6 +1858,8 @@ pub struct ItemCompletedNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AgentMessageDeltaNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
pub delta: String,
}
@@ -845,8 +1868,11 @@ pub struct AgentMessageDeltaNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningSummaryTextDeltaNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
pub delta: String,
+ #[ts(type = "number")]
pub summary_index: i64,
}
@@ -854,7 +1880,10 @@ pub struct ReasoningSummaryTextDeltaNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningSummaryPartAddedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
+ #[ts(type = "number")]
pub summary_index: i64,
}
@@ -862,15 +1891,41 @@ pub struct ReasoningSummaryPartAddedNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningTextDeltaNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
pub delta: String,
+ #[ts(type = "number")]
pub content_index: i64,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct TerminalInteractionNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub item_id: String,
+ pub process_id: String,
+ pub stdin: String,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CommandExecutionOutputDeltaNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub item_id: String,
+ pub delta: String,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct FileChangeOutputDeltaNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
pub delta: String,
}
@@ -879,10 +1934,40 @@ pub struct CommandExecutionOutputDeltaNotification {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallProgressNotification {
+ pub thread_id: String,
+ pub turn_id: String,
pub item_id: String,
pub message: String,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct McpServerOauthLoginCompletedNotification {
+ pub name: String,
+ pub success: bool,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ #[ts(optional)]
+ pub error: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct WindowsWorldWritableWarningNotification {
+ pub sample_paths: Vec,
+ pub extra_count: usize,
+ pub failed_scan: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct ContextCompactedNotification {
+ pub thread_id: String,
+ pub turn_id: String,
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -892,28 +1977,35 @@ pub struct CommandExecutionRequestApprovalParams {
pub item_id: String,
/// Optional explanatory reason (e.g. request for network access).
pub reason: Option,
- /// Optional model-provided risk assessment describing the blocked command.
- pub risk: Option,
+ /// Optional proposed execpolicy amendment to allow similar commands without prompting.
+ pub proposed_execpolicy_amendment: Option,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct CommandExecutionRequestAcceptSettings {
- /// If true, automatically approve this command for the duration of the session.
- #[serde(default)]
- pub for_session: bool,
+pub struct CommandExecutionRequestApprovalResponse {
+ pub decision: CommandExecutionApprovalDecision,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
-pub struct CommandExecutionRequestApprovalResponse {
- pub decision: ApprovalDecision,
- /// Optional approval settings for when the decision is `accept`.
- /// Ignored if the decision is `decline` or `cancel`.
- #[serde(default)]
- pub accept_settings: Option,
+pub struct FileChangeRequestApprovalParams {
+ pub thread_id: String,
+ pub turn_id: String,
+ pub item_id: String,
+ /// Optional explanatory reason (e.g. request for extra write access).
+ pub reason: Option,
+ /// [UNSTABLE] When set, the agent is asking the user to allow writes under this root
+ /// for the remainder of the session (unclear if this is honored today).
+ pub grant_root: Option,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[ts(export_to = "v2/")]
+pub struct FileChangeRequestApprovalResponse {
+ pub decision: FileChangeApprovalDecision,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -929,6 +2021,8 @@ pub struct AccountRateLimitsUpdatedNotification {
pub struct RateLimitSnapshot {
pub primary: Option,
pub secondary: Option,
+ pub credits: Option,
+ pub plan_type: Option,
}
impl From for RateLimitSnapshot {
@@ -936,6 +2030,8 @@ impl From for RateLimitSnapshot {
Self {
primary: value.primary.map(RateLimitWindow::from),
secondary: value.secondary.map(RateLimitWindow::from),
+ credits: value.credits.map(CreditsSnapshot::from),
+ plan_type: value.plan_type,
}
}
}
@@ -945,7 +2041,9 @@ impl From for RateLimitSnapshot {
#[ts(export_to = "v2/")]
pub struct RateLimitWindow {
pub used_percent: i32,
+ #[ts(type = "number | null")]
pub window_duration_mins: Option,
+ #[ts(type = "number | null")]
pub resets_at: Option,
}
@@ -959,6 +2057,25 @@ impl From for RateLimitWindow {
}
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct CreditsSnapshot {
+ pub has_credits: bool,
+ pub unlimited: bool,
+ pub balance: Option,
+}
+
+impl From<CoreCreditsSnapshot> for CreditsSnapshot {
+ fn from(value: CoreCreditsSnapshot) -> Self {
+ Self {
+ has_credits: value.has_credits,
+ unlimited: value.unlimited,
+ balance: value.balance,
+ }
+ }
+}
+
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -970,6 +2087,16 @@ pub struct AccountLoginCompletedNotification {
pub error: Option,
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct DeprecationNoticeNotification {
+ /// Concise summary of what is deprecated.
+ pub summary: String,
+ /// Optional extra guidance, such as migration steps or rationale.
+ pub details: Option,
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -979,10 +2106,30 @@ mod tests {
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::items::WebSearchItem;
+ use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::user_input::UserInput as CoreUserInput;
use pretty_assertions::assert_eq;
+ use serde_json::json;
use std::path::PathBuf;
+ #[test]
+ fn sandbox_policy_round_trips_external_sandbox_network_access() {
+ let v2_policy = SandboxPolicy::ExternalSandbox {
+ network_access: NetworkAccess::Enabled,
+ };
+
+ let core_policy = v2_policy.to_core();
+ assert_eq!(
+ core_policy,
+ codex_protocol::protocol::SandboxPolicy::ExternalSandbox {
+ network_access: CoreNetworkAccess::Enabled,
+ }
+ );
+
+ let back_to_v2 = SandboxPolicy::from(core_policy);
+ assert_eq!(back_to_v2, v2_policy);
+ }
+
#[test]
fn core_turn_item_into_thread_item_converts_supported_variants() {
let user_item = TurnItem::UserMessage(UserMessageItem {
@@ -997,6 +2144,10 @@ mod tests {
CoreUserInput::LocalImage {
path: PathBuf::from("local/image.png"),
},
+ CoreUserInput::Skill {
+ name: "skill-creator".to_string(),
+ path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
+ },
],
});
@@ -1014,6 +2165,10 @@ mod tests {
UserInput::LocalImage {
path: PathBuf::from("local/image.png"),
},
+ UserInput::Skill {
+ name: "skill-creator".to_string(),
+ path: PathBuf::from("/repo/.codex/skills/skill-creator/SKILL.md"),
+ },
],
}
);
@@ -1066,4 +2221,44 @@ mod tests {
}
);
}
+
+ #[test]
+ fn skills_list_params_serialization_uses_force_reload() {
+ assert_eq!(
+ serde_json::to_value(SkillsListParams {
+ cwds: Vec::new(),
+ force_reload: false,
+ })
+ .unwrap(),
+ json!({}),
+ );
+
+ assert_eq!(
+ serde_json::to_value(SkillsListParams {
+ cwds: vec![PathBuf::from("/repo")],
+ force_reload: true,
+ })
+ .unwrap(),
+ json!({
+ "cwds": ["/repo"],
+ "forceReload": true,
+ }),
+ );
+ }
+
+ #[test]
+ fn codex_error_info_serializes_http_status_code_in_camel_case() {
+ let value = CodexErrorInfo::ResponseTooManyFailedAttempts {
+ http_status_code: Some(401),
+ };
+
+ assert_eq!(
+ serde_json::to_value(value).unwrap(),
+ json!({
+ "responseTooManyFailedAttempts": {
+ "httpStatusCode": 401
+ }
+ })
+ );
+ }
}
diff --git a/codex-rs/app-server-test-client/Cargo.toml b/codex-rs/app-server-test-client/Cargo.toml
index 2fd14fb15..25a881364 100644
--- a/codex-rs/app-server-test-client/Cargo.toml
+++ b/codex-rs/app-server-test-client/Cargo.toml
@@ -1,7 +1,8 @@
[package]
name = "codex-app-server-test-client"
-version = { workspace = true }
-edition = "2024"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
[lints]
workspace = true
diff --git a/codex-rs/app-server-test-client/src/main.rs b/codex-rs/app-server-test-client/src/main.rs
index a243937b2..64577a553 100644
--- a/codex-rs/app-server-test-client/src/main.rs
+++ b/codex-rs/app-server-test-client/src/main.rs
@@ -13,6 +13,7 @@ use std::time::Duration;
use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
+use clap::ArgAction;
use clap::Parser;
use clap::Subcommand;
use codex_app_server_protocol::AddConversationListenerParams;
@@ -20,15 +21,24 @@ use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
+use codex_app_server_protocol::CommandExecutionApprovalDecision;
+use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
+use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
+use codex_app_server_protocol::FileChangeApprovalDecision;
+use codex_app_server_protocol::FileChangeRequestApprovalParams;
+use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::InputItem;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
+use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::LoginChatGptResponse;
+use codex_app_server_protocol::ModelListParams;
+use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
@@ -36,14 +46,17 @@ use codex_app_server_protocol::SandboxPolicy;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_app_server_protocol::ServerNotification;
+use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
+use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
-use codex_protocol::ConversationId;
+use codex_protocol::ThreadId;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
+use serde::Serialize;
use serde::de::DeserializeOwned;
use serde_json::Value;
use uuid::Uuid;
@@ -56,6 +69,19 @@ struct Cli {
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
codex_bin: String,
+ /// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
+ ///
+ /// Example:
+ /// `--config 'model_providers.mock.base_url="http://localhost:4010/v2"'`
+ #[arg(
+ short = 'c',
+ long = "config",
+ value_name = "key=value",
+ action = ArgAction::Append,
+ global = true
+ )]
+ config_overrides: Vec<String>,
+
#[command(subcommand)]
command: CliCommand,
}
@@ -91,37 +117,67 @@ enum CliCommand {
/// Start a V2 turn that should not elicit an ExecCommand approval.
#[command(name = "no-trigger-cmd-approval")]
NoTriggerCmdApproval,
+ /// Send two sequential V2 turns in the same thread to test follow-up behavior.
+ SendFollowUpV2 {
+ /// Initial user message for the first turn.
+ #[arg()]
+ first_message: String,
+ /// Follow-up user message for the second turn.
+ #[arg()]
+ follow_up_message: String,
+ },
/// Trigger the ChatGPT login flow and wait for completion.
TestLogin,
/// Fetch the current account rate limits from the Codex app-server.
GetAccountRateLimits,
+ /// List the available models from the Codex app-server.
+ #[command(name = "model-list")]
+ ModelList,
}
fn main() -> Result<()> {
- let Cli { codex_bin, command } = Cli::parse();
+ let Cli {
+ codex_bin,
+ config_overrides,
+ command,
+ } = Cli::parse();
match command {
- CliCommand::SendMessage { user_message } => send_message(codex_bin, user_message),
- CliCommand::SendMessageV2 { user_message } => send_message_v2(codex_bin, user_message),
+ CliCommand::SendMessage { user_message } => {
+ send_message(&codex_bin, &config_overrides, user_message)
+ }
+ CliCommand::SendMessageV2 { user_message } => {
+ send_message_v2(&codex_bin, &config_overrides, user_message)
+ }
CliCommand::TriggerCmdApproval { user_message } => {
- trigger_cmd_approval(codex_bin, user_message)
+ trigger_cmd_approval(&codex_bin, &config_overrides, user_message)
}
CliCommand::TriggerPatchApproval { user_message } => {
- trigger_patch_approval(codex_bin, user_message)
+ trigger_patch_approval(&codex_bin, &config_overrides, user_message)
}
- CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(codex_bin),
- CliCommand::TestLogin => test_login(codex_bin),
- CliCommand::GetAccountRateLimits => get_account_rate_limits(codex_bin),
+ CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(&codex_bin, &config_overrides),
+ CliCommand::SendFollowUpV2 {
+ first_message,
+ follow_up_message,
+ } => send_follow_up_v2(
+ &codex_bin,
+ &config_overrides,
+ first_message,
+ follow_up_message,
+ ),
+ CliCommand::TestLogin => test_login(&codex_bin, &config_overrides),
+ CliCommand::GetAccountRateLimits => get_account_rate_limits(&codex_bin, &config_overrides),
+ CliCommand::ModelList => model_list(&codex_bin, &config_overrides),
}
}
-fn send_message(codex_bin: String, user_message: String) -> Result<()> {
- let mut client = CodexClient::spawn(codex_bin)?;
+fn send_message(codex_bin: &str, config_overrides: &[String], user_message: String) -> Result<()> {
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
- let conversation = client.new_conversation()?;
+ let conversation = client.start_thread()?;
println!("< newConversation response: {conversation:?}");
let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
@@ -132,51 +188,66 @@ fn send_message(codex_bin: String, user_message: String) -> Result<()> {
client.stream_conversation(&conversation.conversation_id)?;
- client.remove_conversation_listener(subscription.subscription_id)?;
+ client.remove_thread_listener(subscription.subscription_id)?;
Ok(())
}
-fn send_message_v2(codex_bin: String, user_message: String) -> Result<()> {
- send_message_v2_with_policies(codex_bin, user_message, None, None)
+fn send_message_v2(
+ codex_bin: &str,
+ config_overrides: &[String],
+ user_message: String,
+) -> Result<()> {
+ send_message_v2_with_policies(codex_bin, config_overrides, user_message, None, None)
}
-fn trigger_cmd_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
+fn trigger_cmd_approval(
+ codex_bin: &str,
+ config_overrides: &[String],
+ user_message: Option<String>,
+) -> Result<()> {
let default_prompt =
"Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
send_message_v2_with_policies(
codex_bin,
+ config_overrides,
message,
Some(AskForApproval::OnRequest),
Some(SandboxPolicy::ReadOnly),
)
}
-fn trigger_patch_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
+fn trigger_patch_approval(
+ codex_bin: &str,
+ config_overrides: &[String],
+ user_message: Option<String>,
+) -> Result<()> {
let default_prompt =
"Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
send_message_v2_with_policies(
codex_bin,
+ config_overrides,
message,
Some(AskForApproval::OnRequest),
Some(SandboxPolicy::ReadOnly),
)
}
-fn no_trigger_cmd_approval(codex_bin: String) -> Result<()> {
+fn no_trigger_cmd_approval(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
let prompt = "Run `touch should_not_trigger_approval.txt`";
- send_message_v2_with_policies(codex_bin, prompt.to_string(), None, None)
+ send_message_v2_with_policies(codex_bin, config_overrides, prompt.to_string(), None, None)
}
fn send_message_v2_with_policies(
- codex_bin: String,
+ codex_bin: &str,
+ config_overrides: &[String],
user_message: String,
 approval_policy: Option<AskForApproval>,
 sandbox_policy: Option<SandboxPolicy>,
) -> Result<()> {
- let mut client = CodexClient::spawn(codex_bin)?;
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -199,8 +270,47 @@ fn send_message_v2_with_policies(
Ok(())
}
-fn test_login(codex_bin: String) -> Result<()> {
- let mut client = CodexClient::spawn(codex_bin)?;
+fn send_follow_up_v2(
+ codex_bin: &str,
+ config_overrides: &[String],
+ first_message: String,
+ follow_up_message: String,
+) -> Result<()> {
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
+
+ let initialize = client.initialize()?;
+ println!("< initialize response: {initialize:?}");
+
+ let thread_response = client.thread_start(ThreadStartParams::default())?;
+ println!("< thread/start response: {thread_response:?}");
+
+ let first_turn_params = TurnStartParams {
+ thread_id: thread_response.thread.id.clone(),
+ input: vec![V2UserInput::Text {
+ text: first_message,
+ }],
+ ..Default::default()
+ };
+ let first_turn_response = client.turn_start(first_turn_params)?;
+ println!("< turn/start response (initial): {first_turn_response:?}");
+ client.stream_turn(&thread_response.thread.id, &first_turn_response.turn.id)?;
+
+ let follow_up_params = TurnStartParams {
+ thread_id: thread_response.thread.id.clone(),
+ input: vec![V2UserInput::Text {
+ text: follow_up_message,
+ }],
+ ..Default::default()
+ };
+ let follow_up_response = client.turn_start(follow_up_params)?;
+ println!("< turn/start response (follow-up): {follow_up_response:?}");
+ client.stream_turn(&thread_response.thread.id, &follow_up_response.turn.id)?;
+
+ Ok(())
+}
+
+fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -229,8 +339,8 @@ fn test_login(codex_bin: String) -> Result<()> {
}
}
-fn get_account_rate_limits(codex_bin: String) -> Result<()> {
- let mut client = CodexClient::spawn(codex_bin)?;
+fn get_account_rate_limits(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
let initialize = client.initialize()?;
println!("< initialize response: {initialize:?}");
@@ -241,6 +351,18 @@ fn get_account_rate_limits(codex_bin: String) -> Result<()> {
Ok(())
}
+fn model_list(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
+ let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
+
+ let initialize = client.initialize()?;
+ println!("< initialize response: {initialize:?}");
+
+ let response = client.model_list(ModelListParams::default())?;
+ println!("< model/list response: {response:?}");
+
+ Ok(())
+}
+
struct CodexClient {
child: Child,
 stdin: Option<ChildStdin>,
@@ -249,8 +371,12 @@ struct CodexClient {
}
impl CodexClient {
- fn spawn(codex_bin: String) -> Result<Self> {
- let mut codex_app_server = Command::new(&codex_bin)
+ fn spawn(codex_bin: &str, config_overrides: &[String]) -> Result<Self> {
+ let mut cmd = Command::new(codex_bin);
+ for override_kv in config_overrides {
+ cmd.arg("--config").arg(override_kv);
+ }
+ let mut codex_app_server = cmd
.arg("app-server")
.stdin(Stdio::piped())
.stdout(Stdio::piped())
@@ -291,7 +417,7 @@ impl CodexClient {
self.send_request(request, request_id, "initialize")
}
- fn new_conversation(&mut self) -> Result<NewConversationResponse> {
+ fn start_thread(&mut self) -> Result<NewConversationResponse> {
let request_id = self.request_id();
let request = ClientRequest::NewConversation {
request_id: request_id.clone(),
@@ -303,7 +429,7 @@ impl CodexClient {
fn add_conversation_listener(
&mut self,
- conversation_id: &ConversationId,
+ conversation_id: &ThreadId,
 ) -> Result<AddConversationSubscriptionResponse> {
let request_id = self.request_id();
let request = ClientRequest::AddConversationListener {
@@ -317,7 +443,7 @@ impl CodexClient {
self.send_request(request, request_id, "addConversationListener")
}
- fn remove_conversation_listener(&mut self, subscription_id: Uuid) -> Result<()> {
+ fn remove_thread_listener(&mut self, subscription_id: Uuid) -> Result<()> {
let request_id = self.request_id();
let request = ClientRequest::RemoveConversationListener {
request_id: request_id.clone(),
@@ -335,7 +461,7 @@ impl CodexClient {
fn send_user_message(
&mut self,
- conversation_id: &ConversationId,
+ conversation_id: &ThreadId,
message: &str,
 ) -> Result<SendUserMessageResponse> {
let request_id = self.request_id();
@@ -392,7 +518,17 @@ impl CodexClient {
self.send_request(request, request_id, "account/rateLimits/read")
}
- fn stream_conversation(&mut self, conversation_id: &ConversationId) -> Result<()> {
+ fn model_list(&mut self, params: ModelListParams) -> Result<ModelListResponse> {
+ let request_id = self.request_id();
+ let request = ClientRequest::ModelList {
+ request_id: request_id.clone(),
+ params,
+ };
+
+ self.send_request(request, request_id, "model/list")
+ }
+
+ fn stream_conversation(&mut self, conversation_id: &ThreadId) -> Result<()> {
loop {
let notification = self.next_notification()?;
@@ -493,6 +629,10 @@ impl CodexClient {
print!("{}", delta.delta);
std::io::stdout().flush().ok();
}
+ ServerNotification::TerminalInteraction(delta) => {
+ println!("[stdin sent: {}]", delta.stdin);
+ std::io::stdout().flush().ok();
+ }
ServerNotification::ItemStarted(payload) => {
println!("\n< item started: {:?}", payload.item);
}
@@ -502,10 +642,11 @@ impl CodexClient {
ServerNotification::TurnCompleted(payload) => {
if payload.turn.id == turn_id {
println!("\n< turn/completed notification: {:?}", payload.turn.status);
- if let Some(error) = payload.turn.error {
+ if payload.turn.status == TurnStatus::Failed
+ && let Some(error) = payload.turn.error
+ {
println!("[turn error] {}", error.message);
}
- println!("< usage: {:?}", payload.usage);
break;
}
}
@@ -524,7 +665,7 @@ impl CodexClient {
fn extract_event(
&self,
notification: JSONRPCNotification,
- conversation_id: &ConversationId,
+ conversation_id: &ThreadId,
 ) -> Result<Option<Event>> {
let params = notification
.params
@@ -538,7 +679,7 @@ impl CodexClient {
let conversation_value = map
.remove("conversationId")
.context("event missing conversationId")?;
- let notification_conversation: ConversationId = serde_json::from_value(conversation_value)
+ let notification_conversation: ThreadId = serde_json::from_value(conversation_value)
.context("conversationId was not a valid UUID")?;
 if &notification_conversation != conversation_id {
@@ -603,8 +744,8 @@ impl CodexClient {
JSONRPCMessage::Notification(notification) => {
self.pending_notifications.push_back(notification);
}
- JSONRPCMessage::Request(_) => {
- bail!("unexpected request from codex app-server");
+ JSONRPCMessage::Request(request) => {
+ self.handle_server_request(request)?;
}
}
}
@@ -624,8 +765,8 @@ impl CodexClient {
// No outstanding requests, so ignore stray responses/errors for now.
continue;
}
- JSONRPCMessage::Request(_) => {
- bail!("unexpected request from codex app-server");
+ JSONRPCMessage::Request(request) => {
+ self.handle_server_request(request)?;
}
}
}
@@ -661,6 +802,114 @@ impl CodexClient {
fn request_id(&self) -> RequestId {
RequestId::String(Uuid::new_v4().to_string())
}
+
+ fn handle_server_request(&mut self, request: JSONRPCRequest) -> Result<()> {
+ let server_request = ServerRequest::try_from(request)
+ .context("failed to deserialize ServerRequest from JSONRPCRequest")?;
+
+ match server_request {
+ ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
+ self.handle_command_execution_request_approval(request_id, params)?;
+ }
+ ServerRequest::FileChangeRequestApproval { request_id, params } => {
+ self.approve_file_change_request(request_id, params)?;
+ }
+ other => {
+ bail!("received unsupported server request: {other:?}");
+ }
+ }
+
+ Ok(())
+ }
+
+ fn handle_command_execution_request_approval(
+ &mut self,
+ request_id: RequestId,
+ params: CommandExecutionRequestApprovalParams,
+ ) -> Result<()> {
+ let CommandExecutionRequestApprovalParams {
+ thread_id,
+ turn_id,
+ item_id,
+ reason,
+ proposed_execpolicy_amendment,
+ } = params;
+
+ println!(
+ "\n< commandExecution approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
+ );
+ if let Some(reason) = reason.as_deref() {
+ println!("< reason: {reason}");
+ }
+ if let Some(execpolicy_amendment) = proposed_execpolicy_amendment.as_ref() {
+ println!("< proposed execpolicy amendment: {execpolicy_amendment:?}");
+ }
+
+ let response = CommandExecutionRequestApprovalResponse {
+ decision: CommandExecutionApprovalDecision::Accept,
+ };
+ self.send_server_request_response(request_id, &response)?;
+ println!("< approved commandExecution request for item {item_id}");
+ Ok(())
+ }
+
+ fn approve_file_change_request(
+ &mut self,
+ request_id: RequestId,
+ params: FileChangeRequestApprovalParams,
+ ) -> Result<()> {
+ let FileChangeRequestApprovalParams {
+ thread_id,
+ turn_id,
+ item_id,
+ reason,
+ grant_root,
+ } = params;
+
+ println!(
+ "\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
+ );
+ if let Some(reason) = reason.as_deref() {
+ println!("< reason: {reason}");
+ }
+ if let Some(grant_root) = grant_root.as_deref() {
+ println!("< grant root: {}", grant_root.display());
+ }
+
+ let response = FileChangeRequestApprovalResponse {
+ decision: FileChangeApprovalDecision::Accept,
+ };
+ self.send_server_request_response(request_id, &response)?;
+ println!("< approved fileChange request for item {item_id}");
+ Ok(())
+ }
+
+ fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
+ where
+ T: Serialize,
+ {
+ let message = JSONRPCMessage::Response(JSONRPCResponse {
+ id: request_id,
+ result: serde_json::to_value(response)?,
+ });
+ self.write_jsonrpc_message(message)
+ }
+
+ fn write_jsonrpc_message(&mut self, message: JSONRPCMessage) -> Result<()> {
+ let payload = serde_json::to_string(&message)?;
+ let pretty = serde_json::to_string_pretty(&message)?;
+ print_multiline_with_prefix("> ", &pretty);
+
+ if let Some(stdin) = self.stdin.as_mut() {
+ writeln!(stdin, "{payload}")?;
+ stdin
+ .flush()
+ .context("failed to flush response to codex app-server")?;
+ return Ok(());
+ }
+
+ bail!("codex app-server stdin closed")
+ }
}
fn print_multiline_with_prefix(prefix: &str, payload: &str) {
diff --git a/codex-rs/app-server/Cargo.toml b/codex-rs/app-server/Cargo.toml
index 96f64afdf..fbe9150a1 100644
--- a/codex-rs/app-server/Cargo.toml
+++ b/codex-rs/app-server/Cargo.toml
@@ -1,7 +1,8 @@
[package]
-edition = "2024"
name = "codex-app-server"
-version = { workspace = true }
+version.workspace = true
+edition.workspace = true
+license.workspace = true
[[bin]]
name = "codex-app-server"
@@ -25,10 +26,15 @@ codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-feedback = { workspace = true }
+codex-rmcp-client = { workspace = true }
+codex-utils-absolute-path = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
+mcp-types = { workspace = true }
+tempfile = { workspace = true }
+toml = { workspace = true }
tokio = { workspace = true, features = [
"io-std",
"macros",
@@ -38,18 +44,15 @@ tokio = { workspace = true, features = [
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
-opentelemetry-appender-tracing = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }
[dev-dependencies]
app_test_support = { workspace = true }
-assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }
-tempfile = { workspace = true }
-toml = { workspace = true }
wiremock = { workspace = true }
+shlex = { workspace = true }
diff --git a/codex-rs/app-server/README.md b/codex-rs/app-server/README.md
index 5f9b87458..04e97ed62 100644
--- a/codex-rs/app-server/README.md
+++ b/codex-rs/app-server/README.md
@@ -1,16 +1,19 @@
# codex-app-server
-`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.
+`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt).
## Table of Contents
+
- [Protocol](#protocol)
- [Message Schema](#message-schema)
+- [Core Primitives](#core-primitives)
- [Lifecycle Overview](#lifecycle-overview)
- [Initialization](#initialization)
-- [Core primitives](#core-primitives)
-- [Thread & turn endpoints](#thread--turn-endpoints)
+- [API Overview](#api-overview)
+- [Events](#events)
+- [Approvals](#approvals)
+- [Skills](#skills)
- [Auth endpoints](#auth-endpoints)
-- [Events (work-in-progress)](#v2-streaming-events-work-in-progress)
## Protocol
@@ -25,10 +28,20 @@ codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```
+## Core Primitives
+
+The API exposes three top level primitives representing an interaction between a user and Codex:
+
+- **Thread**: A conversation between a user and the Codex agent. Each thread contains multiple turns.
+- **Turn**: One turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
+- **Item**: Represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations. Example items include user message, agent reasoning, agent message, shell command, file edit, etc.
+
+Use the thread APIs to create, list, or archive conversations. Drive a conversation with turn APIs and stream progress via turn notifications.
+
## Lifecycle Overview
- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request before this handshake gets rejected.
-- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead.
+- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead. If you want to branch from an existing conversation, call `thread/fork` to create a new thread id with copied history.
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes).
- Finish the turn: When the model is done (or the turn is interrupted via making the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage.
@@ -37,36 +50,49 @@ codex app-server generate-json-schema --out DIR
Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.
-Example:
+Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.
+
+Example (from OpenAI's official VSCode extension):
```json
-{ "method": "initialize", "id": 0, "params": {
- "clientInfo": { "name": "codex-vscode", "title": "Codex VS Code Extension", "version": "0.1.0" }
-} }
-{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 codex-vscode/0.1.0" } }
-{ "method": "initialized" }
+{
+ "method": "initialize",
+ "id": 0,
+ "params": {
+ "clientInfo": {
+ "name": "codex-vscode",
+ "title": "Codex VS Code Extension",
+ "version": "0.1.0"
+ }
+ }
+}
```
-## Core primitives
-
-We have 3 top level primitives:
-- Thread - a conversation between the Codex agent and a user. Each thread contains multiple turns.
-- Turn - one turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
-- Item - represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations.
-
-## Thread & turn endpoints
+## API Overview
-The JSON-RPC API exposes dedicated methods for managing Codex conversations. Threads store long-lived conversation metadata, and turns store the per-message exchange (input → Codex output, including streamed items). Use the thread APIs to create, list, or archive sessions, then drive the conversation with turn APIs and notifications.
-
-### Quick reference
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
+- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
+- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
+- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
-
-### 1) Start or resume a thread
+- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
+- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
+- `model/list` — list available models (with reasoning effort options).
+- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
+- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
+- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
+- `feedback/upload` — submit a feedback report (classification + optional reason/logs and conversation_id); returns the tracking thread id.
+- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
+- `config/read` — fetch the effective config on disk after resolving config layering.
+- `config/value/write` — write a single config key/value to the user's config.toml on disk.
+- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
+- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).
+
+### Example: Start or resume a thread
Start a fresh thread when you need a new Codex conversation.
@@ -97,9 +123,18 @@ To continue a stored session, call `thread/resume` with the `thread.id` you prev
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
```
-### 2) List threads (pagination & filters)
+To branch from a stored session, call `thread/fork` with the `thread.id`. This creates a new thread id and emits a `thread/started` notification for it:
+
+```json
+{ "method": "thread/fork", "id": 12, "params": { "threadId": "thr_123" } }
+{ "id": 12, "result": { "thread": { "id": "thr_456", … } } }
+{ "method": "thread/started", "params": { "thread": { … } } }
+```
+
+### Example: List threads (with pagination & filters)
`thread/list` lets you render a history UI. Pass any combination of:
+
- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — server defaults to a reasonable page size if unset.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
@@ -122,7 +157,18 @@ Example:
When `nextCursor` is `null`, you’ve reached the final page.
-### 3) Archive a thread
+### Example: List loaded threads
+
+`thread/loaded/list` returns thread ids currently loaded in memory. This is useful when you want to check which sessions are active without scanning rollouts on disk.
+
+```json
+{ "method": "thread/loaded/list", "id": 21 }
+{ "id": 21, "result": {
+ "data": ["thr_123", "thr_456"]
+} }
+```
+
+### Example: Archive a thread
Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.
@@ -133,7 +179,7 @@ Use `thread/archive` to move the persisted rollout (stored as a JSONL file on di
An archived thread will not appear in future calls to `thread/list`.
-### 4) Start a turn (send user input)
+### Example: Start a turn (send user input)
Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:
@@ -141,7 +187,7 @@ Turns attach user input (text or images) to a thread and trigger Codex generatio
- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`
-You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.
+You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread. `outputSchema` applies only to the current turn.
```json
{ "method": "turn/start", "id": 30, "params": {
@@ -151,13 +197,20 @@ You can optionally specify config overrides on the new turn. If specified, these
"cwd": "/Users/me/project",
"approvalPolicy": "unlessTrusted",
"sandboxPolicy": {
- "mode": "workspaceWrite",
+ "type": "workspaceWrite",
"writableRoots": ["/Users/me/project"],
"networkAccess": true
},
"model": "gpt-5.1-codex",
"effort": "medium",
- "summary": "concise"
+ "summary": "concise",
+ // Optional JSON Schema to constrain the final assistant message for this turn.
+ "outputSchema": {
+ "type": "object",
+ "properties": { "answer": { "type": "string" } },
+ "required": ["answer"],
+ "additionalProperties": false
+ }
} }
{ "id": 30, "result": { "turn": {
"id": "turn_456",
@@ -167,7 +220,27 @@ You can optionally specify config overrides on the new turn. If specified, these
} } }
```
-### 5) Interrupt an active turn
+### Example: Start a turn (invoke a skill)
+
+Invoke a skill explicitly by including `$` in the text input and adding a `skill` input item alongside it.
+
+```json
+{ "method": "turn/start", "id": 33, "params": {
+ "threadId": "thr_123",
+ "input": [
+ { "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage." },
+ { "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
+ ]
+} }
+{ "id": 33, "result": { "turn": {
+ "id": "turn_457",
+ "status": "inProgress",
+ "items": [],
+ "error": null
+} } }
+```
+
+### Example: Interrupt an active turn
You can cancel a running Turn with `turn/interrupt`.
@@ -181,11 +254,247 @@ You can cancel a running Turn with `turn/interrupt`.
The server requests cancellations for running subprocesses, then emits a `turn/completed` event with `status: "interrupted"`. Rely on the `turn/completed` to know when Codex-side cleanup is done.
+### Example: Request a code review
+
+Use `review/start` to run Codex’s reviewer on the currently checked-out project. The request takes the thread id plus a `target` describing what should be reviewed:
+
+- `{"type":"uncommittedChanges"}` — staged, unstaged, and untracked files.
+- `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
+- `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
+- `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.
+- `delivery` (`"inline"` or `"detached"`, default `"inline"`) — where the review runs:
+ - `"inline"`: run the review as a new turn on the existing thread. The response’s `reviewThreadId` equals the original `threadId`, and no new `thread/started` notification is emitted.
+ - `"detached"`: fork a new review thread from the parent conversation and run the review there. The response’s `reviewThreadId` is the id of this new review thread, and the server emits a `thread/started` notification for it before streaming review items.
+
+Example request/response:
+
+```json
+{ "method": "review/start", "id": 40, "params": {
+ "threadId": "thr_123",
+ "delivery": "inline",
+ "target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
+} }
+{ "id": 40, "result": {
+ "turn": {
+ "id": "turn_900",
+ "status": "inProgress",
+ "items": [
+ { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
+ ],
+ "error": null
+ },
+ "reviewThreadId": "thr_123"
+} }
+```
+
+For a detached review, use `"delivery": "detached"`. The response is the same shape, but `reviewThreadId` will be the id of the new review thread (different from the original `threadId`). The server also emits a `thread/started` notification for that new thread before streaming the review turn.
+
+Codex streams the usual `turn/started` notification followed by an `item/started`
+with an `enteredReviewMode` item so clients can show progress:
+
+```json
+{
+ "method": "item/started",
+ "params": {
+ "item": {
+ "type": "enteredReviewMode",
+ "id": "turn_900",
+ "review": "current changes"
+ }
+ }
+}
+```
+
+When the reviewer finishes, the server emits `item/started` and `item/completed`
+containing an `exitedReviewMode` item with the final review text:
+
+```json
+{
+ "method": "item/completed",
+ "params": {
+ "item": {
+ "type": "exitedReviewMode",
+ "id": "turn_900",
+ "review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n ..."
+ }
+ }
+}
+```
+
+The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::ExitedReviewMode` in the generated schema). Use this notification to render the reviewer output in your client.
+
+### Example: One-off command execution
+
+Run a standalone command (argv vector) in the server’s sandbox without creating a thread or turn:
+
+```json
+{ "method": "command/exec", "id": 32, "params": {
+ "command": ["ls", "-la"],
+ "cwd": "/Users/me/project", // optional; defaults to server cwd
+ "sandboxPolicy": { "type": "workspaceWrite" }, // optional; defaults to user config
+ "timeoutMs": 10000 // optional; ms timeout; defaults to server timeout
+} }
+{ "id": 32, "result": { "exitCode": 0, "stdout": "...", "stderr": "" } }
+```
+
+- For clients that are already sandboxed externally, set `sandboxPolicy` to `{"type":"externalSandbox","networkAccess":"enabled"}` (or omit `networkAccess` to keep it restricted). Codex will not enforce its own sandbox in this mode; it tells the model it has full file-system access and passes the `networkAccess` state through `environment_context`.
+
+Notes:
+
+- Empty `command` arrays are rejected.
+- `sandboxPolicy` accepts the same shape used by `turn/start` (e.g., `dangerFullAccess`, `readOnly`, `workspaceWrite` with flags, `externalSandbox` with `networkAccess` `restricted|enabled`).
+- When omitted, `timeoutMs` falls back to the server default.
+
+## Events
+
+Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.
+
+### Turn events
+
+The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.
+
+- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
+- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo?, additionalDetails? } }`.
+- `turn/diff/updated` — `{ threadId, turnId, diff }` represents the up-to-date snapshot of the turn-level unified diff, emitted after every FileChange item. `diff` is the latest aggregated unified diff across every file change in the turn. UIs can render this to show the full "what changed" view without stitching individual `fileChange` items.
+- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.
+
+Today both `turn/started` and `turn/completed` carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.
+
+#### Items
+
+`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:
+
+- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
+- `agentMessage` — `{id, text}` containing the accumulated agent reply.
+- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
+- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
+- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
+- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
+- `webSearch` — `{id, query}` for a web search request issued by the agent.
+- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
+- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
+- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
+- `compacted` — `{threadId, turnId}` when Codex compacts the conversation history. This can happen automatically.
+
+All items emit two shared lifecycle events:
+
+- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
+- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.
+
+There are additional item-specific events:
+
+#### agentMessage
+
+- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.
+
+#### reasoning
+
+- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
+- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
+- `item/reasoning/textDelta` — streams raw reasoning text (only applicable for e.g. open source models); use `contentIndex` to group deltas that belong together before showing them in the UI.
+
+#### commandExecution
+
+- `item/commandExecution/outputDelta` — streams stdout/stderr for the command; append deltas in order to render live output alongside `aggregatedOutput` in the final item.
+ Final `commandExecution` items include parsed `commandActions`, `status`, `exitCode`, and `durationMs` so the UI can summarize what ran and whether it succeeded.
+
+#### fileChange
+
+- `item/fileChange/outputDelta` — contains the tool call response of the underlying `apply_patch` tool call.
+
+### Errors
+
+The `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo?, additionalDetails? } }` payload as `turn.status: "failed"` and may precede that terminal notification.
+
+`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:
+
+- `ContextWindowExceeded`
+- `UsageLimitExceeded`
+- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures including 4xx/5xx
+- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
+- `ResponseStreamDisconnected { httpStatusCode? }`: disconnect of the response SSE stream in the middle of a turn before completion
+- `ResponseTooManyFailedAttempts { httpStatusCode? }`
+- `BadRequest`
+- `Unauthorized`
+- `SandboxError`
+- `InternalServerError`
+- `Other`: all unclassified errors
+
+When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
+
+## Approvals
+
+Certain actions (shell commands or modifying files) may require explicit user approval depending on the user's config. When `turn/start` is used, the app-server drives an approval flow by sending a server-initiated JSON-RPC request to the client. The client must respond to tell Codex whether to proceed. UIs should present these requests inline with the active turn so users can review the proposed command or diff before choosing.
+
+- Requests include `threadId` and `turnId`—use them to scope UI state to the active conversation.
+- Respond with a single `{ "decision": "accept" | "decline" }` payload (plus optional `acceptSettings` on command executions). The server resumes or declines the work and ends the item with `item/completed`.
+
+### Command execution approvals
+
+Order of messages:
+
+1. `item/started` — shows the pending `commandExecution` item with `command`, `cwd`, and other fields so you can render the proposed action.
+2. `item/commandExecution/requestApproval` (request) — carries the same `itemId`, `threadId`, `turnId`, optionally `reason` or `risk`, plus `parsedCmd` for friendly display.
+3. Client response — `{ "decision": "accept", "acceptSettings": { "forSession": false } }` or `{ "decision": "decline" }`.
+4. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.
+
+### File change approvals
+
+Order of messages:
+
+1. `item/started` — emits a `fileChange` item with `changes` (diff chunk summaries) and `status: "inProgress"`. Show the proposed edits and paths to the user.
+2. `item/fileChange/requestApproval` (request) — includes `itemId`, `threadId`, `turnId`, and an optional `reason`.
+3. Client response — `{ "decision": "accept" }` or `{ "decision": "decline" }`.
+4. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.
+
+UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request. The terminal `item/completed` notification will be sent with the appropriate status.
+
+## Skills
+
+Invoke a skill by including `$` in the text input. Add a `skill` input item (recommended) so the backend injects full skill instructions instead of relying on the model to resolve the name.
+
+```json
+{
+ "method": "turn/start",
+ "id": 101,
+ "params": {
+ "threadId": "thread-1",
+ "input": [
+ { "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI." },
+ { "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
+ ]
+ }
+}
+```
+
+If you omit the `skill` item, the model will still parse the `$` marker and try to locate the skill, which can add latency.
+
+Example:
+
+```
+$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage.
+```
+
+Use `skills/list` to fetch the available skills (optionally scoped by `cwd` and/or with `forceReload`).
+
+```json
+{ "method": "skills/list", "id": 25, "params": {
+ "cwd": "/Users/me/project",
+ "forceReload": false
+} }
+{ "id": 25, "result": {
+ "skills": [
+ { "name": "skill-creator", "description": "Create or update a Codex skill" }
+ ]
+} }
+```
+
## Auth endpoints
The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.
-### Quick reference
+### API Overview
+
- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
@@ -193,15 +502,19 @@ The JSON-RPC auth/account surface exposes request/response methods plus server-i
- `account/logout` — sign out; triggers `account/updated`.
- `account/updated` (notify) — emitted whenever auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).
+- `account/rateLimits/updated` (notify) — emitted whenever a user's ChatGPT rate limits change.
+- `mcpServer/oauthLogin/completed` (notify) — emitted after an `mcpServer/oauth/login` flow finishes for a server; payload includes `{ name, success, error? }`.
### 1) Check auth state
Request:
+
```json
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
```
Response examples:
+
```json
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
@@ -210,6 +523,7 @@ Response examples:
```
Field notes:
+
- `refreshToken` (bool): set `true` to force a token refresh.
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.
@@ -217,7 +531,11 @@ Field notes:
1. Send:
```json
- { "method": "account/login/start", "id": 2, "params": { "type": "apiKey", "apiKey": "sk-…" } }
+ {
+ "method": "account/login/start",
+ "id": 2,
+ "params": { "type": "apiKey", "apiKey": "sk-…" }
+ }
```
2. Expect:
```json
@@ -267,42 +585,7 @@ Field notes:
```
Field notes:
+
- `usedPercent` is current usage within the OpenAI quota window.
- `windowDurationMins` is the quota window length.
- `resetsAt` is a Unix timestamp (seconds) for the next reset.
-
-### Dev notes
-
-- `codex app-server generate-ts --out ` emits v2 types under `v2/`.
-- `codex app-server generate-json-schema --out ` outputs `codex_app_server_protocol.schemas.json`.
-- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.
-
-
-## Events (work-in-progress)
-
-Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.
-
-### Turn events
-
-The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` plus token `usage`), and clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.
-
-#### Thread items
-
-`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:
-- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
-- `agentMessage` — `{id, text}` containing the accumulated agent reply.
-- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
-- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
-- `webSearch` — `{id, query}` for a web search request issued by the agent.
-
-All items emit two shared lifecycle events:
-- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
-- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.
-
-There are additional item-specific events:
-#### agentMessage
-- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.
-#### reasoning
-- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
-- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
-- `item/reasoning/textDelta` — streams raw reasoning text (only applicable for e.g. open source models); use `contentIndex` to group deltas that belong together before showing them in the UI.
diff --git a/codex-rs/app-server/src/bespoke_event_handling.rs b/codex-rs/app-server/src/bespoke_event_handling.rs
index 8ed343f03..d18a7878f 100644
--- a/codex-rs/app-server/src/bespoke_event_handling.rs
+++ b/codex-rs/app-server/src/bespoke_event_handling.rs
@@ -1,88 +1,201 @@
use crate::codex_message_processor::ApiVersion;
use crate::codex_message_processor::PendingInterrupts;
+use crate::codex_message_processor::PendingRollbacks;
+use crate::codex_message_processor::TurnSummary;
+use crate::codex_message_processor::TurnSummaryStore;
+use crate::codex_message_processor::read_event_msgs_from_rollout;
+use crate::codex_message_processor::read_summary_from_rollout;
+use crate::codex_message_processor::summary_to_thread;
+use crate::error_code::INTERNAL_ERROR_CODE;
+use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
use codex_app_server_protocol::AgentMessageDeltaNotification;
use codex_app_server_protocol::ApplyPatchApprovalParams;
use codex_app_server_protocol::ApplyPatchApprovalResponse;
-use codex_app_server_protocol::ApprovalDecision;
+use codex_app_server_protocol::CodexErrorInfo as V2CodexErrorInfo;
use codex_app_server_protocol::CommandAction as V2ParsedCommand;
+use codex_app_server_protocol::CommandExecutionApprovalDecision;
use codex_app_server_protocol::CommandExecutionOutputDeltaNotification;
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;
+use codex_app_server_protocol::ContextCompactedNotification;
+use codex_app_server_protocol::DeprecationNoticeNotification;
+use codex_app_server_protocol::ErrorNotification;
use codex_app_server_protocol::ExecCommandApprovalParams;
use codex_app_server_protocol::ExecCommandApprovalResponse;
+use codex_app_server_protocol::ExecPolicyAmendment as V2ExecPolicyAmendment;
+use codex_app_server_protocol::FileChangeApprovalDecision;
+use codex_app_server_protocol::FileChangeOutputDeltaNotification;
+use codex_app_server_protocol::FileChangeRequestApprovalParams;
+use codex_app_server_protocol::FileChangeRequestApprovalResponse;
+use codex_app_server_protocol::FileUpdateChange;
use codex_app_server_protocol::InterruptConversationResponse;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
+use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::McpToolCallError;
use codex_app_server_protocol::McpToolCallResult;
use codex_app_server_protocol::McpToolCallStatus;
+use codex_app_server_protocol::PatchApplyStatus;
+use codex_app_server_protocol::PatchChangeKind as V2PatchChangeKind;
+use codex_app_server_protocol::RawResponseItemCompletedNotification;
use codex_app_server_protocol::ReasoningSummaryPartAddedNotification;
use codex_app_server_protocol::ReasoningSummaryTextDeltaNotification;
use codex_app_server_protocol::ReasoningTextDeltaNotification;
-use codex_app_server_protocol::SandboxCommandAssessment as V2SandboxCommandAssessment;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
+use codex_app_server_protocol::TerminalInteractionNotification;
use codex_app_server_protocol::ThreadItem;
+use codex_app_server_protocol::ThreadRollbackResponse;
+use codex_app_server_protocol::ThreadTokenUsage;
+use codex_app_server_protocol::ThreadTokenUsageUpdatedNotification;
+use codex_app_server_protocol::Turn;
+use codex_app_server_protocol::TurnCompletedNotification;
+use codex_app_server_protocol::TurnDiffUpdatedNotification;
+use codex_app_server_protocol::TurnError;
use codex_app_server_protocol::TurnInterruptResponse;
-use codex_core::CodexConversation;
+use codex_app_server_protocol::TurnPlanStep;
+use codex_app_server_protocol::TurnPlanUpdatedNotification;
+use codex_app_server_protocol::TurnStatus;
+use codex_app_server_protocol::build_turns_from_event_msgs;
+use codex_core::CodexThread;
use codex_core::parse_command::shlex_join;
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
+use codex_core::protocol::CodexErrorInfo as CoreCodexErrorInfo;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::ExecApprovalRequestEvent;
use codex_core::protocol::ExecCommandEndEvent;
+use codex_core::protocol::FileChange as CoreFileChange;
use codex_core::protocol::McpToolCallBeginEvent;
use codex_core::protocol::McpToolCallEndEvent;
use codex_core::protocol::Op;
use codex_core::protocol::ReviewDecision;
-use codex_protocol::ConversationId;
+use codex_core::protocol::TokenCountEvent;
+use codex_core::protocol::TurnDiffEvent;
+use codex_core::review_format::format_review_findings_block;
+use codex_core::review_prompts;
+use codex_protocol::ThreadId;
+use codex_protocol::plan_tool::UpdatePlanArgs;
+use codex_protocol::protocol::ReviewOutputEvent;
+use std::collections::HashMap;
use std::convert::TryFrom;
+use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::oneshot;
use tracing::error;
type JsonValue = serde_json::Value;
+#[allow(clippy::too_many_arguments)]
pub(crate) async fn apply_bespoke_event_handling(
event: Event,
- conversation_id: ConversationId,
- conversation: Arc,
+ conversation_id: ThreadId,
+ conversation: Arc,
outgoing: Arc,
pending_interrupts: PendingInterrupts,
+ pending_rollbacks: PendingRollbacks,
+ turn_summary_store: TurnSummaryStore,
api_version: ApiVersion,
+ fallback_model_provider: String,
) {
- let Event { id: event_id, msg } = event;
+ let Event {
+ id: event_turn_id,
+ msg,
+ } = event;
match msg {
+ EventMsg::TaskComplete(_ev) => {
+ handle_turn_complete(
+ conversation_id,
+ event_turn_id,
+ &outgoing,
+ &turn_summary_store,
+ )
+ .await;
+ }
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
call_id,
+ turn_id,
changes,
reason,
grant_root,
- }) => {
- let params = ApplyPatchApprovalParams {
- conversation_id,
- call_id,
- file_changes: changes,
- reason,
- grant_root,
- };
- let rx = outgoing
- .send_request(ServerRequestPayload::ApplyPatchApproval(params))
- .await;
- tokio::spawn(async move {
- on_patch_approval_response(event_id, rx, conversation).await;
- });
- }
+ }) => match api_version {
+ ApiVersion::V1 => {
+ let params = ApplyPatchApprovalParams {
+ conversation_id,
+ call_id,
+ file_changes: changes.clone(),
+ reason,
+ grant_root,
+ };
+ let rx = outgoing
+ .send_request(ServerRequestPayload::ApplyPatchApproval(params))
+ .await;
+ tokio::spawn(async move {
+ on_patch_approval_response(event_turn_id, rx, conversation).await;
+ });
+ }
+ ApiVersion::V2 => {
+ // Until we migrate the core to be aware of a first class FileChangeItem
+ // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+ let item_id = call_id.clone();
+ let patch_changes = convert_patch_changes(&changes);
+
+ let first_start = {
+ let mut map = turn_summary_store.lock().await;
+ let summary = map.entry(conversation_id).or_default();
+ summary.file_change_started.insert(item_id.clone())
+ };
+ if first_start {
+ let item = ThreadItem::FileChange {
+ id: item_id.clone(),
+ changes: patch_changes.clone(),
+ status: PatchApplyStatus::InProgress,
+ };
+ let notification = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemStarted(notification))
+ .await;
+ }
+
+ let params = FileChangeRequestApprovalParams {
+ thread_id: conversation_id.to_string(),
+ turn_id: turn_id.clone(),
+ item_id: item_id.clone(),
+ reason,
+ grant_root,
+ };
+ let rx = outgoing
+ .send_request(ServerRequestPayload::FileChangeRequestApproval(params))
+ .await;
+ tokio::spawn(async move {
+ on_file_change_request_approval_response(
+ event_turn_id,
+ conversation_id,
+ item_id,
+ patch_changes,
+ rx,
+ conversation,
+ outgoing,
+ turn_summary_store,
+ )
+ .await;
+ });
+ }
+ },
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
call_id,
turn_id,
command,
cwd,
reason,
- risk,
+ proposed_execpolicy_amendment,
parsed_cmd,
}) => match api_version {
ApiVersion::V1 => {
@@ -92,25 +205,34 @@ pub(crate) async fn apply_bespoke_event_handling(
command,
cwd,
reason,
- risk,
parsed_cmd,
};
let rx = outgoing
.send_request(ServerRequestPayload::ExecCommandApproval(params))
.await;
tokio::spawn(async move {
- on_exec_approval_response(event_id, rx, conversation).await;
+ on_exec_approval_response(event_turn_id, rx, conversation).await;
});
}
ApiVersion::V2 => {
+ let item_id = call_id.clone();
+ let command_actions = parsed_cmd
+ .iter()
+ .cloned()
+ .map(V2ParsedCommand::from)
+ .collect::>();
+ let command_string = shlex_join(&command);
+ let proposed_execpolicy_amendment_v2 =
+ proposed_execpolicy_amendment.map(V2ExecPolicyAmendment::from);
+
let params = CommandExecutionRequestApprovalParams {
thread_id: conversation_id.to_string(),
turn_id: turn_id.clone(),
// Until we migrate the core to be aware of a first class CommandExecutionItem
// and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
- item_id: call_id.clone(),
+ item_id: item_id.clone(),
reason,
- risk: risk.map(V2SandboxCommandAssessment::from),
+ proposed_execpolicy_amendment: proposed_execpolicy_amendment_v2,
};
let rx = outgoing
.send_request(ServerRequestPayload::CommandExecutionRequestApproval(
@@ -118,26 +240,48 @@ pub(crate) async fn apply_bespoke_event_handling(
))
.await;
tokio::spawn(async move {
- on_command_execution_request_approval_response(event_id, rx, conversation)
- .await;
+ on_command_execution_request_approval_response(
+ event_turn_id,
+ conversation_id,
+ item_id,
+ command_string,
+ cwd,
+ command_actions,
+ rx,
+ conversation,
+ outgoing,
+ )
+ .await;
});
}
},
// TODO(celia): properly construct McpToolCall TurnItem in core.
EventMsg::McpToolCallBegin(begin_event) => {
- let notification = construct_mcp_tool_call_notification(begin_event).await;
+ let notification = construct_mcp_tool_call_notification(
+ begin_event,
+ conversation_id.to_string(),
+ event_turn_id.clone(),
+ )
+ .await;
outgoing
.send_server_notification(ServerNotification::ItemStarted(notification))
.await;
}
EventMsg::McpToolCallEnd(end_event) => {
- let notification = construct_mcp_tool_call_end_notification(end_event).await;
+ let notification = construct_mcp_tool_call_end_notification(
+ end_event,
+ conversation_id.to_string(),
+ event_turn_id.clone(),
+ )
+ .await;
outgoing
.send_server_notification(ServerNotification::ItemCompleted(notification))
.await;
}
EventMsg::AgentMessageContentDelta(event) => {
let notification = AgentMessageDeltaNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
item_id: event.item_id,
delta: event.delta,
};
@@ -145,8 +289,28 @@ pub(crate) async fn apply_bespoke_event_handling(
.send_server_notification(ServerNotification::AgentMessageDelta(notification))
.await;
}
+ EventMsg::ContextCompacted(..) => {
+ let notification = ContextCompactedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ContextCompacted(notification))
+ .await;
+ }
+ EventMsg::DeprecationNotice(event) => {
+ let notification = DeprecationNoticeNotification {
+ summary: event.summary,
+ details: event.details,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::DeprecationNotice(notification))
+ .await;
+ }
EventMsg::ReasoningContentDelta(event) => {
let notification = ReasoningSummaryTextDeltaNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
item_id: event.item_id,
delta: event.delta,
summary_index: event.summary_index,
@@ -159,6 +323,8 @@ pub(crate) async fn apply_bespoke_event_handling(
}
EventMsg::ReasoningRawContentDelta(event) => {
let notification = ReasoningTextDeltaNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
item_id: event.item_id,
delta: event.delta,
content_index: event.content_index,
@@ -169,6 +335,8 @@ pub(crate) async fn apply_bespoke_event_handling(
}
EventMsg::AgentReasoningSectionBreak(event) => {
let notification = ReasoningSummaryPartAddedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
item_id: event.item_id,
summary_index: event.summary_index,
};
@@ -179,59 +347,298 @@ pub(crate) async fn apply_bespoke_event_handling(
.await;
}
EventMsg::TokenCount(token_count_event) => {
- if let Some(rate_limits) = token_count_event.rate_limits {
- outgoing
- .send_server_notification(ServerNotification::AccountRateLimitsUpdated(
- AccountRateLimitsUpdatedNotification {
- rate_limits: rate_limits.into(),
- },
- ))
- .await;
- }
+ handle_token_count_event(conversation_id, event_turn_id, token_count_event, &outgoing)
+ .await;
+ }
+ EventMsg::Error(ev) => {
+ let message = ev.message.clone();
+ let codex_error_info = ev.codex_error_info.clone();
+
+ // If this error belongs to an in-flight `thread/rollback` request, fail that request
+ // (and clear pending state) so subsequent rollbacks are unblocked.
+ //
+ // Don't send a notification for this error.
+ if matches!(
+ codex_error_info,
+ Some(CoreCodexErrorInfo::ThreadRollbackFailed)
+ ) {
+ return handle_thread_rollback_failed(
+ conversation_id,
+ message,
+ &pending_rollbacks,
+ &outgoing,
+ )
+ .await;
+ };
+
+ let turn_error = TurnError {
+ message: ev.message,
+ codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
+ additional_details: None,
+ };
+ handle_error(conversation_id, turn_error.clone(), &turn_summary_store).await;
+ outgoing
+ .send_server_notification(ServerNotification::Error(ErrorNotification {
+ error: turn_error.clone(),
+ will_retry: false,
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ }))
+ .await;
+ }
+ EventMsg::StreamError(ev) => {
+ // We don't need to update the turn summary store for stream errors as they are intermediate error states for retries,
+ // but we notify the client.
+ let turn_error = TurnError {
+ message: ev.message,
+ codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
+ additional_details: ev.additional_details,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::Error(ErrorNotification {
+ error: turn_error,
+ will_retry: true,
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ }))
+ .await;
+ }
+ EventMsg::ViewImageToolCall(view_image_event) => {
+ let item = ThreadItem::ImageView {
+ id: view_image_event.call_id.clone(),
+ path: view_image_event.path.to_string_lossy().into_owned(),
+ };
+ let started = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item: item.clone(),
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemStarted(started))
+ .await;
+ let completed = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemCompleted(completed))
+ .await;
+ }
+ EventMsg::EnteredReviewMode(review_request) => {
+ let review = review_request
+ .user_facing_hint
+ .unwrap_or_else(|| review_prompts::user_facing_hint(&review_request.target));
+ let item = ThreadItem::EnteredReviewMode {
+ id: event_turn_id.clone(),
+ review,
+ };
+ let started = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item: item.clone(),
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemStarted(started))
+ .await;
+ let completed = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemCompleted(completed))
+ .await;
}
EventMsg::ItemStarted(item_started_event) => {
let item: ThreadItem = item_started_event.item.clone().into();
- let notification = ItemStartedNotification { item };
+ let notification = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
outgoing
.send_server_notification(ServerNotification::ItemStarted(notification))
.await;
}
EventMsg::ItemCompleted(item_completed_event) => {
let item: ThreadItem = item_completed_event.item.clone().into();
- let notification = ItemCompletedNotification { item };
+ let notification = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
outgoing
.send_server_notification(ServerNotification::ItemCompleted(notification))
.await;
}
+ EventMsg::ExitedReviewMode(review_event) => {
+ let review = match review_event.review_output {
+ Some(output) => render_review_output_text(&output),
+ None => REVIEW_FALLBACK_MESSAGE.to_string(),
+ };
+ let item = ThreadItem::ExitedReviewMode {
+ id: event_turn_id.clone(),
+ review,
+ };
+ let started = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item: item.clone(),
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemStarted(started))
+ .await;
+ let completed = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemCompleted(completed))
+ .await;
+ }
+ EventMsg::RawResponseItem(raw_response_item_event) => {
+ maybe_emit_raw_response_item_completed(
+ api_version,
+ conversation_id,
+ &event_turn_id,
+ raw_response_item_event.item,
+ outgoing.as_ref(),
+ )
+ .await;
+ }
+ EventMsg::PatchApplyBegin(patch_begin_event) => {
+ // Until we migrate the core to be aware of a first class FileChangeItem
+ // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+ let item_id = patch_begin_event.call_id.clone();
+
+ let first_start = {
+ let mut map = turn_summary_store.lock().await;
+ let summary = map.entry(conversation_id).or_default();
+ summary.file_change_started.insert(item_id.clone())
+ };
+ if first_start {
+ let item = ThreadItem::FileChange {
+ id: item_id.clone(),
+ changes: convert_patch_changes(&patch_begin_event.changes),
+ status: PatchApplyStatus::InProgress,
+ };
+ let notification = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemStarted(notification))
+ .await;
+ }
+ }
+ EventMsg::PatchApplyEnd(patch_end_event) => {
+ // Until we migrate the core to be aware of a first class FileChangeItem
+ // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+ let item_id = patch_end_event.call_id.clone();
+
+ let status = if patch_end_event.success {
+ PatchApplyStatus::Completed
+ } else {
+ PatchApplyStatus::Failed
+ };
+ let changes = convert_patch_changes(&patch_end_event.changes);
+ complete_file_change_item(
+ conversation_id,
+ item_id,
+ changes,
+ status,
+ event_turn_id.clone(),
+ outgoing.as_ref(),
+ &turn_summary_store,
+ )
+ .await;
+ }
EventMsg::ExecCommandBegin(exec_command_begin_event) => {
+ let item_id = exec_command_begin_event.call_id.clone();
+ let command_actions = exec_command_begin_event
+ .parsed_cmd
+ .into_iter()
+ .map(V2ParsedCommand::from)
+ .collect::>();
+ let command = shlex_join(&exec_command_begin_event.command);
+ let cwd = exec_command_begin_event.cwd;
+ let process_id = exec_command_begin_event.process_id;
+
let item = ThreadItem::CommandExecution {
- id: exec_command_begin_event.call_id.clone(),
- command: shlex_join(&exec_command_begin_event.command),
- cwd: exec_command_begin_event.cwd,
+ id: item_id,
+ command,
+ cwd,
+ process_id,
status: CommandExecutionStatus::InProgress,
- command_actions: exec_command_begin_event
- .parsed_cmd
- .into_iter()
- .map(V2ParsedCommand::from)
- .collect(),
+ command_actions,
aggregated_output: None,
exit_code: None,
duration_ms: None,
};
- let notification = ItemStartedNotification { item };
+ let notification = ItemStartedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
outgoing
.send_server_notification(ServerNotification::ItemStarted(notification))
.await;
}
EventMsg::ExecCommandOutputDelta(exec_command_output_delta_event) => {
- let notification = CommandExecutionOutputDeltaNotification {
- item_id: exec_command_output_delta_event.call_id.clone(),
- delta: String::from_utf8_lossy(&exec_command_output_delta_event.chunk).to_string(),
+ let item_id = exec_command_output_delta_event.call_id.clone();
+ let delta = String::from_utf8_lossy(&exec_command_output_delta_event.chunk).to_string();
+ // The underlying EventMsg::ExecCommandOutputDelta is used for shell, unified_exec,
+ // and apply_patch tool calls. We represent apply_patch with the FileChange item, and
+ // everything else with the CommandExecution item.
+ //
+ // We need to detect which item type it is so we can emit the right notification.
+ // We already have state tracking FileChange items on item/started, so let's use that.
+ let is_file_change = {
+ let map = turn_summary_store.lock().await;
+ map.get(&conversation_id)
+ .is_some_and(|summary| summary.file_change_started.contains(&item_id))
+ };
+ if is_file_change {
+ let notification = FileChangeOutputDeltaNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item_id,
+ delta,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::FileChangeOutputDelta(
+ notification,
+ ))
+ .await;
+ } else {
+ let notification = CommandExecutionOutputDeltaNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item_id,
+ delta,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::CommandExecutionOutputDelta(
+ notification,
+ ))
+ .await;
+ }
+ }
+ EventMsg::TerminalInteraction(terminal_event) => {
+ let item_id = terminal_event.call_id.clone();
+
+ let notification = TerminalInteractionNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item_id,
+ process_id: terminal_event.process_id,
+ stdin: terminal_event.stdin,
};
outgoing
- .send_server_notification(ServerNotification::CommandExecutionOutputDelta(
- notification,
- ))
+ .send_server_notification(ServerNotification::TerminalInteraction(notification))
.await;
}
EventMsg::ExecCommandEnd(exec_command_end_event) => {
@@ -240,6 +647,7 @@ pub(crate) async fn apply_bespoke_event_handling(
command,
cwd,
parsed_cmd,
+ process_id,
aggregated_output,
exit_code,
duration,
@@ -251,6 +659,10 @@ pub(crate) async fn apply_bespoke_event_handling(
} else {
CommandExecutionStatus::Failed
};
+ let command_actions = parsed_cmd
+ .into_iter()
+ .map(V2ParsedCommand::from)
+ .collect::>();
let aggregated_output = if aggregated_output.is_empty() {
None
@@ -264,14 +676,19 @@ pub(crate) async fn apply_bespoke_event_handling(
id: call_id,
command: shlex_join(&command),
cwd,
+ process_id,
status,
- command_actions: parsed_cmd.into_iter().map(V2ParsedCommand::from).collect(),
+ command_actions,
aggregated_output,
exit_code: Some(exit_code),
duration_ms: Some(duration_ms),
};
- let notification = ItemCompletedNotification { item };
+ let notification = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.clone(),
+ item,
+ };
outgoing
.send_server_notification(ServerNotification::ItemCompleted(notification))
.await;
@@ -298,16 +715,349 @@ pub(crate) async fn apply_bespoke_event_handling(
}
}
}
+
+ handle_turn_interrupted(
+ conversation_id,
+ event_turn_id,
+ &outgoing,
+ &turn_summary_store,
+ )
+ .await;
+ }
+ EventMsg::ThreadRolledBack(_rollback_event) => {
+ let pending = {
+ let mut map = pending_rollbacks.lock().await;
+ map.remove(&conversation_id)
+ };
+
+ if let Some(request_id) = pending {
+ let rollout_path = conversation.rollout_path();
+ let response = match read_summary_from_rollout(
+ rollout_path.as_path(),
+ fallback_model_provider.as_str(),
+ )
+ .await
+ {
+ Ok(summary) => {
+ let mut thread = summary_to_thread(summary);
+ match read_event_msgs_from_rollout(rollout_path.as_path()).await {
+ Ok(events) => {
+ thread.turns = build_turns_from_event_msgs(&events);
+ ThreadRollbackResponse { thread }
+ }
+ Err(err) => {
+ let error = JSONRPCErrorError {
+ code: INTERNAL_ERROR_CODE,
+ message: format!(
+ "failed to load rollout `{}`: {err}",
+ rollout_path.display()
+ ),
+ data: None,
+ };
+ outgoing.send_error(request_id, error).await;
+ return;
+ }
+ }
+ }
+ Err(err) => {
+ let error = JSONRPCErrorError {
+ code: INTERNAL_ERROR_CODE,
+ message: format!(
+ "failed to load rollout `{}`: {err}",
+ rollout_path.display()
+ ),
+ data: None,
+ };
+ outgoing.send_error(request_id, error).await;
+ return;
+ }
+ };
+
+ outgoing.send_response(request_id, response).await;
+ }
+ }
+ EventMsg::TurnDiff(turn_diff_event) => {
+ handle_turn_diff(
+ conversation_id,
+ &event_turn_id,
+ turn_diff_event,
+ api_version,
+ outgoing.as_ref(),
+ )
+ .await;
+ }
+ EventMsg::PlanUpdate(plan_update_event) => {
+ handle_turn_plan_update(
+ conversation_id,
+ &event_turn_id,
+ plan_update_event,
+ api_version,
+ outgoing.as_ref(),
+ )
+ .await;
}
_ => {}
}
}
+async fn handle_turn_diff(
+ conversation_id: ThreadId,
+ event_turn_id: &str,
+ turn_diff_event: TurnDiffEvent,
+ api_version: ApiVersion,
+ outgoing: &OutgoingMessageSender,
+) {
+ if let ApiVersion::V2 = api_version {
+ let notification = TurnDiffUpdatedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.to_string(),
+ diff: turn_diff_event.unified_diff,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::TurnDiffUpdated(notification))
+ .await;
+ }
+}
+
+async fn handle_turn_plan_update(
+ conversation_id: ThreadId,
+ event_turn_id: &str,
+ plan_update_event: UpdatePlanArgs,
+ api_version: ApiVersion,
+ outgoing: &OutgoingMessageSender,
+) {
+ if let ApiVersion::V2 = api_version {
+ let notification = TurnPlanUpdatedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: event_turn_id.to_string(),
+ explanation: plan_update_event.explanation,
+ plan: plan_update_event
+ .plan
+ .into_iter()
+ .map(TurnPlanStep::from)
+ .collect(),
+ };
+ outgoing
+ .send_server_notification(ServerNotification::TurnPlanUpdated(notification))
+ .await;
+ }
+}
+
+async fn emit_turn_completed_with_status(
+ conversation_id: ThreadId,
+ event_turn_id: String,
+ status: TurnStatus,
+ error: Option,
+ outgoing: &OutgoingMessageSender,
+) {
+ let notification = TurnCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn: Turn {
+ id: event_turn_id,
+ items: vec![],
+ error,
+ status,
+ },
+ };
+ outgoing
+ .send_server_notification(ServerNotification::TurnCompleted(notification))
+ .await;
+}
+
+async fn complete_file_change_item(
+ conversation_id: ThreadId,
+ item_id: String,
+ changes: Vec,
+ status: PatchApplyStatus,
+ turn_id: String,
+ outgoing: &OutgoingMessageSender,
+ turn_summary_store: &TurnSummaryStore,
+) {
+ {
+ let mut map = turn_summary_store.lock().await;
+ if let Some(summary) = map.get_mut(&conversation_id) {
+ summary.file_change_started.remove(&item_id);
+ }
+ }
+
+ let item = ThreadItem::FileChange {
+ id: item_id,
+ changes,
+ status,
+ };
+ let notification = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id,
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemCompleted(notification))
+ .await;
+}
+
+#[allow(clippy::too_many_arguments)]
+async fn complete_command_execution_item(
+ conversation_id: ThreadId,
+ turn_id: String,
+ item_id: String,
+ command: String,
+ cwd: PathBuf,
+ process_id: Option,
+ command_actions: Vec,
+ status: CommandExecutionStatus,
+ outgoing: &OutgoingMessageSender,
+) {
+ let item = ThreadItem::CommandExecution {
+ id: item_id,
+ command,
+ cwd,
+ process_id,
+ status,
+ command_actions,
+ aggregated_output: None,
+ exit_code: None,
+ duration_ms: None,
+ };
+ let notification = ItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id,
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ItemCompleted(notification))
+ .await;
+}
+
+async fn maybe_emit_raw_response_item_completed(
+ api_version: ApiVersion,
+ conversation_id: ThreadId,
+ turn_id: &str,
+ item: codex_protocol::models::ResponseItem,
+ outgoing: &OutgoingMessageSender,
+) {
+ let ApiVersion::V2 = api_version else {
+ return;
+ };
+
+ let notification = RawResponseItemCompletedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id: turn_id.to_string(),
+ item,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::RawResponseItemCompleted(notification))
+ .await;
+}
+
+async fn find_and_remove_turn_summary(
+ conversation_id: ThreadId,
+ turn_summary_store: &TurnSummaryStore,
+) -> TurnSummary {
+ let mut map = turn_summary_store.lock().await;
+ map.remove(&conversation_id).unwrap_or_default()
+}
+
+async fn handle_turn_complete(
+ conversation_id: ThreadId,
+ event_turn_id: String,
+ outgoing: &OutgoingMessageSender,
+ turn_summary_store: &TurnSummaryStore,
+) {
+ let turn_summary = find_and_remove_turn_summary(conversation_id, turn_summary_store).await;
+
+ let (status, error) = match turn_summary.last_error {
+ Some(error) => (TurnStatus::Failed, Some(error)),
+ None => (TurnStatus::Completed, None),
+ };
+
+ emit_turn_completed_with_status(conversation_id, event_turn_id, status, error, outgoing).await;
+}
+
+async fn handle_turn_interrupted(
+ conversation_id: ThreadId,
+ event_turn_id: String,
+ outgoing: &OutgoingMessageSender,
+ turn_summary_store: &TurnSummaryStore,
+) {
+ find_and_remove_turn_summary(conversation_id, turn_summary_store).await;
+
+ emit_turn_completed_with_status(
+ conversation_id,
+ event_turn_id,
+ TurnStatus::Interrupted,
+ None,
+ outgoing,
+ )
+ .await;
+}
+
+async fn handle_thread_rollback_failed(
+ conversation_id: ThreadId,
+ message: String,
+ pending_rollbacks: &PendingRollbacks,
+ outgoing: &OutgoingMessageSender,
+) {
+ let pending_rollback = {
+ let mut map = pending_rollbacks.lock().await;
+ map.remove(&conversation_id)
+ };
+
+ if let Some(request_id) = pending_rollback {
+ outgoing
+ .send_error(
+ request_id,
+ JSONRPCErrorError {
+ code: INVALID_REQUEST_ERROR_CODE,
+ message: message.clone(),
+ data: None,
+ },
+ )
+ .await;
+ }
+}
+
+async fn handle_token_count_event(
+ conversation_id: ThreadId,
+ turn_id: String,
+ token_count_event: TokenCountEvent,
+ outgoing: &OutgoingMessageSender,
+) {
+ let TokenCountEvent { info, rate_limits } = token_count_event;
+ if let Some(token_usage) = info.map(ThreadTokenUsage::from) {
+ let notification = ThreadTokenUsageUpdatedNotification {
+ thread_id: conversation_id.to_string(),
+ turn_id,
+ token_usage,
+ };
+ outgoing
+ .send_server_notification(ServerNotification::ThreadTokenUsageUpdated(notification))
+ .await;
+ }
+ if let Some(rate_limits) = rate_limits {
+ outgoing
+ .send_server_notification(ServerNotification::AccountRateLimitsUpdated(
+ AccountRateLimitsUpdatedNotification {
+ rate_limits: rate_limits.into(),
+ },
+ ))
+ .await;
+ }
+}
+
+async fn handle_error(
+ conversation_id: ThreadId,
+ error: TurnError,
+ turn_summary_store: &TurnSummaryStore,
+) {
+ let mut map = turn_summary_store.lock().await;
+ map.entry(conversation_id).or_default().last_error = Some(error);
+}
+
async fn on_patch_approval_response(
- event_id: String,
+ event_turn_id: String,
receiver: oneshot::Receiver,
- codex: Arc,
+ codex: Arc,
) {
let response = receiver.await;
let value = match response {
@@ -316,7 +1066,7 @@ async fn on_patch_approval_response(
error!("request failed: {err:?}");
if let Err(submit_err) = codex
.submit(Op::PatchApproval {
- id: event_id.clone(),
+ id: event_turn_id.clone(),
decision: ReviewDecision::Denied,
})
.await
@@ -337,7 +1087,7 @@ async fn on_patch_approval_response(
if let Err(err) = codex
.submit(Op::PatchApproval {
- id: event_id,
+ id: event_turn_id,
decision: response.decision,
})
.await
@@ -347,9 +1097,9 @@ async fn on_patch_approval_response(
}
async fn on_exec_approval_response(
- event_id: String,
+ event_turn_id: String,
receiver: oneshot::Receiver,
- conversation: Arc,
+ conversation: Arc,
) {
let response = receiver.await;
let value = match response {
@@ -373,7 +1123,7 @@ async fn on_exec_approval_response(
if let Err(err) = conversation
.submit(Op::ExecApproval {
- id: event_id,
+ id: event_turn_id,
decision: response.decision,
})
.await
@@ -382,45 +1132,214 @@ async fn on_exec_approval_response(
}
}
-async fn on_command_execution_request_approval_response(
- event_id: String,
+const REVIEW_FALLBACK_MESSAGE: &str = "Reviewer failed to output a response.";
+
+fn render_review_output_text(output: &ReviewOutputEvent) -> String {
+ let mut sections = Vec::new();
+ let explanation = output.overall_explanation.trim();
+ if !explanation.is_empty() {
+ sections.push(explanation.to_string());
+ }
+ if !output.findings.is_empty() {
+ let findings = format_review_findings_block(&output.findings, None);
+ let trimmed = findings.trim();
+ if !trimmed.is_empty() {
+ sections.push(trimmed.to_string());
+ }
+ }
+ if sections.is_empty() {
+ REVIEW_FALLBACK_MESSAGE.to_string()
+ } else {
+ sections.join("\n\n")
+ }
+}
+
+fn convert_patch_changes(changes: &HashMap) -> Vec {
+ let mut converted: Vec = changes
+ .iter()
+ .map(|(path, change)| FileUpdateChange {
+ path: path.to_string_lossy().into_owned(),
+ kind: map_patch_change_kind(change),
+ diff: format_file_change_diff(change),
+ })
+ .collect();
+ converted.sort_by(|a, b| a.path.cmp(&b.path));
+ converted
+}
+
+fn map_patch_change_kind(change: &CoreFileChange) -> V2PatchChangeKind {
+ match change {
+ CoreFileChange::Add { .. } => V2PatchChangeKind::Add,
+ CoreFileChange::Delete { .. } => V2PatchChangeKind::Delete,
+ CoreFileChange::Update { move_path, .. } => V2PatchChangeKind::Update {
+ move_path: move_path.clone(),
+ },
+ }
+}
+
+fn format_file_change_diff(change: &CoreFileChange) -> String {
+ match change {
+ CoreFileChange::Add { content } => content.clone(),
+ CoreFileChange::Delete { content } => content.clone(),
+ CoreFileChange::Update {
+ unified_diff,
+ move_path,
+ } => {
+ if let Some(path) = move_path {
+ format!("{unified_diff}\n\nMoved to: {}", path.display())
+ } else {
+ unified_diff.clone()
+ }
+ }
+ }
+}
+
+fn map_file_change_approval_decision(
+ decision: FileChangeApprovalDecision,
+) -> (ReviewDecision, Option) {
+ match decision {
+ FileChangeApprovalDecision::Accept => (ReviewDecision::Approved, None),
+ FileChangeApprovalDecision::AcceptForSession => (ReviewDecision::ApprovedForSession, None),
+ FileChangeApprovalDecision::Decline => {
+ (ReviewDecision::Denied, Some(PatchApplyStatus::Declined))
+ }
+ FileChangeApprovalDecision::Cancel => {
+ (ReviewDecision::Abort, Some(PatchApplyStatus::Declined))
+ }
+ }
+}
+
+#[allow(clippy::too_many_arguments)]
+async fn on_file_change_request_approval_response(
+ event_turn_id: String,
+ conversation_id: ThreadId,
+ item_id: String,
+ changes: Vec,
receiver: oneshot::Receiver,
- conversation: Arc