diff --git a/README.md b/README.md
index e3e05c6..906b065 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,29 @@ else:
- **Tape-first memory**: Use anchor/handoff to bound context windows and replay full evidence.
- **Event streaming**: Subscribe to text deltas, tool calls, tool results, usage, and final state.
+## Provider Auth Resolver
+
+Republic can resolve provider keys dynamically via `api_key_resolver`.
+
+```python
+from republic import LLM, login_openai_codex_oauth, openai_codex_oauth_resolver
+
+# First-time login (paste redirect URL when prompted by your app/CLI wrapper).
+# You can wire `prompt_for_redirect` to your own input UI.
+login_openai_codex_oauth(
+ prompt_for_redirect=lambda authorize_url: input(f"Open this URL and paste callback URL:\n{authorize_url}\n> "),
+)
+
+llm = LLM(
+    model="openai:gpt-5-codex",
+ api_key_resolver=openai_codex_oauth_resolver(),
+)
+print(llm.chat("Say hello in one sentence."))
+```
+
+`openai_codex_oauth_resolver()` reads `~/.codex/auth.json` (or `$CODEX_HOME/auth.json`) and returns
+the current access token for `openai`, refreshing it automatically when it is near expiry.
+If you omit `prompt_for_redirect`, login will try to capture the callback from `redirect_uri` automatically.
## Development
```bash
diff --git a/examples/06_openai_codex_oauth.py b/examples/06_openai_codex_oauth.py
new file mode 100644
index 0000000..883464b
--- /dev/null
+++ b/examples/06_openai_codex_oauth.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import argparse
+import os
+
+from republic import LLM, login_openai_codex_oauth, openai_codex_oauth_resolver
+
+
+def parse_args() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description="Authenticate with OpenAI Codex OAuth and run a simple Republic chat.",
+ )
+ parser.add_argument(
+ "--login-only",
+ action="store_true",
+ help="Run OAuth login and persist tokens without sending a chat request.",
+ )
+ parser.add_argument(
+ "--model",
+ default=os.getenv("REPUBLIC_CODEX_MODEL", "openai:gpt-5-codex"),
+ help="Model to use after login.",
+ )
+ parser.add_argument(
+ "--prompt",
+ default="Explain tape-first workflows in one sentence.",
+ help="Prompt to send after login.",
+ )
+ return parser.parse_args()
+
+
+def prompt_for_redirect(authorize_url: str) -> str:
+ print("Open this URL in your browser and complete the sign-in flow:\n")
+ print(authorize_url)
+ print("\nPaste the full callback URL (or the authorization code) here.")
+ return input("> ").strip()
+
+
+def main() -> None:
+ args = parse_args()
+
+ tokens = login_openai_codex_oauth(
+        prompt_for_redirect=prompt_for_redirect,
+ )
+ print("login: ok")
+ print("account_id:", tokens.account_id or "-")
+
+ if args.login_only:
+ return
+
+ llm = LLM(
+ model=args.model,
+ api_key_resolver=openai_codex_oauth_resolver(),
+ )
+ out = llm.chat(args.prompt)
+
+ if out.error:
+ print("error:", out.error.kind, out.error.message)
+ return
+ print("text:", out.value)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/pyproject.toml b/pyproject.toml
index 2a30267..7f37c9c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,6 +23,8 @@ keywords = [
requires-python = ">=3.11,<4.0"
dependencies = [
"any-llm-sdk>=1.7.0",
+ "authlib>=1.6.5",
+ "httpx>=0.28.1",
"pydantic>=2.7.0",
]
classifiers = [
diff --git a/src/republic/__init__.py b/src/republic/__init__.py
index 4174dd8..32bd9fd 100644
--- a/src/republic/__init__.py
+++ b/src/republic/__init__.py
@@ -1,5 +1,9 @@
"""Republic public API."""
+from republic.auth.openai_codex import (
+ login_openai_codex_oauth,
+ openai_codex_oauth_resolver,
+)
from republic.core.results import (
AsyncStreamEvents,
AsyncTextStream,
@@ -34,6 +38,8 @@
"ToolAutoResult",
"ToolContext",
"ToolSet",
+ "login_openai_codex_oauth",
+ "openai_codex_oauth_resolver",
"schema_from_model",
"tool",
"tool_from_model",
diff --git a/src/republic/auth/__init__.py b/src/republic/auth/__init__.py
new file mode 100644
index 0000000..5a3ff3d
--- /dev/null
+++ b/src/republic/auth/__init__.py
@@ -0,0 +1,3 @@
+"""Authentication helpers."""
+
+from republic.auth.openai_codex import * # noqa: F403
diff --git a/src/republic/auth/openai_codex.py b/src/republic/auth/openai_codex.py
new file mode 100644
index 0000000..1ead805
--- /dev/null
+++ b/src/republic/auth/openai_codex.py
@@ -0,0 +1,522 @@
+"""Authentication helper utilities."""
+
+from __future__ import annotations
+
+import json
+import os
+import secrets
+import threading
+import time
+import urllib.parse
+import webbrowser
+from base64 import urlsafe_b64decode, urlsafe_b64encode
+from collections.abc import Callable
+from contextlib import suppress
+from dataclasses import dataclass
+from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
+from pathlib import Path
+from typing import Any
+
+from authlib.integrations.httpx_client import OAuth2Client
+
+_CODEX_PROVIDERS = {"openai"}
+# Keep aligned with the official Codex client default (codex-rs core/src/auth.rs::CLIENT_ID).
+_DEFAULT_CODEX_OAUTH_CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
+_DEFAULT_CODEX_OAUTH_TOKEN_URL = "https://auth.openai.com/oauth/token" # noqa: S105
+_DEFAULT_CODEX_OAUTH_AUTHORIZE_URL = "https://auth.openai.com/oauth/authorize"
+_DEFAULT_CODEX_OAUTH_SCOPE = "openid profile email offline_access"
+_DEFAULT_CODEX_OAUTH_ORIGINATOR = "codex_cli_rs"
+
+
+class CodexOAuthResponseError(TypeError):
+ """Raised when Codex OAuth token response is malformed."""
+
+
+class CodexOAuthLoginError(RuntimeError):
+ """Raised when Codex OAuth login flow cannot complete."""
+
+
+class CodexOAuthStateMismatchError(CodexOAuthLoginError):
+ """Raised when OAuth state validation fails."""
+
+
+class CodexOAuthMissingCodeError(CodexOAuthLoginError):
+ """Raised when OAuth redirect does not include authorization code."""
+
+
+def _build_oauth_callback_error_message(*, redirect_uri: str, timeout_seconds: float) -> str:
+ return (
+ "Did not receive OAuth callback. "
+ f"redirect_uri={redirect_uri!r}, timeout_seconds={timeout_seconds}. "
+ "Possible causes: callback wait timed out, local callback port is unavailable, "
+ "or redirect_uri is not a loopback HTTP address. "
+ "Try increasing timeout_seconds or use prompt_for_redirect for manual paste."
+ )
+
+
+def codex_cli_api_key_resolver(codex_home: str | Path | None = None) -> Callable[[str], str | None]:
+ """Build a provider-scoped resolver that reads Codex CLI OAuth token.
+
+ The resolver only returns a token for provider `openai`.
+ It reads from `$CODEX_HOME/auth.json` (default `~/.codex/auth.json`).
+ """
+
+ auth_path = _resolve_codex_auth_path(codex_home)
+
+ def _resolver(provider: str) -> str | None:
+ if provider not in _CODEX_PROVIDERS:
+ return None
+ try:
+ payload = json.loads(auth_path.read_text(encoding="utf-8"))
+ except (OSError, json.JSONDecodeError):
+ return None
+ if not isinstance(payload, dict):
+ return None
+
+ tokens = payload.get("tokens")
+ if not isinstance(tokens, dict):
+ return None
+
+ access_token = tokens.get("access_token")
+ if not isinstance(access_token, str):
+ return None
+ token = access_token.strip()
+ return token or None
+
+ return _resolver
+
+
+@dataclass(frozen=True)
+class OpenAICodexOAuthTokens:
+ access_token: str
+ refresh_token: str
+ expires_at: int
+ account_id: str | None = None
+
+
+def _resolve_codex_auth_path(codex_home: str | Path | None = None) -> Path:
+ if codex_home is None:
+ codex_home = os.getenv("CODEX_HOME", "~/.codex")
+ return Path(codex_home).expanduser() / "auth.json"
+
+
+def _parse_tokens(payload: dict[str, Any]) -> OpenAICodexOAuthTokens | None:
+ tokens = payload.get("tokens")
+ if not isinstance(tokens, dict):
+ return None
+
+ access_token = tokens.get("access_token")
+ refresh_token = tokens.get("refresh_token")
+ if not isinstance(access_token, str) or not isinstance(refresh_token, str):
+ return None
+ access = access_token.strip()
+ refresh = refresh_token.strip()
+ if not access or not refresh:
+ return None
+
+ expires_raw = tokens.get("expires_at")
+ if isinstance(expires_raw, (int, float)):
+ expires_at = int(expires_raw)
+ else:
+ # Codex CLI file may not persist explicit expiry.
+ # Use last_refresh + 1h or "now + 1h" as best-effort fallback.
+ last_refresh_raw = payload.get("last_refresh")
+ last_refresh = int(last_refresh_raw) if isinstance(last_refresh_raw, (int, float)) else int(time.time())
+ expires_at = last_refresh + 3600
+
+ account_id = tokens.get("account_id")
+ if not isinstance(account_id, str):
+ account_id = None
+ return OpenAICodexOAuthTokens(
+ access_token=access,
+ refresh_token=refresh,
+ expires_at=expires_at,
+ account_id=account_id,
+ )
+
+
+def load_openai_codex_oauth_tokens(codex_home: str | Path | None = None) -> OpenAICodexOAuthTokens | None:
+ auth_path = _resolve_codex_auth_path(codex_home)
+ try:
+ payload = json.loads(auth_path.read_text(encoding="utf-8"))
+ except (OSError, json.JSONDecodeError):
+ return None
+ if not isinstance(payload, dict):
+ return None
+ return _parse_tokens(payload)
+
+
+def save_openai_codex_oauth_tokens(
+ tokens: OpenAICodexOAuthTokens,
+ codex_home: str | Path | None = None,
+) -> Path:
+ auth_path = _resolve_codex_auth_path(codex_home)
+ auth_path.parent.mkdir(parents=True, exist_ok=True)
+ payload: dict[str, Any]
+ try:
+ raw = json.loads(auth_path.read_text(encoding="utf-8"))
+ except (OSError, json.JSONDecodeError):
+ raw = {}
+ payload = raw if isinstance(raw, dict) else {}
+
+ tokens_node = payload.get("tokens")
+ if not isinstance(tokens_node, dict):
+ tokens_node = {}
+ tokens_node.update({
+ "access_token": tokens.access_token,
+ "refresh_token": tokens.refresh_token,
+ "expires_at": tokens.expires_at,
+ })
+ if tokens.account_id:
+ tokens_node["account_id"] = tokens.account_id
+ payload["tokens"] = tokens_node
+ payload["last_refresh"] = int(time.time())
+
+ auth_path.write_text(json.dumps(payload, ensure_ascii=True, indent=2) + "\n", encoding="utf-8")
+ with suppress(OSError):
+ os.chmod(auth_path, 0o600)
+ return auth_path
+
+
+def refresh_openai_codex_oauth_tokens(
+ refresh_token: str,
+ *,
+ timeout_seconds: float = 15.0,
+ client_id: str = _DEFAULT_CODEX_OAUTH_CLIENT_ID,
+ token_url: str = _DEFAULT_CODEX_OAUTH_TOKEN_URL,
+) -> OpenAICodexOAuthTokens:
+ with OAuth2Client(client_id=client_id, timeout=timeout_seconds, trust_env=False) as oauth:
+ payload = oauth.refresh_token(
+ url=token_url,
+ refresh_token=refresh_token,
+ )
+ return _tokens_from_token_payload(payload, account_id=None)
+
+
+def _build_pkce_pair() -> str:
+ verifier = urlsafe_b64encode(secrets.token_bytes(32)).decode("ascii").rstrip("=")
+ return verifier
+
+
+def _build_authorize_url(
+ *,
+ client_id: str,
+ redirect_uri: str,
+ code_challenge: str,
+ state: str,
+ authorize_url: str,
+ scope: str,
+ originator: str,
+) -> str:
+ with OAuth2Client(
+ client_id=client_id,
+ redirect_uri=redirect_uri,
+ scope=scope,
+ code_challenge_method="S256",
+ trust_env=False,
+ ) as oauth:
+ url, _ = oauth.create_authorization_url(
+ authorize_url,
+ state=state,
+ code_verifier=code_challenge,
+ id_token_add_organizations="true", # noqa: S106
+ codex_cli_simplified_flow="true",
+ originator=originator,
+ )
+ return str(url)
+
+
+def _extract_code_and_state(input_value: str) -> tuple[str | None, str | None]:
+ raw = input_value.strip()
+ if not raw:
+ return None, None
+
+ parsed = urllib.parse.urlsplit(raw)
+ query = urllib.parse.parse_qs(parsed.query)
+ code = query.get("code", [None])[0]
+ state = query.get("state", [None])[0]
+ if isinstance(code, str) or isinstance(state, str):
+ return code if isinstance(code, str) else None, state if isinstance(state, str) else None
+
+ if "code=" in raw:
+ parsed_qs = urllib.parse.parse_qs(raw)
+ code = parsed_qs.get("code", [None])[0]
+ state = parsed_qs.get("state", [None])[0]
+ return code if isinstance(code, str) else None, state if isinstance(state, str) else None
+
+ return raw, None
+
+
+def _is_loopback_redirect_uri(redirect_uri: str) -> bool:
+ parsed = urllib.parse.urlsplit(redirect_uri)
+ if parsed.scheme != "http":
+ return False
+ host = (parsed.hostname or "").strip().lower()
+ return host in {"127.0.0.1", "localhost"}
+
+
+def _wait_for_local_oauth_callback(
+ *,
+ redirect_uri: str,
+ timeout_seconds: float,
+) -> tuple[str | None, str | None] | None:
+ if not _is_loopback_redirect_uri(redirect_uri):
+ return None
+
+ parsed_redirect = urllib.parse.urlsplit(redirect_uri)
+ host = parsed_redirect.hostname or "localhost"
+ port = parsed_redirect.port
+ path = parsed_redirect.path or "/"
+ if port is None:
+ return None
+
+ lock = threading.Lock()
+ state: dict[str, str | None] = {"code": None, "state": None}
+ done = threading.Event()
+
+ class _Handler(BaseHTTPRequestHandler):
+ def log_message(self, format: str, *args: Any) -> None: # noqa: A002
+ return
+
+ def do_GET(self) -> None:
+ parsed = urllib.parse.urlsplit(self.path)
+ if parsed.path != path:
+ self.send_response(404)
+ self.end_headers()
+ return
+
+ query = urllib.parse.parse_qs(parsed.query)
+ code = query.get("code", [None])[0]
+ returned_state = query.get("state", [None])[0]
+ with lock:
+ state["code"] = code if isinstance(code, str) else None
+ state["state"] = returned_state if isinstance(returned_state, str) else None
+ done.set()
+
+            body = (
+                b"<html><body>"
+                b"<p>Authentication successful. Return to your terminal.</p>"
+                b"</body></html>"
+            )
+ self.send_response(200)
+ self.send_header("Content-Type", "text/html; charset=utf-8")
+ self.send_header("Content-Length", str(len(body)))
+ self.end_headers()
+ self.wfile.write(body)
+
+ try:
+ server = ThreadingHTTPServer((host, port), _Handler)
+ except OSError:
+ return None
+
+ server.timeout = 0.2
+ deadline = time.monotonic() + timeout_seconds
+ try:
+ while not done.is_set() and time.monotonic() < deadline:
+ server.handle_request()
+ finally:
+ server.server_close()
+
+ if not done.is_set():
+ return None
+
+ with lock:
+ return state["code"], state["state"]
+
+
+def extract_openai_codex_account_id(access_token: str) -> str | None:
+ parts = access_token.split(".")
+ if len(parts) != 3:
+ return None
+ payload_segment = parts[1]
+ padding = "=" * (-len(payload_segment) % 4)
+ try:
+ payload = json.loads(urlsafe_b64decode((payload_segment + padding).encode("ascii")).decode("utf-8"))
+ except Exception:
+ return None
+ if not isinstance(payload, dict):
+ return None
+ auth = payload.get("https://api.openai.com/auth")
+ if not isinstance(auth, dict):
+ return None
+ account_id = auth.get("chatgpt_account_id")
+ if not isinstance(account_id, str):
+ return None
+ normalized = account_id.strip()
+ return normalized or None
+
+
+def _exchange_openai_codex_authorization_code(
+ code: str,
+ *,
+ verifier: str,
+ redirect_uri: str,
+ timeout_seconds: float,
+ client_id: str,
+ token_url: str,
+) -> OpenAICodexOAuthTokens:
+ with OAuth2Client(
+ client_id=client_id,
+ redirect_uri=redirect_uri,
+ code_challenge_method="S256",
+ timeout=timeout_seconds,
+ trust_env=False,
+ ) as oauth:
+ payload = oauth.fetch_token(
+ url=token_url,
+ grant_type="authorization_code",
+ code=code,
+ code_verifier=verifier,
+ )
+ account_id = extract_openai_codex_account_id(str(payload.get("access_token", "")))
+ return _tokens_from_token_payload(payload, account_id=account_id)
+
+
+def login_openai_codex_oauth(
+ *,
+ codex_home: str | Path | None = None,
+ prompt_for_redirect: Callable[[str], str] | None = None,
+ open_browser: bool = True,
+ browser_opener: Callable[[str], Any] | None = None,
+ redirect_uri: str = "http://localhost:1455/auth/callback",
+ timeout_seconds: float = 300.0,
+ client_id: str = _DEFAULT_CODEX_OAUTH_CLIENT_ID,
+ authorize_url: str = _DEFAULT_CODEX_OAUTH_AUTHORIZE_URL,
+ token_url: str = _DEFAULT_CODEX_OAUTH_TOKEN_URL,
+ scope: str = _DEFAULT_CODEX_OAUTH_SCOPE,
+ originator: str = _DEFAULT_CODEX_OAUTH_ORIGINATOR,
+) -> OpenAICodexOAuthTokens:
+ """Run minimal OpenAI Codex OAuth login flow and persist tokens."""
+
+ verifier = _build_pkce_pair()
+ state = secrets.token_hex(16)
+ oauth_url = _build_authorize_url(
+ client_id=client_id,
+ redirect_uri=redirect_uri,
+ code_challenge=verifier,
+ state=state,
+ authorize_url=authorize_url,
+ scope=scope,
+ originator=originator,
+ )
+
+ if open_browser:
+ opener = browser_opener or webbrowser.open
+ opener(oauth_url)
+
+ if prompt_for_redirect is not None:
+ callback_input = prompt_for_redirect(oauth_url)
+ code, returned_state = _extract_code_and_state(callback_input)
+ else:
+ callback_values = _wait_for_local_oauth_callback(
+ redirect_uri=redirect_uri,
+ timeout_seconds=timeout_seconds,
+ )
+ if callback_values is None:
+ message = _build_oauth_callback_error_message(
+ redirect_uri=redirect_uri,
+ timeout_seconds=timeout_seconds,
+ )
+ raise CodexOAuthLoginError(message)
+ code, returned_state = callback_values
+
+ if returned_state and returned_state != state:
+ raise CodexOAuthStateMismatchError
+ if not isinstance(code, str) or not code.strip():
+ raise CodexOAuthMissingCodeError
+
+ tokens = _exchange_openai_codex_authorization_code(
+ code=code.strip(),
+ verifier=verifier,
+ redirect_uri=redirect_uri,
+ timeout_seconds=timeout_seconds,
+ client_id=client_id,
+ token_url=token_url,
+ )
+ save_openai_codex_oauth_tokens(tokens, codex_home)
+ return tokens
+
+
+def openai_codex_oauth_resolver(
+ codex_home: str | Path | None = None,
+ *,
+ refresh_skew_seconds: int = 120,
+ refresh_timeout_seconds: float = 15.0,
+ client_id: str = _DEFAULT_CODEX_OAUTH_CLIENT_ID,
+ token_url: str = _DEFAULT_CODEX_OAUTH_TOKEN_URL,
+ refresher: Callable[[str], OpenAICodexOAuthTokens] | None = None,
+) -> Callable[[str], str | None]:
+ """Build a resolver for OpenAI Codex OAuth tokens with auto-refresh."""
+
+ lock = threading.Lock()
+ if refresher is None:
+ refresher = lambda refresh_token: refresh_openai_codex_oauth_tokens(
+ refresh_token,
+ timeout_seconds=refresh_timeout_seconds,
+ client_id=client_id,
+ token_url=token_url,
+ )
+
+ def _resolver(provider: str) -> str | None:
+ if provider not in _CODEX_PROVIDERS:
+ return None
+ with lock:
+ tokens = load_openai_codex_oauth_tokens(codex_home)
+ if tokens is None:
+ return None
+ now = int(time.time())
+ if tokens.expires_at > now + refresh_skew_seconds:
+ return tokens.access_token
+
+ try:
+ refreshed = refresher(tokens.refresh_token)
+ except Exception:
+ # Keep serving current token if it has not expired yet.
+ if tokens.expires_at > now:
+ return tokens.access_token
+ return None
+
+ persisted = OpenAICodexOAuthTokens(
+ access_token=refreshed.access_token,
+ refresh_token=refreshed.refresh_token,
+ expires_at=refreshed.expires_at,
+ account_id=refreshed.account_id or tokens.account_id,
+ )
+ save_openai_codex_oauth_tokens(persisted, codex_home)
+ return persisted.access_token
+
+ return _resolver
+
+
+def _tokens_from_token_payload(
+ payload: dict[str, Any],
+ *,
+ account_id: str | None,
+) -> OpenAICodexOAuthTokens:
+ access_token = payload.get("access_token")
+ refresh_token = payload.get("refresh_token")
+ expires_in = payload.get("expires_in")
+ if not isinstance(access_token, str) or not isinstance(refresh_token, str):
+ raise CodexOAuthResponseError
+ if not isinstance(expires_in, (int, float)):
+ raise CodexOAuthResponseError
+ normalized_access = access_token.strip()
+ return OpenAICodexOAuthTokens(
+ access_token=normalized_access,
+ refresh_token=refresh_token.strip(),
+ expires_at=int(time.time() + float(expires_in)),
+ account_id=account_id or extract_openai_codex_account_id(normalized_access),
+ )
+
+
+__all__ = [
+ "CodexOAuthLoginError",
+ "CodexOAuthMissingCodeError",
+ "CodexOAuthStateMismatchError",
+ "OpenAICodexOAuthTokens",
+ "codex_cli_api_key_resolver",
+ "extract_openai_codex_account_id",
+ "load_openai_codex_oauth_tokens",
+ "login_openai_codex_oauth",
+ "openai_codex_oauth_resolver",
+ "refresh_openai_codex_oauth_tokens",
+ "save_openai_codex_oauth_tokens",
+]
diff --git a/src/republic/clients/openai_codex.py b/src/republic/clients/openai_codex.py
new file mode 100644
index 0000000..df9aac9
--- /dev/null
+++ b/src/republic/clients/openai_codex.py
@@ -0,0 +1,603 @@
+"""Minimal ChatGPT Codex backend for OAuth-backed OpenAI sessions."""
+
+from __future__ import annotations
+
+import asyncio
+import json
+from collections.abc import Iterable, Iterator
+from dataclasses import dataclass
+from types import SimpleNamespace
+from typing import Any
+
+import httpx
+
+from republic.auth.openai_codex import extract_openai_codex_account_id
+
+DEFAULT_CODEX_BASE_URL = "https://chatgpt.com/backend-api"
+
+
+class OpenAICodexTransportError(RuntimeError):
+ def __init__(self, status_code: int | None, message: str, body: str | None = None) -> None:
+ super().__init__(message)
+ self.status_code = status_code
+ self.body = body
+
+
+@dataclass(frozen=True)
+class OpenAICodexBackendConfig:
+ api_key: str
+ api_base: str | None = None
+ originator: str = "republic"
+ timeout_seconds: float = 60.0
+
+
+def should_use_openai_codex_backend(provider: str, api_key: str | None) -> bool:
+ if provider != "openai" or not api_key:
+ return False
+ return extract_openai_codex_account_id(api_key) is not None
+
+
+class OpenAICodexClient:
+ def __init__(self, config: OpenAICodexBackendConfig) -> None:
+ self._config = config
+ account_id = extract_openai_codex_account_id(config.api_key)
+ if account_id is None:
+ raise OpenAICodexTransportError(None, "OpenAI Codex OAuth token is missing chatgpt_account_id")
+ self._account_id = account_id
+
+ def completion(
+ self,
+ *,
+ model: str,
+ messages: list[dict[str, Any]],
+ tools: list[dict[str, Any]] | None = None,
+ stream: bool = False,
+ reasoning_effort: str | None = None,
+ max_completion_tokens: int | None = None,
+ max_tokens: int | None = None,
+ **_: Any,
+ ) -> Any:
+ payload = self._build_payload(
+ model=model,
+ messages=messages,
+ tools=tools,
+ reasoning_effort=reasoning_effort,
+ max_tokens=max_completion_tokens if max_completion_tokens is not None else max_tokens,
+ )
+ return self._perform_request(payload, stream=stream)
+
+ async def acompletion(self, **kwargs: Any) -> Any:
+ response = await asyncio.to_thread(self.completion, **kwargs)
+ if not kwargs.get("stream"):
+ return response
+
+ iterator = iter(response)
+ sentinel = object()
+
+ async def _iterator() -> Any:
+ while True:
+ chunk = await asyncio.to_thread(lambda: next(iterator, sentinel))
+ if chunk is sentinel:
+ return
+ yield chunk
+
+ return _iterator()
+
+ def embedding(self, **_: Any) -> Any:
+ raise OpenAICodexTransportError(None, "OpenAI Codex backend does not support embeddings")
+
+ async def aembedding(self, **_: Any) -> Any:
+ raise OpenAICodexTransportError(None, "OpenAI Codex backend does not support embeddings")
+
+ def _perform_request(self, payload: dict[str, Any], *, stream: bool) -> Any:
+ if stream:
+ return self._stream_request(payload)
+
+ try:
+ with httpx.Client(timeout=self._config.timeout_seconds, trust_env=False) as client:
+ response = client.post(
+ self._resolve_url(self._config.api_base),
+ headers=self._build_headers(),
+ json=payload,
+ )
+ status_code = response.status_code
+ body = self._read_response_text(response)
+ if status_code >= 400:
+ raise OpenAICodexTransportError(status_code, self._format_http_error(status_code, body), body)
+ parsed = self._parse_sse_raw(body)
+ if parsed is None:
+ raise self._sse_parse_error(status_code, response.headers.get("content-type", ""), body)
+ except httpx.HTTPError as exc:
+ raise OpenAICodexTransportError(None, str(exc)) from exc
+
+ return self._build_response(parsed)
+
+ def _stream_request(self, payload: dict[str, Any]) -> Iterator[Any]:
+ def _iterator() -> Iterator[Any]:
+ try:
+ with (
+ httpx.Client(timeout=self._config.timeout_seconds, trust_env=False) as client,
+ client.stream(
+ "POST",
+ self._resolve_url(self._config.api_base),
+ headers=self._build_headers(),
+ json=payload,
+ ) as response,
+ ):
+ status_code = response.status_code
+ if status_code >= 400:
+ body = self._read_response_text(response)
+ raise OpenAICodexTransportError(status_code, self._format_http_error(status_code, body), body)
+ yielded = False
+ for chunk in self._iter_stream_chunks_from_lines(response.iter_lines()):
+ yielded = True
+ yield chunk
+ if not yielded:
+ body = self._read_response_text(response)
+ parsed = self._parse_sse_raw(body)
+ if parsed is None:
+ raise self._sse_parse_error(status_code, response.headers.get("content-type", ""), body)
+ yield from parsed["chunks"]
+ except httpx.HTTPError as exc:
+ raise OpenAICodexTransportError(None, str(exc)) from exc
+
+ return _iterator()
+
+ def _build_payload(
+ self,
+ *,
+ model: str,
+ messages: list[dict[str, Any]],
+ tools: list[dict[str, Any]] | None,
+ reasoning_effort: str | None,
+ max_tokens: int | None,
+ ) -> dict[str, Any]:
+ instructions, input_items = self._convert_messages(messages)
+ payload: dict[str, Any] = {
+ "model": model,
+ "store": False,
+ "stream": True,
+ "instructions": instructions or "You are Codex.",
+ "input": input_items,
+ "include": ["reasoning.encrypted_content"],
+ "text": {"verbosity": "medium"},
+ }
+ responses_tools = self._convert_tools(tools)
+ if responses_tools:
+ payload["tools"] = responses_tools
+ payload["tool_choice"] = "auto"
+ payload["parallel_tool_calls"] = True
+ if reasoning_effort is not None:
+ payload["reasoning"] = {"effort": reasoning_effort, "summary": "auto"}
+ return payload
+
+ def _build_headers(self) -> dict[str, str]:
+ return {
+ "Authorization": f"Bearer {self._config.api_key}",
+ "chatgpt-account-id": self._account_id,
+ "OpenAI-Beta": "responses=experimental",
+ "originator": self._config.originator,
+ "accept": "text/event-stream",
+ "content-type": "application/json",
+ "user-agent": "republic-openai-codex/0",
+ }
+
+ @staticmethod
+ def _resolve_url(api_base: str | None) -> str:
+ raw = (api_base or DEFAULT_CODEX_BASE_URL).rstrip("/")
+ if raw.endswith("/codex/responses"):
+ return raw
+ if raw.endswith("/codex"):
+ return f"{raw}/responses"
+ return f"{raw}/codex/responses"
+
+ @staticmethod
+ def _convert_tools(tools: list[dict[str, Any]] | None) -> list[dict[str, Any]] | None:
+ if not tools:
+ return None
+ converted: list[dict[str, Any]] = []
+ for tool in tools:
+ if not isinstance(tool, dict):
+ continue
+ function = tool.get("function")
+ if isinstance(function, dict):
+ name = function.get("name")
+ parameters = function.get("parameters")
+ if isinstance(name, str) and isinstance(parameters, dict):
+ converted.append({
+ "type": "function",
+ "name": name,
+ "description": function.get("description", "") or "",
+ "parameters": parameters,
+ })
+ continue
+ if tool.get("type") == "function" and isinstance(tool.get("name"), str):
+ converted.append(dict(tool))
+ return converted or None
+
+ @staticmethod
+ def _stringify_content(content: Any) -> str:
+ if isinstance(content, str):
+ return content
+ if isinstance(content, list):
+ parts: list[str] = []
+ for item in content:
+ if isinstance(item, str):
+ parts.append(item)
+ elif isinstance(item, dict):
+ text = item.get("text") or item.get("content")
+ if isinstance(text, str):
+ parts.append(text)
+ return "\n".join(part for part in parts if part)
+ if isinstance(content, dict):
+ text = content.get("text") or content.get("content")
+ if isinstance(text, str):
+ return text
+ return str(content)
+
+ @staticmethod
+ def _stringify_arguments(value: Any) -> str | None:
+ if isinstance(value, str):
+ return value
+ if isinstance(value, (dict, list, int, float, bool)) or value is None:
+ try:
+ return json.dumps(value, ensure_ascii=False, separators=(",", ":"))
+ except TypeError:
+ return None
+ return None
+
+ @classmethod
+ def _extract_assistant_function_calls(cls, message: dict[str, Any]) -> list[dict[str, Any]]:
+ raw_calls = message.get("tool_calls")
+ if not isinstance(raw_calls, list):
+ return []
+
+ calls: list[dict[str, Any]] = []
+ for raw in raw_calls:
+ if not isinstance(raw, dict):
+ continue
+ function = raw.get("function")
+ if not isinstance(function, dict):
+ continue
+ name = function.get("name")
+ arguments = cls._stringify_arguments(function.get("arguments"))
+ if not isinstance(name, str) or not name or arguments is None:
+ continue
+
+ call: dict[str, Any] = {
+ "type": "function_call",
+ "name": name,
+ "arguments": arguments,
+ }
+ call_id = raw.get("id")
+ if isinstance(call_id, str) and call_id:
+ call["call_id"] = call_id
+ calls.append(call)
+ return calls
+
+ @classmethod
+ def _convert_messages(cls, messages: list[dict[str, Any]]) -> tuple[str | None, list[dict[str, Any]]]:
+ instructions: list[str] = []
+ items: list[dict[str, Any]] = []
+ for message in messages:
+ role = str(message.get("role", "user"))
+ content = cls._stringify_content(message.get("content", ""))
+ if role == "system":
+ if content:
+ instructions.append(content)
+ continue
+ if role == "assistant":
+ if content:
+ items.append({
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": content}],
+ })
+ items.extend(cls._extract_assistant_function_calls(message))
+ continue
+ if role == "tool":
+ call_id = message.get("tool_call_id")
+ if isinstance(call_id, str) and call_id and content:
+ items.append({
+ "type": "function_call_output",
+ "call_id": call_id,
+ "output": content,
+ })
+ continue
+ if content:
+ items.append({
+ "role": role,
+ "content": [{"type": "input_text", "text": content}],
+ })
+ return ("\n\n".join(instructions) or None), items
+
+ @staticmethod
+ def _extract_fallback_text(item: Any) -> str | None:
+ if not isinstance(item, dict) or item.get("type") != "message":
+ return None
+ content = item.get("content")
+ if not isinstance(content, list):
+ return None
+ collected: list[str] = []
+ for entry in content:
+ if not isinstance(entry, dict):
+ continue
+ text = entry.get("text")
+ if isinstance(text, str) and text:
+ collected.append(text)
+ return "".join(collected) or None
+
+ @staticmethod
+ def _extract_tool_call(item: Any) -> dict[str, Any] | None:
+ if not isinstance(item, dict) or item.get("type") != "function_call":
+ return None
+ name = item.get("name")
+ arguments = item.get("arguments")
+ if not isinstance(name, str) or not isinstance(arguments, str):
+ return None
+ call: dict[str, Any] = {
+ "type": "function",
+ "function": {
+ "name": name,
+ "arguments": arguments,
+ },
+ }
+ call_id = item.get("call_id")
+ if isinstance(call_id, str) and call_id:
+ call["id"] = call_id
+ return call
+
+ @staticmethod
+ def _make_text_chunk(text: str) -> Any:
+ delta = SimpleNamespace(content=text, tool_calls=[])
+ choice = SimpleNamespace(delta=delta)
+ return SimpleNamespace(choices=[choice], usage=None)
+
+ @staticmethod
+ def _make_tool_chunk(tool_call: dict[str, Any]) -> Any:
+ function = tool_call.get("function", {})
+ delta_tool_call = SimpleNamespace(
+ id=tool_call.get("id"),
+ type=tool_call.get("type"),
+ function=SimpleNamespace(
+ name=function.get("name"),
+ arguments=function.get("arguments"),
+ ),
+ )
+ delta = SimpleNamespace(content="", tool_calls=[delta_tool_call])
+ choice = SimpleNamespace(delta=delta)
+ return SimpleNamespace(choices=[choice], usage=None)
+
+ @staticmethod
+ def _make_usage_chunk(usage: dict[str, Any]) -> Any:
+ delta = SimpleNamespace(content="", tool_calls=[])
+ choice = SimpleNamespace(delta=delta)
+ return SimpleNamespace(choices=[choice], usage=usage)
+
    @classmethod
    def _handle_stream_event(
        cls,
        event: dict[str, Any],
        *,
        parts: list[str],
        tool_calls: list[dict[str, Any]],
        chunks: list[Any],
        fallback_text: str | None,
        usage: dict[str, Any] | None,
    ) -> tuple[str | None, dict[str, Any] | None]:
        """Fold one decoded SSE event into the buffered-parse accumulators.

        Mutates ``parts``, ``tool_calls`` and ``chunks`` in place and returns the
        (possibly updated) ``(fallback_text, usage)`` pair, which the caller must
        thread back into the next call. Unrecognized event types are ignored.
        """
        event_type = event.get("type")
        if event_type == "response.output_text.delta":
            delta = event.get("delta")
            if isinstance(delta, str):
                parts.append(delta)
                chunks.append(cls._make_text_chunk(delta))
            return fallback_text, usage
        if event_type == "response.output_item.done":
            item = event.get("item")
            tool_call = cls._extract_tool_call(item)
            if tool_call is not None:
                tool_calls.append(tool_call)
                chunks.append(cls._make_tool_chunk(tool_call))
                return fallback_text, usage
            # Non-tool-call items may still carry text usable as a fallback
            # when no text deltas were streamed.
            return cls._extract_fallback_text(item) or fallback_text, usage
        if event_type in {"response.completed", "response.done"}:
            # Prefer usage from the terminal event; otherwise keep the prior value.
            next_usage = cls._update_usage_from_event(event) or usage
            if next_usage is not None:
                chunks.append(cls._make_usage_chunk(next_usage))
            return fallback_text, next_usage
        return fallback_text, usage
+
+ @classmethod
+ def _iter_stream_chunks(cls, events: Any) -> Iterator[Any]:
+ for event in cls._iter_sse_events(events):
+ event_type = event.get("type")
+ if event_type == "response.output_text.delta":
+ delta = event.get("delta")
+ if isinstance(delta, str):
+ yield cls._make_text_chunk(delta)
+ continue
+ if event_type == "response.output_item.done":
+ tool_call = cls._extract_tool_call(event.get("item"))
+ if tool_call is not None:
+ yield cls._make_tool_chunk(tool_call)
+ continue
+ if event_type in {"response.completed", "response.done"}:
+ usage = cls._update_usage_from_event(event)
+ if usage is not None:
+ yield cls._make_usage_chunk(usage)
+
+ @classmethod
+ def _iter_stream_chunks_from_lines(cls, lines: Iterator[str]) -> Iterator[Any]:
+ pseudo_events = (SimpleNamespace(data=data) for data in cls._extract_sse_data_messages(lines))
+ yield from cls._iter_stream_chunks(pseudo_events)
+
+ @staticmethod
+ def _update_usage_from_event(event: dict[str, Any]) -> dict[str, Any] | None:
+ response = event.get("response")
+ if not isinstance(response, dict):
+ return None
+ raw_usage = response.get("usage")
+ if isinstance(raw_usage, dict):
+ return raw_usage
+ return None
+
+ @staticmethod
+ def _raise_event_error(event: dict[str, Any]) -> None:
+ event_type = event.get("type")
+ if event_type == "error":
+ message = event.get("message")
+ if isinstance(message, str) and message:
+ raise OpenAICodexTransportError(502, message)
+ return
+ if event_type != "response.failed":
+ return
+ response = event.get("response")
+ if not isinstance(response, dict):
+ return
+ error = response.get("error")
+ if not isinstance(error, dict):
+ return
+ message = error.get("message")
+ if isinstance(message, str) and message:
+ raise OpenAICodexTransportError(502, message)
+
+ @classmethod
+ def _iter_sse_events(cls, events: Any) -> Iterator[dict[str, Any]]:
+ for message in events:
+ data = getattr(message, "data", None)
+ if not isinstance(data, str):
+ continue
+ data = data.strip()
+ if not data or data == "[DONE]":
+ continue
+ try:
+ event = json.loads(data)
+ except json.JSONDecodeError:
+ continue
+ cls._raise_event_error(event)
+ if isinstance(event, dict):
+ yield event
+
+ @classmethod
+ def _parse_sse(cls, events: Any) -> dict[str, Any]:
+ parts: list[str] = []
+ usage: dict[str, Any] | None = None
+ fallback_text: str | None = None
+ tool_calls: list[dict[str, Any]] = []
+ chunks: list[Any] = []
+ for event in cls._iter_sse_events(events):
+ fallback_text, usage = cls._handle_stream_event(
+ event,
+ parts=parts,
+ tool_calls=tool_calls,
+ chunks=chunks,
+ fallback_text=fallback_text,
+ usage=usage,
+ )
+ return {
+ "text": "".join(parts) or fallback_text or "",
+ "usage": usage,
+ "tool_calls": tool_calls,
+ "chunks": chunks,
+ }
+
+ @staticmethod
+ def _build_response(parsed: dict[str, Any]) -> Any:
+ tool_calls_raw = parsed.get("tool_calls") or []
+ tool_calls = [
+ SimpleNamespace(
+ id=item.get("id"),
+ type=item.get("type"),
+ function=SimpleNamespace(
+ name=item.get("function", {}).get("name"),
+ arguments=item.get("function", {}).get("arguments"),
+ ),
+ )
+ for item in tool_calls_raw
+ if isinstance(item, dict)
+ ]
+ message = SimpleNamespace(content=parsed.get("text", ""), tool_calls=tool_calls)
+ choice = SimpleNamespace(message=message)
+ usage = parsed.get("usage")
+ return SimpleNamespace(choices=[choice], usage=usage)
+
+ @staticmethod
+ def _format_http_error(status_code: int, body: str) -> str:
+ try:
+ payload = json.loads(body)
+ except json.JSONDecodeError:
+ payload = None
+ if isinstance(payload, dict):
+ detail = payload.get("detail")
+ if isinstance(detail, str) and detail:
+ return f"Error code: {status_code} - {detail}"
+ error = payload.get("error")
+ if isinstance(error, dict):
+ message = error.get("message")
+ if isinstance(message, str) and message:
+ return f"Error code: {status_code} - {payload}"
+ return f"Error code: {status_code} - {body}"
+
    @staticmethod
    def _read_response_text(response: httpx.Response) -> str:
        """Best-effort read of an httpx response body as text.

        Returns "" when the body cannot be read (e.g. an already-consumed
        stream); otherwise decodes with the response's declared encoding,
        replacing undecodable bytes rather than raising.
        """
        try:
            raw = response.read()
        except Exception:
            # Deliberate best-effort: any read failure degrades to empty text.
            return ""
        encoding = response.encoding or "utf-8"
        return raw.decode(encoding, errors="replace")
+
+ @staticmethod
+ def _sse_parse_error(status_code: int, content_type: str, body: str) -> OpenAICodexTransportError:
+ body_preview = body.strip()[:800]
+ if len(body.strip()) > 800:
+ body_preview += "...(truncated)"
+ message = (
+ "Failed to parse SSE response; "
+ f"status={status_code}; content-type={content_type!r}; "
+ f"body={body_preview or ''}"
+ )
+ return OpenAICodexTransportError(status_code, message, body or None)
+
+ @classmethod
+ def _parse_sse_raw(cls, body: str) -> dict[str, Any] | None:
+ if not cls._looks_like_sse_payload(body):
+ return None
+ messages = cls._extract_sse_data_messages(body.splitlines())
+ if not messages:
+ return None
+ pseudo_events = [SimpleNamespace(data=item) for item in messages]
+ return cls._parse_sse(pseudo_events)
+
+ @staticmethod
+ def _looks_like_sse_payload(body: str) -> bool:
+ stripped = body.lstrip()
+ return stripped.startswith("data:") or "\ndata:" in body or stripped.startswith("event:")
+
+ @staticmethod
+ def _extract_sse_data_messages(lines: Iterable[str]) -> list[str]:
+ messages: list[str] = []
+ current: list[str] = []
+ for raw_line in lines:
+ line = raw_line.rstrip("\r")
+ if line.startswith("data:"):
+ current.append(line[5:].lstrip())
+ continue
+ if not line:
+ if current:
+ messages.append("\n".join(current))
+ current = []
+ continue
+ if current and not line.startswith(("event:", "id:", "retry:")):
+ current.append(line)
+ if current:
+ messages.append("\n".join(current))
+ return messages
+
+
# Public surface of this module; keep in sync with the definitions above.
__all__ = [
    "DEFAULT_CODEX_BASE_URL",
    "OpenAICodexBackendConfig",
    "OpenAICodexClient",
    "OpenAICodexTransportError",
    "should_use_openai_codex_backend",
]
diff --git a/src/republic/core/execution.py b/src/republic/core/execution.py
index ccc3a73..dcd4fe6 100644
--- a/src/republic/core/execution.py
+++ b/src/republic/core/execution.py
@@ -26,6 +26,11 @@
UnsupportedProviderError,
)
+from republic.clients.openai_codex import (
+ OpenAICodexBackendConfig,
+ OpenAICodexClient,
+ should_use_openai_codex_backend,
+)
from republic.core.errors import ErrorKind, RepublicError
logger = logging.getLogger(__name__)
@@ -59,6 +64,7 @@ def __init__(
fallback_models: list[str],
max_retries: int,
api_key: str | dict[str, str] | None,
+ api_key_resolver: Callable[[str], str | None] | None,
api_base: str | dict[str, str] | None,
client_args: dict[str, Any],
use_responses: bool,
@@ -70,12 +76,13 @@ def __init__(
self._fallback_models = fallback_models
self._max_retries = max_retries
self._api_key = api_key
+ self._api_key_resolver = api_key_resolver
self._api_base = api_base
self._client_args = client_args
self._use_responses = use_responses
self._verbose = verbose
self._error_classifier = error_classifier
- self._client_cache: dict[str, AnyLLM] = {}
+ self._client_cache: dict[str, Any] = {}
@property
def provider(self) -> str:
@@ -143,8 +150,17 @@ def iter_clients(self, override_model: str | None, override_provider: str | None
def _resolve_api_key(self, provider: str) -> str | None:
if isinstance(self._api_key, dict):
- return self._api_key.get(provider)
- return self._api_key
+ key = self._api_key.get(provider)
+ if key is not None:
+ return key
+ if self._api_key_resolver is not None:
+ return self._api_key_resolver(provider)
+ return None
+ if self._api_key is not None:
+ return self._api_key
+ if self._api_key_resolver is not None:
+ return self._api_key_resolver(provider)
+ return None
def _resolve_api_base(self, provider: str) -> str | None:
if isinstance(self._api_base, dict):
@@ -169,17 +185,25 @@ def _freeze(value: Any) -> Any:
}
return json.dumps(payload, sort_keys=True, separators=(",", ":"))
- def get_client(self, provider: str) -> AnyLLM:
+ def get_client(self, provider: str) -> Any:
api_key = self._resolve_api_key(provider)
api_base = self._resolve_api_base(provider)
cache_key = self._freeze_cache_key(provider, api_key, api_base)
if cache_key not in self._client_cache:
- self._client_cache[cache_key] = AnyLLM.create(
- provider,
- api_key=api_key,
- api_base=api_base,
- **self._client_args,
- )
+ if should_use_openai_codex_backend(provider, api_key):
+ self._client_cache[cache_key] = OpenAICodexClient(
+ OpenAICodexBackendConfig(
+ api_key=api_key or "",
+ api_base=api_base,
+ )
+ )
+ else:
+ self._client_cache[cache_key] = AnyLLM.create(
+ provider,
+ api_key=api_key,
+ api_base=api_base,
+ **self._client_args,
+ )
return self._client_cache[cache_key]
def log_error(self, error: RepublicError, provider: str, model: str, attempt: int) -> None:
diff --git a/src/republic/llm.py b/src/republic/llm.py
index 445f27b..422925a 100644
--- a/src/republic/llm.py
+++ b/src/republic/llm.py
@@ -46,6 +46,7 @@ def __init__(
fallback_models: list[str] | None = None,
max_retries: int = 3,
api_key: str | dict[str, str] | None = None,
+ api_key_resolver: Callable[[str], str | None] | None = None,
api_base: str | dict[str, str] | None = None,
client_args: dict[str, Any] | None = None,
use_responses: bool = False,
@@ -71,6 +72,7 @@ def __init__(
fallback_models=fallback_models or [],
max_retries=max_retries,
api_key=api_key,
+ api_key_resolver=api_key_resolver,
api_base=api_base,
client_args=client_args or {},
use_responses=use_responses,
diff --git a/tests/test_auth_resolver.py b/tests/test_auth_resolver.py
new file mode 100644
index 0000000..8c3f74d
--- /dev/null
+++ b/tests/test_auth_resolver.py
@@ -0,0 +1,306 @@
+from __future__ import annotations
+
+import pytest
+
+import republic.core.execution as execution
+from republic import (
+ LLM,
+ login_openai_codex_oauth,
+ openai_codex_oauth_resolver,
+)
+from republic.auth.openai_codex import (
+ CodexOAuthLoginError,
+ CodexOAuthMissingCodeError,
+ CodexOAuthStateMismatchError,
+ OpenAICodexOAuthTokens,
+ codex_cli_api_key_resolver,
+ save_openai_codex_oauth_tokens,
+)
+
+from .fakes import FakeAnyLLMFactory, make_response
+
+
def _setup_anyllm_create(monkeypatch) -> tuple[FakeAnyLLMFactory, list[tuple[str, dict[str, object]]]]:
    """Patch AnyLLM.create to record calls and delegate to a fake factory.

    Returns the factory plus the list of (provider, kwargs) pairs captured
    from every create() call made by the code under test.
    """
    created: list[tuple[str, dict[str, object]]] = []
    factory = FakeAnyLLMFactory()

    def _create(provider: str, **kwargs: object):
        created.append((provider, dict(kwargs)))
        return factory.create(provider, **kwargs)

    monkeypatch.setattr(execution.AnyLLM, "create", _create)
    return factory, created
+
+
def _set_fixed_oauth_state(monkeypatch) -> None:
    """Pin the random OAuth state token to a known value for deterministic asserts."""
    monkeypatch.setattr("republic.auth.openai_codex.secrets.token_hex", lambda _: "state-fixed")
+
+
+def _oauth_redirect_url(*, state: str, code: str | None = None) -> str:
+ base = "http://127.0.0.1:1455/auth/callback"
+ if code is None:
+ return f"{base}?state={state}"
+ return f"{base}?code={code}&state={state}"
+
+
def _save_oauth_tokens(
    tmp_path,
    *,
    access_token: str,
    refresh_token: str,
    expires_at: int,
    account_id: str | None = None,
) -> None:
    """Persist a token set under tmp_path, used as the codex home in tests."""
    save_openai_codex_oauth_tokens(
        OpenAICodexOAuthTokens(
            access_token=access_token,
            refresh_token=refresh_token,
            expires_at=expires_at,
            account_id=account_id,
        ),
        tmp_path,
    )
+
+
def test_llm_uses_api_key_resolver_when_api_key_is_missing(monkeypatch) -> None:
    """With no api_key set, the resolver's value is passed to AnyLLM.create."""
    factory, created = _setup_anyllm_create(monkeypatch)

    client = factory.ensure("openai")
    client.queue_completion(make_response(text="ok"))

    llm = LLM(
        model="openai:gpt-5.3-codex",
        api_key_resolver=lambda provider: "oauth-token" if provider == "openai" else None,
    )
    assert llm.chat("hello") == "ok"
    assert created[0][0] == "openai"
    assert created[0][1]["api_key"] == "oauth-token"
+
+
def test_explicit_api_key_has_priority_over_resolver(monkeypatch) -> None:
    """An explicit per-provider api_key wins over the resolver's value."""
    factory, created = _setup_anyllm_create(monkeypatch)

    client = factory.ensure("openai")
    client.queue_completion(make_response(text="ok"))

    llm = LLM(
        model="openai:gpt-5.3-codex",
        api_key={"openai": "explicit-key"},
        api_key_resolver=lambda _: "oauth-token",
    )
    assert llm.chat("hello") == "ok"
    assert created[0][1]["api_key"] == "explicit-key"
+
+
def test_provider_map_falls_back_to_resolver_for_missing_provider(monkeypatch) -> None:
    """A provider absent from the api_key map falls back to the resolver."""
    factory, created = _setup_anyllm_create(monkeypatch)

    client = factory.ensure("openai")
    client.queue_completion(make_response(text="ok"))

    llm = LLM(
        model="openai:gpt-5.3-codex",
        api_key={"anthropic": "anthropic-key"},
        api_key_resolver=lambda provider: "oauth-token" if provider == "openai" else None,
    )
    assert llm.chat("hello") == "ok"
    assert created[0][1]["api_key"] == "oauth-token"
+
+
def test_codex_cli_api_key_resolver_reads_access_token(tmp_path) -> None:
    """The resolver reads (and trims) the access token from auth.json, openai-only."""
    auth_path = tmp_path / "auth.json"
    auth_path.write_text('{"tokens": {"access_token": " token-123 "}}', encoding="utf-8")

    resolver = codex_cli_api_key_resolver(tmp_path)
    assert resolver("openai") == "token-123"
    assert resolver("openai-codex") is None
    assert resolver("anthropic") is None
+
+
def test_openai_codex_oauth_resolver_refreshes_expiring_token(tmp_path) -> None:
    """A near-expiry token is refreshed once; the new token is persisted and reused."""
    _save_oauth_tokens(
        tmp_path,
        access_token="old-token",  # noqa: S106
        refresh_token="refresh-1",  # noqa: S106
        expires_at=1,
        account_id="acct-1",
    )

    calls: list[str] = []

    def _refresher(refresh_token: str) -> OpenAICodexOAuthTokens:
        calls.append(refresh_token)
        return OpenAICodexOAuthTokens(
            access_token="new-token",  # noqa: S106
            refresh_token="refresh-2",  # noqa: S106
            expires_at=4_102_444_800,  # 2100-01-01
        )

    resolver = openai_codex_oauth_resolver(tmp_path, refresher=_refresher)
    assert resolver("openai") == "new-token"
    assert calls == ["refresh-1"]

    # Should persist refreshed token and avoid another refresh.
    assert resolver("openai") == "new-token"
    assert calls == ["refresh-1"]
+
+
def test_openai_codex_oauth_resolver_returns_none_when_expired_and_refresh_fails(tmp_path) -> None:
    """An expired token plus a failing refresher yields None, not an exception."""
    _save_oauth_tokens(
        tmp_path,
        access_token="old-token",  # noqa: S106
        refresh_token="refresh-1",  # noqa: S106
        expires_at=1,
    )

    resolver = openai_codex_oauth_resolver(
        tmp_path,
        refresher=lambda _: (_ for _ in ()).throw(RuntimeError("refresh failed")),
    )
    assert resolver("openai") is None
+
+
def test_openai_codex_oauth_resolver_uses_current_token_if_refresh_fails_but_not_expired(tmp_path) -> None:
    """If refresh fails but the stored token is still valid, it is returned as-is."""
    _save_oauth_tokens(
        tmp_path,
        access_token="still-valid",  # noqa: S106
        refresh_token="refresh-1",  # noqa: S106
        expires_at=4_102_444_800,
    )

    resolver = openai_codex_oauth_resolver(
        tmp_path,
        refresh_skew_seconds=4_102_444_799,
        refresher=lambda _: (_ for _ in ()).throw(RuntimeError("refresh failed")),
    )
    assert resolver("openai") == "still-valid"
+
+
def test_login_openai_codex_oauth_success_persists_tokens(monkeypatch, tmp_path) -> None:
    """Full login flow: browser opened, code exchanged once, tokens persisted."""
    exchange_calls: list[tuple[str, str, str, float, str, str]] = []

    def _exchange(
        code: str,
        *,
        verifier: str,
        redirect_uri: str,
        timeout_seconds: float,
        client_id: str,
        token_url: str,
    ) -> OpenAICodexOAuthTokens:
        exchange_calls.append((code, verifier, redirect_uri, timeout_seconds, client_id, token_url))
        return OpenAICodexOAuthTokens(
            access_token="access-token",  # noqa: S106
            refresh_token="refresh-token",  # noqa: S106
            expires_at=4_102_444_800,
            account_id="acct-1",
        )

    monkeypatch.setattr("republic.auth.openai_codex._exchange_openai_codex_authorization_code", _exchange)
    _set_fixed_oauth_state(monkeypatch)

    opened: list[str] = []

    def _open(url: str):
        opened.append(url)
        return True

    def _prompt(url: str) -> str:
        assert "state=state-fixed" in url
        return _oauth_redirect_url(state="state-fixed", code="auth-code")

    tokens = login_openai_codex_oauth(
        codex_home=tmp_path,
        prompt_for_redirect=_prompt,
        browser_opener=_open,
    )

    expected_access_token = "access-token"  # noqa: S105
    assert tokens.access_token == expected_access_token
    assert len(exchange_calls) == 1
    assert exchange_calls[0][0] == "auth-code"
    assert opened

    resolver = codex_cli_api_key_resolver(tmp_path)
    assert resolver("openai") == expected_access_token
+
+
def test_login_openai_codex_oauth_raises_on_state_mismatch(monkeypatch, tmp_path) -> None:
    """A callback with the wrong state aborts before any code exchange happens."""
    monkeypatch.setattr(
        "republic.auth.openai_codex._exchange_openai_codex_authorization_code",
        lambda *args, **kwargs: (_ for _ in ()).throw(AssertionError),
    )
    _set_fixed_oauth_state(monkeypatch)

    def _prompt(_: str) -> str:
        return _oauth_redirect_url(state="wrong", code="auth-code")

    with pytest.raises(CodexOAuthStateMismatchError):
        login_openai_codex_oauth(
            codex_home=tmp_path,
            prompt_for_redirect=_prompt,
            open_browser=False,
        )
+
+
def test_login_openai_codex_oauth_raises_on_missing_code(monkeypatch, tmp_path) -> None:
    """A callback without an authorization code aborts before any exchange."""
    monkeypatch.setattr(
        "republic.auth.openai_codex._exchange_openai_codex_authorization_code",
        lambda *args, **kwargs: (_ for _ in ()).throw(AssertionError),
    )
    _set_fixed_oauth_state(monkeypatch)

    def _prompt(_: str) -> str:
        return _oauth_redirect_url(state="state-fixed")

    with pytest.raises(CodexOAuthMissingCodeError):
        login_openai_codex_oauth(
            codex_home=tmp_path,
            prompt_for_redirect=_prompt,
            open_browser=False,
        )
+
+
def test_login_openai_codex_oauth_uses_local_callback_without_prompt(monkeypatch, tmp_path) -> None:
    """Without a prompt, the login captures code/state from the local callback."""
    _set_fixed_oauth_state(monkeypatch)
    monkeypatch.setattr(
        "republic.auth.openai_codex._wait_for_local_oauth_callback",
        lambda **_: ("auth-code", "state-fixed"),
    )

    def _exchange(
        code: str,
        *,
        verifier: str,
        redirect_uri: str,
        timeout_seconds: float,
        client_id: str,
        token_url: str,
    ) -> OpenAICodexOAuthTokens:
        assert code == "auth-code"
        return OpenAICodexOAuthTokens(
            access_token="access-token",  # noqa: S106
            refresh_token="refresh-token",  # noqa: S106
            expires_at=4_102_444_800,
        )

    monkeypatch.setattr("republic.auth.openai_codex._exchange_openai_codex_authorization_code", _exchange)

    tokens = login_openai_codex_oauth(
        codex_home=tmp_path,
        prompt_for_redirect=None,
        open_browser=False,
    )
    assert tokens.access_token == "access-token"  # noqa: S105
+
+
def test_login_openai_codex_oauth_raises_without_prompt_and_without_callback(monkeypatch, tmp_path) -> None:
    """No prompt and no local callback means login fails with a clear error."""
    _set_fixed_oauth_state(monkeypatch)
    monkeypatch.setattr("republic.auth.openai_codex._wait_for_local_oauth_callback", lambda **_: None)

    with pytest.raises(CodexOAuthLoginError, match="Did not receive OAuth callback"):
        login_openai_codex_oauth(
            codex_home=tmp_path,
            prompt_for_redirect=None,
            open_browser=False,
        )
diff --git a/tests/test_openai_codex_transport.py b/tests/test_openai_codex_transport.py
new file mode 100644
index 0000000..81a053c
--- /dev/null
+++ b/tests/test_openai_codex_transport.py
@@ -0,0 +1,345 @@
+from __future__ import annotations
+
+import base64
+import json
+from types import SimpleNamespace
+from typing import Any, cast
+
+import pytest
+
+import republic.core.execution as execution
+from republic import LLM, tool
+from republic.auth.openai_codex import extract_openai_codex_account_id
+
# JWT-shaped token whose payload segment carries a ChatGPT account-id claim.
# Only the (unpadded, base64url) middle segment is parsed by the code under
# test; the "aaa"/"bbb" header and signature segments are dummies.
JWT_PAYLOAD = json.dumps({"https://api.openai.com/auth": {"chatgpt_account_id": "acct-test"}}).encode("utf-8")
TOKEN = "aaa." + base64.urlsafe_b64encode(JWT_PAYLOAD).decode("ascii").rstrip("=") + ".bbb"
+
+
class FakeHTTPBodyResponse:
    """Minimal stand-in for a buffered httpx response (status, headers, body)."""

    def __init__(self, *, status_code: int = 200, body: str = "", content_type: str = "text/event-stream") -> None:
        self.status_code = status_code
        self._body = body
        # Only the content-type header matters to the transport under test.
        self.headers = {"content-type": content_type}
        self.encoding = "utf-8"
        self.text = body

    def read(self) -> bytes:
        # Mirrors httpx.Response.read(): returns the raw body bytes.
        return self._body.encode("utf-8")
+
+
class FakeHTTPStreamResponse(FakeHTTPBodyResponse):
    """Streaming variant: adds context-manager use and line iteration."""

    def __enter__(self) -> FakeHTTPStreamResponse:
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        return None

    def iter_lines(self):
        # Split on "\n" to mimic httpx's line iteration over an SSE body.
        yield from self._body.split("\n")
+
+
class FakeHTTPClient:
    """Fake httpx.Client serving a canned SSE body for both post() and stream().

    When ``captured`` is provided, each request's method, URL, headers and JSON
    body are recorded into it for later assertions.
    """

    def __init__(
        self,
        *,
        sse: str,
        captured: dict[str, Any] | None,
        status_code: int = 200,
        content_type: str = "text/event-stream",
    ) -> None:
        self._sse = sse
        self._captured = captured
        self._status_code = status_code
        self._content_type = content_type

    def __enter__(self) -> FakeHTTPClient:
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        return None

    def post(self, url, *, headers=None, json=None):
        # NOTE: the ``json`` kwarg intentionally mirrors httpx's API and shadows
        # the module-level json import within this method.
        if self._captured is not None:
            self._captured["method"] = "POST"
            self._captured["url"] = url
            self._captured["headers"] = headers or {}
            self._captured["body"] = json
        return FakeHTTPBodyResponse(status_code=self._status_code, body=self._sse, content_type=self._content_type)

    def stream(self, method, url, *, headers=None, json=None):
        if self._captured is not None:
            self._captured["method"] = method
            self._captured["url"] = url
            self._captured["headers"] = headers or {}
            self._captured["body"] = json
        return FakeHTTPStreamResponse(status_code=self._status_code, body=self._sse, content_type=self._content_type)
+
+
def _patch_codex_stream(
    monkeypatch,
    *,
    sse: str,
    captured: dict[str, Any] | None = None,
    status_code: int = 200,
    content_type: str = "text/event-stream",
) -> None:
    """Replace httpx.Client in the codex transport with the canned-SSE fake."""
    monkeypatch.setattr(
        "republic.clients.openai_codex.httpx.Client",
        lambda *args, **kwargs: FakeHTTPClient(
            sse=sse,
            captured=captured,
            status_code=status_code,
            content_type=content_type,
        ),
    )
+
+
def _unexpected_create(*args, **kwargs):
    """Fail the test if AnyLLM.create is reached when the codex backend should be used."""
    raise AssertionError
+
+
+def _sse(*events: dict[str, object]) -> str:
+ return "\n\n".join("data: " + json.dumps(event) for event in events) + "\n\n"
+
+
def _build_codex_oauth_llm(
    monkeypatch,
    *,
    sse: str,
    model: str = "openai:gpt-5-codex",
    capture_request: bool = False,
) -> tuple[LLM, dict[str, Any] | None]:
    """Build an LLM wired to the fake codex SSE transport.

    AnyLLM.create is patched to fail, so the test asserts the codex backend
    is chosen. Returns the LLM and the captured-request dict (or None).
    """
    monkeypatch.setattr(execution.AnyLLM, "create", _unexpected_create)
    captured: dict[str, Any] | None = {} if capture_request else None
    _patch_codex_stream(monkeypatch, sse=sse, captured=captured)
    llm = LLM(
        model=model,
        api_key_resolver=lambda provider: TOKEN if provider == "openai" else None,
    )
    return llm, captured
+
+
def test_extract_openai_codex_account_id_reads_jwt_claim() -> None:
    """JWT tokens yield the account id; plain API keys yield None."""
    assert extract_openai_codex_account_id(TOKEN) == "acct-test"
    assert extract_openai_codex_account_id("sk-test") is None
+
+
def test_openai_oauth_token_uses_codex_backend(monkeypatch) -> None:
    """An OAuth JWT routes through the codex backend with the expected request shape."""
    llm, captured = _build_codex_oauth_llm(
        monkeypatch,
        sse=_sse(
            {"type": "response.output_text.delta", "delta": "hello"},
            {"type": "response.output_text.delta", "delta": " world"},
            {
                "type": "response.completed",
                "response": {"usage": {"input_tokens": 1, "output_tokens": 2, "total_tokens": 3}},
            },
        ),
        model="openai:gpt-5.3-codex",
        capture_request=True,
    )

    assert llm.chat("Say hello") == "hello world"
    assert isinstance(captured, dict)
    assert captured["url"] == "https://chatgpt.com/backend-api/codex/responses"
    raw_headers = cast(dict[str, Any], captured["headers"])
    headers = {str(k).lower(): str(v) for k, v in raw_headers.items()}
    assert headers["authorization"] == f"Bearer {TOKEN}"
    assert headers["chatgpt-account-id"] == "acct-test"
    assert headers["openai-beta"] == "responses=experimental"
    body = cast(dict[str, Any], captured["body"])
    assert body["model"] == "gpt-5.3-codex"
    assert body["stream"] is True
    assert body["input"][0]["role"] == "user"
+
+
def test_openai_oauth_tool_calls_are_parsed_and_tools_are_converted(monkeypatch) -> None:
    """Tool-call SSE items are surfaced and tool schemas converted to Responses form."""
    sse = _sse(
        {
            "type": "response.output_item.done",
            "item": {
                "type": "function_call",
                "id": "fc_1",
                "call_id": "call_1",
                "name": "echo",
                "arguments": json.dumps({"message": "hello"}),
                "status": "completed",
            },
        },
        {"type": "response.completed", "response": {"status": "completed"}},
    )

    @tool
    def echo(message: str) -> str:
        return message

    llm, captured = _build_codex_oauth_llm(monkeypatch, sse=sse, capture_request=True)

    calls = llm.tool_calls("Use echo", tools=[echo])
    assert calls == [
        {
            "id": "call_1",
            "type": "function",
            "function": {
                "name": "echo",
                "arguments": json.dumps({"message": "hello"}),
            },
        }
    ]
    assert isinstance(captured, dict)
    body = cast(dict[str, Any], captured["body"])
    assert body["tools"][0]["type"] == "function"
    assert body["tools"][0]["name"] == "echo"
    assert body["tools"][0]["description"] == ""
    assert body["tools"][0]["parameters"]["type"] == "object"
    assert body["tools"][0]["parameters"]["properties"]["message"]["type"] == "string"
    assert body["tools"][0]["parameters"]["required"] == ["message"]
+
+
def test_openai_oauth_run_tools_executes_tool(monkeypatch) -> None:
    """run_tools executes the tool named in the SSE tool-call item."""
    sse = _sse(
        {
            "type": "response.output_item.done",
            "item": {
                "type": "function_call",
                "id": "fc_1",
                "call_id": "call_1",
                "name": "echo",
                "arguments": json.dumps({"message": "hello"}),
                "status": "completed",
            },
        },
        {"type": "response.completed", "response": {"status": "completed"}},
    )

    @tool
    def echo(message: str) -> str:
        return f"echo:{message}"

    llm, _ = _build_codex_oauth_llm(monkeypatch, sse=sse)

    result = llm.run_tools("Use echo", tools=[echo])
    assert result.kind == "tools"
    assert result.tool_calls[0]["function"]["name"] == "echo"
    assert result.tool_results == ["echo:hello"]
+
+
def test_regular_openai_key_still_uses_anyllm(monkeypatch) -> None:
    """A plain sk- key bypasses the codex backend and goes through AnyLLM."""
    created: list[tuple[str, dict[str, object]]] = []

    class StubClient:
        def completion(self, **kwargs):
            return SimpleNamespace(
                choices=[SimpleNamespace(message=SimpleNamespace(content="ok", tool_calls=[]))], usage=None
            )

        async def acompletion(self, **kwargs):
            return self.completion(**kwargs)

    def _create(provider: str, **kwargs: object):
        created.append((provider, dict(kwargs)))
        return StubClient()

    monkeypatch.setattr(execution.AnyLLM, "create", _create)

    llm = LLM(
        model="openai:gpt-4o-mini",
        api_key="sk-test",
    )

    assert llm.chat("Say hello") == "ok"
    assert created[0][0] == "openai"
    assert created[0][1]["api_key"] == "sk-test"
+
+
def test_openai_oauth_stream_yields_text_and_usage(monkeypatch) -> None:
    """Streaming yields text deltas in order and exposes final usage/error state."""
    llm, _ = _build_codex_oauth_llm(
        monkeypatch,
        sse=_sse(
            {"type": "response.output_text.delta", "delta": "Checking "},
            {"type": "response.output_text.delta", "delta": "tools"},
            {
                "type": "response.completed",
                "response": {"usage": {"input_tokens": 3, "output_tokens": 2, "total_tokens": 5}},
            },
        ),
    )

    stream = llm.stream("Check tools")
    assert list(stream) == ["Checking ", "tools"]
    assert stream.error is None
    assert stream.usage == {"input_tokens": 3, "output_tokens": 2, "total_tokens": 5}
+
+
def test_openai_oauth_stream_events_carries_tools_usage_and_final(monkeypatch) -> None:
    """stream_events emits text, tool_call, tool_result, usage and a final event."""
    sse = _sse(
        {"type": "response.output_text.delta", "delta": "Checking "},
        {
            "type": "response.output_item.done",
            "item": {
                "type": "function_call",
                "id": "fc_1",
                "call_id": "call_1",
                "name": "echo",
                "arguments": json.dumps({"message": "tokyo"}),
                "status": "completed",
            },
        },
        {
            "type": "response.completed",
            "response": {"usage": {"input_tokens": 5, "output_tokens": 3, "total_tokens": 8}},
        },
    )

    @tool
    def echo(message: str) -> str:
        return message.upper()

    llm, _ = _build_codex_oauth_llm(monkeypatch, sse=sse)

    stream = llm.stream_events("Call echo for tokyo", tools=[echo])
    events = list(stream)
    kinds = [event.kind for event in events]

    assert "text" in kinds
    assert "tool_call" in kinds
    assert "tool_result" in kinds
    assert "usage" in kinds
    assert kinds[-1] == "final"

    tool_result = next(event for event in events if event.kind == "tool_result")
    assert tool_result.data["result"] == "TOKYO"
    assert stream.error is None
    assert stream.usage == {"input_tokens": 5, "output_tokens": 3, "total_tokens": 8}
+
+
@pytest.mark.asyncio
async def test_openai_oauth_stream_events_async_executes_tool_handler(monkeypatch) -> None:
    """The async event stream awaits async tool handlers and reports their results."""
    sse = _sse(
        {
            "type": "response.output_item.done",
            "item": {
                "type": "function_call",
                "id": "fc_1",
                "call_id": "call_1",
                "name": "echo",
                "arguments": json.dumps({"message": "tokyo"}),
                "status": "completed",
            },
        },
        {"type": "response.completed", "response": {"status": "completed"}},
    )

    @tool
    async def echo(message: str) -> str:
        return message.upper()

    llm, _ = _build_codex_oauth_llm(monkeypatch, sse=sse)

    stream = await llm.stream_events_async("Call echo for tokyo", tools=[echo])
    events = [event async for event in stream]
    tool_results = [event for event in events if event.kind == "tool_result"]

    assert len(tool_results) == 1
    assert tool_results[0].data["result"] == "TOKYO"
    assert stream.error is None
diff --git a/uv.lock b/uv.lock
index caeb112..1d0d8eb 100644
--- a/uv.lock
+++ b/uv.lock
@@ -39,6 +39,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
]
+[[package]]
+name = "authlib"
+version = "1.6.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cryptography" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" },
+]
+
[[package]]
name = "cachetools"
version = "7.0.0"
@@ -57,6 +69,76 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
]
+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
+ { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
+ { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
+ { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
+ { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
+ { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
+ { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
+ { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
+ { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
+ { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
+ { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
+ { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
+ { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
+ { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
+ { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
+ { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
+]
+
[[package]]
name = "chardet"
version = "5.2.0"
@@ -87,6 +169,65 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
+[[package]]
+name = "cryptography"
+version = "46.0.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
+ { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
+ { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
+ { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
+ { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
+ { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" },
+ { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" },
+ { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" },
+ { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
+ { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
+ { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
+ { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
+ { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
+ { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
+ { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
+ { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
+ { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
+ { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
+]
+
[[package]]
name = "distlib"
version = "0.4.0"
@@ -586,6 +727,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/22/e7/740997ca82574d03426f897fd88afe3fc8a7306b8c7ea342a8bc1c538488/prek-0.3.2-py3-none-win_arm64.whl", hash = "sha256:9144d176d0daa2469a25c303ef6f6fa95a8df015eb275232f5cb53551ecefef0", size = 4336008, upload-time = "2026-02-06T13:49:52.27Z" },
]
+[[package]]
+name = "pycparser"
+version = "3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
+]
+
[[package]]
name = "pydantic"
version = "2.12.5"
@@ -846,6 +996,8 @@ version = "0.5.3"
source = { editable = "." }
dependencies = [
{ name = "any-llm-sdk" },
+ { name = "authlib" },
+ { name = "httpx" },
{ name = "pydantic" },
]
@@ -866,6 +1018,8 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "any-llm-sdk", specifier = ">=1.7.0" },
+ { name = "authlib", specifier = ">=1.6.5" },
+ { name = "httpx", specifier = ">=0.28.1" },
{ name = "pydantic", specifier = ">=2.7.0" },
]