diff --git a/bun.lock b/bun.lock index 0a7a87cb..505b779e 100644 --- a/bun.lock +++ b/bun.lock @@ -13,6 +13,7 @@ "@stricli/auto-complete": "^1.2.4", "@stricli/core": "^1.2.4", "@types/bun": "latest", + "@types/http-cache-semantics": "^4.2.0", "@types/node": "^22", "@types/qrcode-terminal": "^0.12.2", "@types/semver": "^7.7.1", @@ -21,6 +22,7 @@ "cli-highlight": "^2.1.11", "esbuild": "^0.25.0", "fast-check": "^4.5.3", + "http-cache-semantics": "^4.2.0", "ignore": "^7.0.5", "marked": "^15", "p-limit": "^7.2.0", @@ -275,6 +277,8 @@ "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], + "@types/http-cache-semantics": ["@types/http-cache-semantics@4.2.0", "", {}, "sha512-L3LgimLHXtGkWikKnsPg0/VFx9OGZaC+eN1u4r+OB1XRqH3meBIAVC2zr1WdMH+RHmnRkqliQAOHNJ/E0j/e0Q=="], + "@types/mysql": ["@types/mysql@2.15.27", "", { "dependencies": { "@types/node": "*" } }, "sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA=="], "@types/node": ["@types/node@22.19.7", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw=="], @@ -383,6 +387,8 @@ "highlight.js": ["highlight.js@10.7.3", "", {}, "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A=="], + "http-cache-semantics": ["http-cache-semantics@4.2.0", "", {}, "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ=="], + "https-proxy-agent": ["https-proxy-agent@5.0.1", "", { "dependencies": { "agent-base": "6", "debug": "4" } }, "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA=="], "ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="], diff --git a/package.json 
b/package.json index 02caddba..58a3a295 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,7 @@ "@stricli/auto-complete": "^1.2.4", "@stricli/core": "^1.2.4", "@types/bun": "latest", + "@types/http-cache-semantics": "^4.2.0", "@types/node": "^22", "@types/qrcode-terminal": "^0.12.2", "@types/semver": "^7.7.1", @@ -22,6 +23,7 @@ "cli-highlight": "^2.1.11", "esbuild": "^0.25.0", "fast-check": "^4.5.3", + "http-cache-semantics": "^4.2.0", "ignore": "^7.0.5", "marked": "^15", "p-limit": "^7.2.0", diff --git a/plugins/sentry-cli/skills/sentry-cli/SKILL.md b/plugins/sentry-cli/skills/sentry-cli/SKILL.md index 1ced85af..940bcdc6 100644 --- a/plugins/sentry-cli/skills/sentry-cli/SKILL.md +++ b/plugins/sentry-cli/skills/sentry-cli/SKILL.md @@ -89,6 +89,7 @@ View authentication status **Flags:** - `--show-token - Show the stored token (masked by default)` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -106,6 +107,7 @@ Show the currently authenticated user **Flags:** - `--json - Output as JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Org @@ -118,6 +120,7 @@ List organizations **Flags:** - `-n, --limit - Maximum number of organizations to list - (default: "30")` - `--json - Output JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -134,6 +137,7 @@ View details of an organization **Flags:** - `--json - Output as JSON` - `-w, --web - Open in browser` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -166,6 +170,7 @@ List projects - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` - `-p, --platform - Filter by platform (e.g., javascript, python)` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -187,6 +192,7 @@ View details of a project **Flags:** - `--json - Output as JSON` - `-w, --web - Open in browser` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -220,6 +226,7 @@ List issues in a 
project - `-t, --period - Time period for issue activity (e.g. 24h, 14d, 90d) - (default: "90d")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor for / or multi-target modes (use "last" to continue)` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -264,6 +271,7 @@ Analyze an issue's root cause using Seer AI **Flags:** - `--json - Output as JSON` - `--force - Force new analysis even if one exists` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -291,6 +299,7 @@ Generate a solution plan using Seer AI - `--cause - Root cause ID to plan (required if multiple causes exist)` - `--json - Output as JSON` - `--force - Force new plan even if one exists` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -318,6 +327,7 @@ View details of a specific issue - `--json - Output as JSON` - `-w, --web - Open in browser` - `--spans - Span tree depth limit (number, "all" for unlimited, "no" to disable) - (default: "3")` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -345,6 +355,7 @@ View details of a specific event - `--json - Output as JSON` - `-w, --web - Open in browser` - `--spans - Span tree depth limit (number, "all" for unlimited, "no" to disable) - (default: "3")` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -467,6 +478,7 @@ List repositories - `-n, --limit - Maximum number of repositories to list - (default: "30")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Team @@ -480,6 +492,7 @@ List teams - `-n, --limit - Maximum number of teams to list - (default: "30")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -510,6 +523,7 @@ List logs from a project - `-f, --follow - Stream logs (optionally specify poll interval 
in seconds)` - `--trace - Filter logs by trace ID (32-character hex string)` - `--json - Output as JSON` +- `--fresh - Bypass cache and fetch fresh data` **Examples:** @@ -555,6 +569,7 @@ View details of a specific log entry **Flags:** - `--json - Output as JSON` - `-w, --web - Open in browser` +- `-f, --fresh - Bypass cache and fetch fresh data` **Examples:** @@ -591,6 +606,7 @@ List recent traces in a project - `-s, --sort - Sort by: date, duration - (default: "date")` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` - `--json - Output as JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` #### `sentry trace view ` @@ -600,6 +616,7 @@ View details of a specific trace - `--json - Output as JSON` - `-w, --web - Open in browser` - `--spans - Span tree depth limit (number, "all" for unlimited, "no" to disable) - (default: "3")` +- `-f, --fresh - Bypass cache and fetch fresh data` #### `sentry trace logs ` @@ -611,6 +628,7 @@ View logs associated with a trace - `-t, --period - Time period to search (e.g., "14d", "7d", "24h"). Default: 14d - (default: "14d")` - `-n, --limit - Number of log entries (1-1000) - (default: "100")` - `-q, --query - Additional filter query (Sentry search syntax)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Issues @@ -627,6 +645,7 @@ List issues in a project - `-t, --period - Time period for issue activity (e.g. 
24h, 14d, 90d) - (default: "90d")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor for / or multi-target modes (use "last" to continue)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Orgs @@ -639,6 +658,7 @@ List organizations **Flags:** - `-n, --limit - Maximum number of organizations to list - (default: "30")` - `--json - Output JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Projects @@ -653,6 +673,7 @@ List projects - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` - `-p, --platform - Filter by platform (e.g., javascript, python)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Repos @@ -666,6 +687,7 @@ List repositories - `-n, --limit - Maximum number of repositories to list - (default: "30")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Teams @@ -679,6 +701,7 @@ List teams - `-n, --limit - Maximum number of teams to list - (default: "30")` - `--json - Output JSON` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Logs @@ -694,6 +717,7 @@ List logs from a project - `-f, --follow - Stream logs (optionally specify poll interval in seconds)` - `--trace - Filter logs by trace ID (32-character hex string)` - `--json - Output as JSON` +- `--fresh - Bypass cache and fetch fresh data` ### Traces @@ -709,6 +733,7 @@ List recent traces in a project - `-s, --sort - Sort by: date, duration - (default: "date")` - `-c, --cursor - Pagination cursor (use "last" to continue from previous page)` - `--json - Output as JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` ### Whoami @@ -720,6 +745,7 @@ Show the currently authenticated user **Flags:** - `--json - Output as JSON` +- `-f, --fresh - Bypass cache and fetch fresh data` ## Output Formats diff --git 
a/src/commands/auth/login.ts b/src/commands/auth/login.ts index 996370c6..162639c7 100644 --- a/src/commands/auth/login.ts +++ b/src/commands/auth/login.ts @@ -8,6 +8,7 @@ import { AuthError } from "../../lib/errors.js"; import { muted, success } from "../../lib/formatters/colors.js"; import { formatUserIdentity } from "../../lib/formatters/human.js"; import { runInteractiveLogin } from "../../lib/interactive-login.js"; +import { clearResponseCache } from "../../lib/response-cache.js"; type LoginFlags = { readonly token?: string; @@ -52,6 +53,13 @@ export const loginCommand = buildCommand({ // Token-based authentication if (flags.token) { + // Clear stale cached responses from a previous session + try { + await clearResponseCache(); + } catch { + // Non-fatal: cache directory may not exist + } + // Save token first, then validate by fetching user regions await setAuthToken(flags.token); @@ -90,6 +98,13 @@ export const loginCommand = buildCommand({ return; } + // Clear stale cached responses from a previous session + try { + await clearResponseCache(); + } catch { + // Non-fatal: cache directory may not exist + } + // Device Flow OAuth const loginSuccess = await runInteractiveLogin( stdout, diff --git a/src/commands/auth/status.ts b/src/commands/auth/status.ts index 6cbd945f..07e228fa 100644 --- a/src/commands/auth/status.ts +++ b/src/commands/auth/status.ts @@ -25,10 +25,16 @@ import { formatUserIdentity, maskToken, } from "../../lib/formatters/human.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import type { Writer } from "../../types/index.js"; type StatusFlags = { readonly "show-token": boolean; + readonly fresh: boolean; }; /** @@ -131,9 +137,12 @@ export const statusCommand = buildCommand({ brief: "Show the stored token (masked by default)", default: false, }, + fresh: FRESH_FLAG, }, + aliases: FRESH_ALIASES, }, async func(this: SentryContext, flags: StatusFlags): Promise { + applyFreshFlag(flags); const { 
stdout, stderr } = this; const auth = await getAuthConfig(); diff --git a/src/commands/auth/whoami.ts b/src/commands/auth/whoami.ts index 678d8964..b7454e73 100644 --- a/src/commands/auth/whoami.ts +++ b/src/commands/auth/whoami.ts @@ -13,9 +13,15 @@ import { isAuthenticated } from "../../lib/db/auth.js"; import { setUserInfo } from "../../lib/db/user.js"; import { AuthError } from "../../lib/errors.js"; import { formatUserIdentity, writeJson } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; type WhoamiFlags = { readonly json: boolean; + readonly fresh: boolean; }; export const whoamiCommand = buildCommand({ @@ -33,9 +39,12 @@ export const whoamiCommand = buildCommand({ brief: "Output as JSON", default: false, }, + fresh: FRESH_FLAG, }, + aliases: FRESH_ALIASES, }, async func(this: SentryContext, flags: WhoamiFlags): Promise { + applyFreshFlag(flags); const { stdout } = this; if (!(await isAuthenticated())) { diff --git a/src/commands/event/view.ts b/src/commands/event/view.ts index 0007d026..22027b1d 100644 --- a/src/commands/event/view.ts +++ b/src/commands/event/view.ts @@ -21,6 +21,11 @@ import { openInBrowser } from "../../lib/browser.js"; import { buildCommand } from "../../lib/command.js"; import { ContextError, ResolutionError } from "../../lib/errors.js"; import { formatEventDetails, writeJson } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { resolveEffectiveOrg } from "../../lib/region.js"; import { resolveOrgAndProject, @@ -38,6 +43,7 @@ type ViewFlags = { readonly json: boolean; readonly web: boolean; readonly spans: number; + readonly fresh: boolean; }; type HumanOutputOptions = { @@ -301,14 +307,16 @@ export const viewCommand = buildCommand({ default: false, }, ...spansFlag, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async 
func( this: SentryContext, flags: ViewFlags, ...args: string[] ): Promise { + applyFreshFlag(flags); const { stdout, cwd } = this; // Parse positional args diff --git a/src/commands/issue/explain.ts b/src/commands/issue/explain.ts index ac091b53..5145168e 100644 --- a/src/commands/issue/explain.ts +++ b/src/commands/issue/explain.ts @@ -12,6 +12,11 @@ import { formatRootCauseList, handleSeerApiError, } from "../../lib/formatters/seer.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { extractRootCauses } from "../../types/seer.js"; import { ensureRootCauseAnalysis, @@ -22,6 +27,7 @@ import { type ExplainFlags = { readonly json: boolean; readonly force: boolean; + readonly fresh: boolean; }; export const explainCommand = buildCommand({ @@ -61,13 +67,16 @@ export const explainCommand = buildCommand({ brief: "Force new analysis even if one exists", default: false, }, + fresh: FRESH_FLAG, }, + aliases: FRESH_ALIASES, }, async func( this: SentryContext, flags: ExplainFlags, issueArg: string ): Promise { + applyFreshFlag(flags); const { stdout, stderr, cwd } = this; // Declare org outside try block so it's accessible in catch for error messages diff --git a/src/commands/issue/list.ts b/src/commands/issue/list.ts index f2d7606a..1bb3a68d 100644 --- a/src/commands/issue/list.ts +++ b/src/commands/issue/list.ts @@ -43,8 +43,11 @@ import { writeJson, } from "../../lib/formatters/index.js"; import { + applyFreshFlag, buildListCommand, buildListLimitFlag, + FRESH_ALIASES, + FRESH_FLAG, LIST_BASE_ALIASES, LIST_JSON_FLAG, LIST_TARGET_POSITIONAL, @@ -80,6 +83,7 @@ type ListFlags = { readonly period: string; readonly json: boolean; readonly cursor?: string; + readonly fresh: boolean; }; /** @internal */ export type SortValue = "date" | "new" | "freq" | "user"; @@ -1177,14 +1181,22 @@ export const listCommand = buildListCommand("issue", { 'Pagination cursor for / or multi-target modes (use "last" to continue)', optional: 
true, }, + fresh: FRESH_FLAG, + }, + aliases: { + ...LIST_BASE_ALIASES, + ...FRESH_ALIASES, + q: "query", + s: "sort", + t: "period", }, - aliases: { ...LIST_BASE_ALIASES, q: "query", s: "sort", t: "period" }, }, async func( this: SentryContext, flags: ListFlags, target?: string ): Promise { + applyFreshFlag(flags); const { stdout, stderr, cwd, setContext } = this; const parsed = parseOrgProjectArg(target); diff --git a/src/commands/issue/plan.ts b/src/commands/issue/plan.ts index 20d55e0c..91a82f7e 100644 --- a/src/commands/issue/plan.ts +++ b/src/commands/issue/plan.ts @@ -15,6 +15,11 @@ import { formatSolution, handleSeerApiError, } from "../../lib/formatters/seer.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import type { Writer } from "../../types/index.js"; import { type AutofixState, @@ -34,6 +39,7 @@ type PlanFlags = { readonly cause?: number; readonly json: boolean; readonly force: boolean; + readonly fresh: boolean; }; /** @@ -174,13 +180,16 @@ export const planCommand = buildCommand({ brief: "Force new plan even if one exists", default: false, }, + fresh: FRESH_FLAG, }, + aliases: FRESH_ALIASES, }, async func( this: SentryContext, flags: PlanFlags, issueArg: string ): Promise { + applyFreshFlag(flags); const { stdout, stderr, cwd } = this; // Declare org outside try block so it's accessible in catch for error messages diff --git a/src/commands/issue/view.ts b/src/commands/issue/view.ts index 744ec0c2..a7ded81f 100644 --- a/src/commands/issue/view.ts +++ b/src/commands/issue/view.ts @@ -16,6 +16,11 @@ import { writeFooter, writeJson, } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { getSpanTreeLines } from "../../lib/span-tree.js"; import type { SentryEvent, SentryIssue, Writer } from "../../types/index.js"; import { issueIdPositional, resolveIssue } from "./utils.js"; @@ -24,6 +29,7 @@ type ViewFlags = { 
readonly json: boolean; readonly web: boolean; readonly spans: number; + readonly fresh: boolean; }; /** @@ -100,14 +106,16 @@ export const viewCommand = buildCommand({ default: false, }, ...spansFlag, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async func( this: SentryContext, flags: ViewFlags, issueArg: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd, setContext } = this; // Resolve issue using shared resolution logic diff --git a/src/commands/log/list.ts b/src/commands/log/list.ts index 4afc8224..6ae15679 100644 --- a/src/commands/log/list.ts +++ b/src/commands/log/list.ts @@ -26,7 +26,9 @@ import { import { renderInlineMarkdown } from "../../lib/formatters/markdown.js"; import type { StreamingTable } from "../../lib/formatters/text-table.js"; import { + applyFreshFlag, buildListCommand, + FRESH_FLAG, TARGET_PATTERN_NOTE, } from "../../lib/list-command.js"; import { @@ -43,6 +45,7 @@ type ListFlags = { readonly follow?: number; readonly json: boolean; readonly trace?: string; + readonly fresh: boolean; }; /** Maximum allowed value for --limit flag */ @@ -419,6 +422,7 @@ export const listCommand = buildListCommand("log", { brief: "Output as JSON", default: false, }, + fresh: FRESH_FLAG, }, aliases: { n: "limit", @@ -431,6 +435,7 @@ export const listCommand = buildListCommand("log", { flags: ListFlags, target?: string ): Promise { + applyFreshFlag(flags); const { stdout, stderr, cwd, setContext } = this; if (flags.trace) { diff --git a/src/commands/log/view.ts b/src/commands/log/view.ts index f3518c6a..e99856a1 100644 --- a/src/commands/log/view.ts +++ b/src/commands/log/view.ts @@ -14,6 +14,11 @@ import { openInBrowser } from "../../lib/browser.js"; import { buildCommand } from "../../lib/command.js"; import { ContextError, ValidationError } from "../../lib/errors.js"; import { formatLogDetails, writeJson } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + 
FRESH_FLAG, +} from "../../lib/list-command.js"; import { resolveOrgAndProject, resolveProjectBySlug, @@ -24,6 +29,7 @@ import type { DetailedSentryLog, Writer } from "../../types/index.js"; type ViewFlags = { readonly json: boolean; readonly web: boolean; + readonly fresh: boolean; }; /** Usage hint for ContextError messages */ @@ -131,14 +137,16 @@ export const viewCommand = buildCommand({ brief: "Open in browser", default: false, }, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async func( this: SentryContext, flags: ViewFlags, ...args: string[] ): Promise { + applyFreshFlag(flags); const { stdout, cwd, setContext } = this; // Parse positional args diff --git a/src/commands/org/list.ts b/src/commands/org/list.ts index cada4194..f8600e9c 100644 --- a/src/commands/org/list.ts +++ b/src/commands/org/list.ts @@ -12,11 +12,18 @@ import { getAllOrgRegions } from "../../lib/db/regions.js"; import { writeFooter, writeJson } from "../../lib/formatters/index.js"; import { escapeMarkdownCell } from "../../lib/formatters/markdown.js"; import { type Column, writeTable } from "../../lib/formatters/table.js"; -import { buildListLimitFlag, LIST_JSON_FLAG } from "../../lib/list-command.js"; +import { + applyFreshFlag, + buildListLimitFlag, + FRESH_ALIASES, + FRESH_FLAG, + LIST_JSON_FLAG, +} from "../../lib/list-command.js"; type ListFlags = { readonly limit: number; readonly json: boolean; + readonly fresh: boolean; }; /** @@ -69,11 +76,13 @@ export const listCommand = buildCommand({ flags: { limit: buildListLimitFlag("organizations"), json: LIST_JSON_FLAG, + fresh: FRESH_FLAG, }, // Only -n for --limit; no -c since org list has no --cursor flag - aliases: { n: "limit" }, + aliases: { ...FRESH_ALIASES, n: "limit" }, }, async func(this: SentryContext, flags: ListFlags): Promise { + applyFreshFlag(flags); const { stdout } = this; const orgs = await listOrganizations(); diff --git a/src/commands/org/view.ts b/src/commands/org/view.ts 
index 8a7c39e6..d51b5898 100644 --- a/src/commands/org/view.ts +++ b/src/commands/org/view.ts @@ -10,12 +10,18 @@ import { openInBrowser } from "../../lib/browser.js"; import { buildCommand } from "../../lib/command.js"; import { ContextError } from "../../lib/errors.js"; import { formatOrgDetails, writeOutput } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { resolveOrg } from "../../lib/resolve-target.js"; import { buildOrgUrl } from "../../lib/sentry-urls.js"; type ViewFlags = { readonly json: boolean; readonly web: boolean; + readonly fresh: boolean; }; export const viewCommand = buildCommand({ @@ -51,14 +57,16 @@ export const viewCommand = buildCommand({ brief: "Open in browser", default: false, }, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async func( this: SentryContext, flags: ViewFlags, orgSlug?: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd } = this; const resolved = await resolveOrg({ org: orgSlug, cwd }); diff --git a/src/commands/project/list.ts b/src/commands/project/list.ts index 4fe7ae3a..25eb4040 100644 --- a/src/commands/project/list.ts +++ b/src/commands/project/list.ts @@ -35,8 +35,11 @@ import { writeFooter, writeJson } from "../../lib/formatters/index.js"; import { escapeMarkdownCell } from "../../lib/formatters/markdown.js"; import { type Column, writeTable } from "../../lib/formatters/table.js"; import { + applyFreshFlag, buildListCommand, buildListLimitFlag, + FRESH_ALIASES, + FRESH_FLAG, LIST_BASE_ALIASES, LIST_CURSOR_FLAG, LIST_JSON_FLAG, @@ -59,6 +62,7 @@ type ListFlags = { readonly json: boolean; readonly cursor?: string; readonly platform?: string; + readonly fresh: boolean; }; /** @@ -601,14 +605,16 @@ export const listCommand = buildListCommand("project", { brief: "Filter by platform (e.g., javascript, python)", optional: true, }, + fresh: FRESH_FLAG, }, - aliases: { 
...LIST_BASE_ALIASES, p: "platform" }, + aliases: { ...LIST_BASE_ALIASES, ...FRESH_ALIASES, p: "platform" }, }, async func( this: SentryContext, flags: ListFlags, target?: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd } = this; const parsed = parseOrgProjectArg(target); diff --git a/src/commands/project/view.ts b/src/commands/project/view.ts index 70db27bc..dccd0388 100644 --- a/src/commands/project/view.ts +++ b/src/commands/project/view.ts @@ -20,7 +20,12 @@ import { writeJson, writeOutput, } from "../../lib/formatters/index.js"; -import { TARGET_PATTERN_NOTE } from "../../lib/list-command.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, + TARGET_PATTERN_NOTE, +} from "../../lib/list-command.js"; import { type ResolvedTarget, resolveAllTargets, @@ -32,6 +37,7 @@ import type { SentryProject } from "../../types/index.js"; type ViewFlags = { readonly json: boolean; readonly web: boolean; + readonly fresh: boolean; }; /** Usage hint for ContextError messages */ @@ -203,14 +209,16 @@ export const viewCommand = buildCommand({ brief: "Open in browser", default: false, }, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async func( this: SentryContext, flags: ViewFlags, targetArg?: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd } = this; const parsed = parseOrgProjectArg(targetArg); diff --git a/src/commands/trace/list.ts b/src/commands/trace/list.ts index 908d2d1e..75c39b6a 100644 --- a/src/commands/trace/list.ts +++ b/src/commands/trace/list.ts @@ -19,7 +19,10 @@ import { writeJson, } from "../../lib/formatters/index.js"; import { + applyFreshFlag, buildListCommand, + FRESH_ALIASES, + FRESH_FLAG, LIST_CURSOR_FLAG, TARGET_PATTERN_NOTE, } from "../../lib/list-command.js"; @@ -31,6 +34,7 @@ type ListFlags = { readonly sort: "date" | "duration"; readonly json: boolean; readonly cursor?: string; + readonly fresh: boolean; }; type SortValue = "date" | "duration"; @@ -141,14 
+145,22 @@ export const listCommand = buildListCommand("trace", { brief: "Output as JSON", default: false, }, + fresh: FRESH_FLAG, + }, + aliases: { + ...FRESH_ALIASES, + n: "limit", + q: "query", + s: "sort", + c: "cursor", }, - aliases: { n: "limit", q: "query", s: "sort", c: "cursor" }, }, async func( this: SentryContext, flags: ListFlags, target?: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd, setContext } = this; // Resolve org/project from positional arg, config, or DSN auto-detection diff --git a/src/commands/trace/logs.ts b/src/commands/trace/logs.ts index 70b5d039..7bed9ce0 100644 --- a/src/commands/trace/logs.ts +++ b/src/commands/trace/logs.ts @@ -11,6 +11,11 @@ import { openInBrowser } from "../../lib/browser.js"; import { buildCommand } from "../../lib/command.js"; import { ContextError } from "../../lib/errors.js"; import { displayTraceLogs } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { resolveOrg } from "../../lib/resolve-target.js"; import { buildTraceUrl } from "../../lib/sentry-urls.js"; import { validateTraceId } from "../../lib/trace-id.js"; @@ -21,6 +26,7 @@ type LogsFlags = { readonly period: string; readonly limit: number; readonly query?: string; + readonly fresh: boolean; }; /** Maximum allowed value for --limit flag */ @@ -166,14 +172,22 @@ export const logsCommand = buildCommand({ brief: "Additional filter query (Sentry search syntax)", optional: true, }, + fresh: FRESH_FLAG, + }, + aliases: { + ...FRESH_ALIASES, + w: "web", + t: "period", + n: "limit", + q: "query", }, - aliases: { w: "web", t: "period", n: "limit", q: "query" }, }, async func( this: SentryContext, flags: LogsFlags, ...args: string[] ): Promise { + applyFreshFlag(flags); const { stdout, cwd, setContext } = this; const { traceId, orgArg } = parsePositionalArgs(args); diff --git a/src/commands/trace/view.ts b/src/commands/trace/view.ts index 
23841afb..a799e5c7 100644 --- a/src/commands/trace/view.ts +++ b/src/commands/trace/view.ts @@ -21,6 +21,11 @@ import { writeFooter, writeJson, } from "../../lib/formatters/index.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; import { resolveOrgAndProject, resolveProjectBySlug, @@ -32,6 +37,7 @@ type ViewFlags = { readonly json: boolean; readonly web: boolean; readonly spans: number; + readonly fresh: boolean; }; /** Usage hint for ContextError messages */ @@ -142,14 +148,16 @@ export const viewCommand = buildCommand({ default: false, }, ...spansFlag, + fresh: FRESH_FLAG, }, - aliases: { w: "web" }, + aliases: { ...FRESH_ALIASES, w: "web" }, }, async func( this: SentryContext, flags: ViewFlags, ...args: string[] ): Promise { + applyFreshFlag(flags); const { stdout, cwd, setContext } = this; // Parse positional args diff --git a/src/lib/db/auth.ts b/src/lib/db/auth.ts index f426de95..b98c211b 100644 --- a/src/lib/db/auth.ts +++ b/src/lib/db/auth.ts @@ -2,6 +2,7 @@ * Authentication credential storage (single-row table pattern). */ +import { clearResponseCache } from "../response-cache.js"; import { withDbSpan } from "../telemetry.js"; import { getDatabase } from "./index.js"; import { runUpsert } from "./utils.js"; @@ -94,7 +95,7 @@ export function setAuthToken( }); } -export function clearAuth(): void { +export async function clearAuth(): Promise { withDbSpan("clearAuth", () => { const db = getDatabase(); db.query("DELETE FROM auth WHERE id = 1").run(); @@ -103,6 +104,14 @@ export function clearAuth(): void { db.query("DELETE FROM org_regions").run(); db.query("DELETE FROM pagination_cursors").run(); }); + + // Clear cached API responses — they are tied to the current user's permissions. + // Awaited so cache is fully removed before the process exits. 
+ try { + await clearResponseCache(); + } catch { + // Non-fatal: cache directory may not exist yet + } } export async function isAuthenticated(): Promise { diff --git a/src/lib/list-command.ts b/src/lib/list-command.ts index 9de81818..a4128280 100644 --- a/src/lib/list-command.ts +++ b/src/lib/list-command.ts @@ -24,6 +24,7 @@ import { parseOrgProjectArg } from "./arg-parsing.js"; import { buildCommand, numberParser } from "./command.js"; import { warning } from "./formatters/colors.js"; import { dispatchOrgScopedList, type OrgListConfig } from "./org-list.js"; +import { disableResponseCache } from "./response-cache.js"; // --------------------------------------------------------------------------- // Level A: shared parameter / flag definitions @@ -83,6 +84,60 @@ export const LIST_JSON_FLAG = { default: false, } as const; +/** + * The `--fresh` / `-f` flag shared by read-only commands. + * Bypasses the response cache and fetches fresh data from the API. + * + * Add to any command's `flags` object, then call `applyFreshFlag(flags)` at + * the top of `func()` to activate cache bypass when the flag is set. + * + * @example + * ```ts + * import { applyFreshFlag, FRESH_ALIASES, FRESH_FLAG } from "../lib/list-command.js"; + * + * // In parameters: + * flags: { ..., fresh: FRESH_FLAG }, + * aliases: { ...FRESH_ALIASES }, + * + * // In func(): + * applyFreshFlag(flags); + * ``` + */ +export const FRESH_FLAG = { + kind: "boolean" as const, + brief: "Bypass cache and fetch fresh data", + default: false, +} as const; + +/** + * Alias map for the `--fresh` flag: `-f` → `--fresh`. + * + * Spread into a command's `aliases` alongside other aliases: + * ```ts + * aliases: { ...FRESH_ALIASES, w: "web" } + * ``` + * + * **Note**: Commands that use `-f` for a different flag (e.g. `log list` + * uses `-f` for `--follow`) should NOT spread this constant. 
+ */ +export const FRESH_ALIASES = { f: "fresh" } as const; + +/** + * Apply the `--fresh` flag: disables the response cache for this invocation. + * + * Call at the top of a command's `func()` after defining the `fresh` flag: + * ```ts + * flags: { fresh: FRESH_FLAG }, + * async func(this: SentryContext, flags) { + * applyFreshFlag(flags); + * ``` + */ +export function applyFreshFlag(flags: { readonly fresh: boolean }): void { + if (flags.fresh) { + disableResponseCache(); + } +} + /** Matches strings that are all digits — used to detect invalid cursor values */ const ALL_DIGITS_RE = /^\d+$/; @@ -346,8 +401,9 @@ export function buildOrgListCommand( limit: buildListLimitFlag(config.entityPlural), json: LIST_JSON_FLAG, cursor: LIST_CURSOR_FLAG, + fresh: FRESH_FLAG, }, - aliases: LIST_BASE_ALIASES, + aliases: { ...LIST_BASE_ALIASES, ...FRESH_ALIASES }, }, async func( this: SentryContext, @@ -355,9 +411,11 @@ export function buildOrgListCommand( readonly limit: number; readonly json: boolean; readonly cursor?: string; + readonly fresh: boolean; }, target?: string ): Promise { + applyFreshFlag(flags); const { stdout, cwd } = this; const parsed = parseOrgProjectArg(target); await dispatchOrgScopedList({ config, stdout, cwd, flags, parsed }); diff --git a/src/lib/response-cache.ts b/src/lib/response-cache.ts new file mode 100644 index 00000000..4013e77b --- /dev/null +++ b/src/lib/response-cache.ts @@ -0,0 +1,648 @@ +/** + * Filesystem-based HTTP response cache for read-only API calls. + * + * Uses `http-cache-semantics` (RFC 7234/9111) to make correct caching decisions. + * When the server provides `Cache-Control` / `ETag` / `Expires` headers, they + * are respected automatically. When the server sends no cache headers (Sentry's + * current behavior), a URL-based fallback TTL is applied. + * + * Cache entries are stored as individual JSON files under `~/.sentry/cache/responses/`. 
+ * This keeps the response data separate from the config SQLite database, which + * stores small structured data (tokens, org slugs, cursors). API responses can + * be 50–500 KB each, so a dedicated cache directory avoids bloating the DB. + * + * @module + */ + +import { createHash } from "node:crypto"; +import { + mkdir, + readdir, + readFile, + rm, + unlink, + writeFile, +} from "node:fs/promises"; +import { join } from "node:path"; +import CachePolicy from "http-cache-semantics"; +import pLimit from "p-limit"; + +import { getConfigDir } from "./db/index.js"; +import { withCacheSpan } from "./telemetry.js"; + +// --------------------------------------------------------------------------- +// TTL tiers — used as fallback when the server sends no cache headers +// --------------------------------------------------------------------------- + +/** + * TTL tier classification for URLs. + * + * - `immutable`: data that never changes once created (events, traces) + * - `stable`: data that changes infrequently (orgs, projects, teams) + * - `volatile`: data that changes often (issue lists, log lists) + * - `no-cache`: never cache (polling endpoints like autofix state) + */ +type TtlTier = "immutable" | "stable" | "volatile" | "no-cache"; + +/** Fallback TTL durations by tier (milliseconds). `no-cache` uses 0 as a sentinel. */ +const FALLBACK_TTL_MS: Record = { + immutable: 24 * 60 * 60 * 1000, // 24 hours — events and traces never change + stable: 5 * 60 * 1000, // 5 minutes + volatile: 60 * 1000, // 60 seconds + "no-cache": 0, +}; + +/** + * URL patterns grouped by TTL tier. + * + * Checked in tier priority order (no-cache → immutable → volatile). + * "stable" has no patterns — it is the default fallback when nothing else matches. 
+ */ +const URL_TIER_REGEXPS: Readonly> = { + // Polling endpoints where state changes rapidly + "no-cache": [/\/(?:autofix|root-cause)\//], + // Specific resources by ID (events, traces) — never change once created + immutable: [/\/events\/[^/?]+\/?(?:\?|$)/, /\/trace\/[0-9a-f]{32}\//], + // Issue endpoints (lists AND detail views), dataset queries, trace-logs + volatile: [ + /\/issues\//, + /[?&]dataset=(?:logs|transactions)/, + /\/trace-logs\//, + ], + // Default fallback — no patterns needed + stable: [], +}; + +/** Tier check order — stable is the default and has no patterns to check. */ +const TIER_CHECK_ORDER: readonly TtlTier[] = [ + "no-cache", + "immutable", + "volatile", +]; + +/** + * Classify a URL into a TTL tier for fallback caching. + * + * @param url - Full URL string (with query params) + * @returns The TTL tier + * @internal Exported for testing + */ +export function classifyUrl(url: string): TtlTier { + for (const tier of TIER_CHECK_ORDER) { + for (const pattern of URL_TIER_REGEXPS[tier]) { + if (pattern.test(url)) { + return tier; + } + } + } + return "stable"; +} + +// --------------------------------------------------------------------------- +// Cache key generation +// --------------------------------------------------------------------------- + +/** + * Build a deterministic cache key from an HTTP method and URL. + * + * Query parameters are sorted alphabetically so that `?a=1&b=2` and `?b=2&a=1` + * produce the same key. The key is then SHA-256 hashed to produce a fixed-length + * filename-safe string. 
+ * + * @param method - HTTP method (e.g., "GET") + * @param url - Full URL string + * @returns Hex-encoded SHA-256 hash suitable for use as a filename + * @internal Exported for testing + */ +export function buildCacheKey(method: string, url: string): string { + const normalized = normalizeUrl(method, url); + return createHash("sha256").update(normalized).digest("hex"); +} + +/** + * Normalize method + URL into a stable string for cache key derivation. + * Sorts query params alphabetically for deterministic key generation. + * + * @internal Exported for testing + */ +export function normalizeUrl(method: string, url: string): string { + const parsed = new URL(url); + const sortedParams = new URLSearchParams( + [...parsed.searchParams.entries()].sort(([a], [b]) => { + if (a < b) { + return -1; + } + if (a > b) { + return 1; + } + return 0; + }) + ); + parsed.search = sortedParams.toString() ? `?${sortedParams.toString()}` : ""; + return `${method.toUpperCase()}|${parsed.toString()}`; +} + +// --------------------------------------------------------------------------- +// Cache storage types and constants +// --------------------------------------------------------------------------- + +/** Shape of a serialized cache entry on disk */ +type CacheEntry = { + /** Serialized CachePolicy object (via policy.toObject()) */ + policy: CachePolicy.CachePolicyObject; + /** Response body (already parsed JSON) */ + body: unknown; + /** HTTP status code */ + status: number; + /** Selected response headers (e.g., Link for pagination) */ + headers: Record; + /** Original URL, used for TTL tier classification during cleanup */ + url: string; + /** When this entry was created (epoch ms) */ + createdAt: number; + /** + * Pre-computed expiry timestamp (epoch ms). + * Allows cleanup to check freshness without deserializing CachePolicy. + * Optional for backwards compatibility with entries written before this field. 
+ */ + expiresAt?: number; +}; + +/** CachePolicy options for a single-user CLI cache */ +const POLICY_OPTIONS: CachePolicy.Options = { + shared: false, + cacheHeuristic: 0.1, + immutableMinTimeToLive: FALLBACK_TTL_MS.immutable, +}; + +/** Maximum number of cache files to retain */ +const MAX_CACHE_ENTRIES = 500; + +/** Probability of running cleanup on each cache write */ +const CLEANUP_PROBABILITY = 0.1; + +/** + * Headers that should be preserved in the cache for consumers. + * Only includes headers that affect API client behavior (e.g., pagination). + */ +const PRESERVED_HEADERS = ["link"]; + +// --------------------------------------------------------------------------- +// Internal helpers +// --------------------------------------------------------------------------- + +/** Get the response cache directory path */ +function getCacheDir(): string { + return join(getConfigDir(), "cache", "responses"); +} + +/** Get the full file path for a cache key */ +function cacheFilePath(key: string): string { + return join(getCacheDir(), `${key}.json`); +} + +/** Check if an error is an ENOENT (file/directory not found) */ +function isNotFound(error: unknown): boolean { + return ( + error instanceof Error && + "code" in error && + (error as NodeJS.ErrnoException).code === "ENOENT" + ); +} + +/** Extract the subset of response headers worth caching */ +function pickHeaders(headers: Headers): Record { + const result: Record = {}; + for (const name of PRESERVED_HEADERS) { + const value = headers.get(name); + if (value) { + result[name] = value; + } + } + return result; +} + +/** Convert Headers to a plain object for http-cache-semantics */ +function headersToObject(headers: Headers): Record { + return Object.fromEntries(headers.entries()); +} + +/** + * Check whether the server sent explicit cache-control directives. + * + * When `rescc` (response cache-control) is empty, the server sent no + * Cache-Control header. 
When it has keys, the server explicitly provided + * directives (e.g., `max-age=0`, `no-cache`, `max-age=300`). + * + * This distinction is critical: `timeToLive() === 0` is ambiguous — it can + * mean "no headers" (use fallback TTL) or "max-age=0" (don't cache). + */ +function hasServerCacheDirectives(policy: CachePolicy): boolean { + const { rescc } = policy.toObject(); + return Object.keys(rescc).length > 0; +} + +/** + * Check whether a cache entry is still fresh. + * + * Uses the server-provided TTL (via CachePolicy) when available. Falls back + * to URL-based TTL tiers when the server sends no cache headers. + */ +function isEntryFresh( + policy: CachePolicy, + entry: CacheEntry, + requestHeaders: Record, + url: string +): boolean { + const newRequest = { url, method: "GET", headers: requestHeaders }; + if (policy.satisfiesWithoutRevalidation(newRequest)) { + return true; + } + + // If the server sent explicit cache directives (e.g., max-age=0), respect + // them — CachePolicy already said stale, so this entry is expired. + if (hasServerCacheDirectives(policy)) { + return false; + } + + // No server cache headers — use our URL-based fallback tier + const tier = classifyUrl(url); + const fallbackTtl = FALLBACK_TTL_MS[tier]; + const age = Date.now() - entry.createdAt; + return age <= fallbackTtl; +} + +/** + * Build the response headers for a cached entry. + * Merges CachePolicy's computed headers with our preserved headers. + * Flattens multi-value headers into comma-separated strings for the Response API. + */ +function buildResponseHeaders( + policy: CachePolicy, + entry: CacheEntry +): Record { + const policyHeaders = policy.responseHeaders(); + const result: Record = {}; + + for (const [name, value] of Object.entries(policyHeaders)) { + if (value === undefined) { + continue; + } + result[name] = Array.isArray(value) ? 
value.join(", ") : value; + } + + // Merge preserved headers (like Link for pagination) + for (const [name, value] of Object.entries(entry.headers)) { + if (!(name in result)) { + result[name] = value; + } + } + + return result; +} + +// --------------------------------------------------------------------------- +// Cache bypass control +// --------------------------------------------------------------------------- + +let cacheDisabledFlag = false; + +/** + * Disable the response cache for the current process. + * Called when `--fresh` flag is passed to a command. + */ +export function disableResponseCache(): void { + cacheDisabledFlag = true; +} + +/** + * Re-enable the response cache after `disableResponseCache()` was called. + * + * This is only needed in tests to prevent one test's `--fresh` flag from + * permanently disabling caching for subsequent tests in the same process. + * Production CLI invocations are single-process, so the flag resets naturally. + * + * @internal Exported for testing + */ +export function resetCacheState(): void { + cacheDisabledFlag = false; +} + +/** + * Check if response caching is disabled. + * Cache is disabled when: + * - `disableResponseCache()` was called (--fresh flag) + * - `SENTRY_NO_CACHE=1` environment variable is set + */ +export function isCacheDisabled(): boolean { + return cacheDisabledFlag || process.env.SENTRY_NO_CACHE === "1"; +} + +// --------------------------------------------------------------------------- +// Public API +// --------------------------------------------------------------------------- + +/** + * Attempt to serve a cached response for a GET request. + * + * Reads the cache file directly and handles ENOENT (cache miss) without a + * separate existence check. Reconstructs the `CachePolicy` from the stored + * metadata and verifies the cached response still satisfies the new request.
+ * + * @param method - HTTP method (only "GET" is cached) + * @param url - Full request URL + * @param requestHeaders - Headers from the new request + * @returns A synthetic Response if cache hit, or undefined on miss/expired + */ +export async function getCachedResponse( + method: string, + url: string, + requestHeaders: Record +): Promise { + if ( + method !== "GET" || + isCacheDisabled() || + classifyUrl(url) === "no-cache" + ) { + return; + } + + return await withCacheSpan( + "cache.lookup", + async () => { + const key = buildCacheKey(method, url); + const entry = await readCacheEntry(key); + if (!entry) { + return; + } + + try { + const policy = CachePolicy.fromObject(entry.policy); + if (!isEntryFresh(policy, entry, requestHeaders, url)) { + return; + } + + const responseHeaders = buildResponseHeaders(policy, entry); + return new Response(JSON.stringify(entry.body), { + status: entry.status, + headers: responseHeaders, + }); + } catch { + // Corrupted or version-incompatible policy object — treat as cache miss. + // Best-effort cleanup of the broken entry. + unlink(cacheFilePath(key)).catch(() => { + // Ignored — fire-and-forget + }); + return; + } + }, + { "cache.url": url } + ); +} + +/** + * Read and parse a cache entry from disk. + * Returns undefined on ENOENT or parse errors. + */ +async function readCacheEntry(key: string): Promise { + const filePath = cacheFilePath(key); + let raw: string; + try { + raw = await readFile(filePath, "utf-8"); + } catch { + // ENOENT = cache miss; other read errors = treat as miss + return; + } + + try { + return JSON.parse(raw) as CacheEntry; + } catch { + // Corrupted cache file — delete it + await unlink(filePath).catch(() => { + // Best-effort cleanup of corrupted file + }); + return; + } +} + +/** + * Store a response in the cache. + * + * Only caches successful (2xx) GET responses. Uses `http-cache-semantics` to + * determine if the response is storable per RFC 7234. 
If the server explicitly + * sends `Cache-Control: no-store`, the response is not cached. + * + * This function is fire-and-forget — errors are silently swallowed to avoid + * slowing down the response path. + * + * @param method - HTTP method + * @param url - Full request URL + * @param requestHeaders - Request headers + * @param response - The fetch Response to cache (must be cloned before passing) + */ +export async function storeCachedResponse( + method: string, + url: string, + requestHeaders: Record, + response: Response +): Promise { + if ( + method !== "GET" || + isCacheDisabled() || + !response.ok || + classifyUrl(url) === "no-cache" + ) { + return; + } + + try { + await withCacheSpan( + "cache.store", + () => writeResponseToCache(method, url, requestHeaders, response), + { "cache.url": url } + ); + } catch { + // Cache write failures are non-fatal — silently ignore + } +} + +/** Core cache write logic, separated for complexity management */ +async function writeResponseToCache( + method: string, + url: string, + requestHeaders: Record, + response: Response +): Promise { + const responseHeadersObj = headersToObject(response.headers); + + const policy = new CachePolicy( + { url, method, headers: requestHeaders }, + { status: response.status, headers: responseHeadersObj }, + POLICY_OPTIONS + ); + + if (!policy.storable()) { + return; + } + + const body: unknown = await response.json(); + const key = buildCacheKey(method, url); + const now = Date.now(); + + // Pre-compute expiry for cheap cleanup checks (avoids CachePolicy deserialization). + // When the server sent explicit cache directives, use its TTL (even if 0). + // Only fall back to URL-based tier when no server cache headers were present. + const serverTtl = policy.timeToLive(); + const ttl = hasServerCacheDirectives(policy) + ? 
serverTtl + : FALLBACK_TTL_MS[classifyUrl(url)]; + + const entry: CacheEntry = { + policy: policy.toObject(), + body, + status: response.status, + headers: pickHeaders(response.headers), + url, + createdAt: now, + expiresAt: now + ttl, + }; + + await mkdir(getCacheDir(), { recursive: true, mode: 0o700 }); + await writeFile(cacheFilePath(key), JSON.stringify(entry), "utf-8"); + + // Probabilistic cleanup to avoid unbounded cache growth + if (Math.random() < CLEANUP_PROBABILITY) { + cleanupCache().catch(() => { + // Non-fatal: cleanup failure doesn't affect cache correctness + }); + } +} + +/** + * Remove all cached responses. + * Called on `auth logout` and `auth login` since cached data is tied to the user. + */ +export async function clearResponseCache(): Promise { + try { + await rm(getCacheDir(), { recursive: true, force: true }); + } catch { + // Ignore errors — directory may not exist + } +} + +// --------------------------------------------------------------------------- +// Concurrency helper +// --------------------------------------------------------------------------- + +/** Concurrency limit for parallel cache file I/O operations */ +const CACHE_IO_CONCURRENCY = 8; + +/** Shared concurrency limiter for all cache I/O — created once, reused across calls */ +const cacheIO = pLimit(CACHE_IO_CONCURRENCY); + +// --------------------------------------------------------------------------- +// Cache cleanup +// --------------------------------------------------------------------------- + +/** + * Clean up expired and excess cache entries. + * + * Deletes entries that have expired (based on server TTL or fallback TTL), + * then enforces a maximum entry count by evicting the oldest entries. 
+ */ +async function cleanupCache(): Promise { + const cacheDir = getCacheDir(); + let files: string[]; + try { + files = await readdir(cacheDir); + } catch (error) { + if (isNotFound(error)) { + return; + } + throw error; + } + + const jsonFiles = files.filter((f) => f.endsWith(".json")); + if (jsonFiles.length === 0) { + return; + } + + const entries = await collectEntryMetadata(cacheDir, jsonFiles); + + // Both operations are best-effort — run them in parallel without blocking + await Promise.all([ + deleteExpiredEntries(cacheDir, entries), + evictExcessEntries(cacheDir, entries), + ]); +} + +/** Metadata for a cache entry, used for cleanup decisions */ +type EntryMetadata = { file: string; createdAt: number; expired: boolean }; + +/** + * Read all cache files and determine which are expired. + * + * Uses the pre-computed `expiresAt` field when available (cheap — no + * CachePolicy deserialization). Falls back to URL-based TTL classification + * for entries written before `expiresAt` was added. + */ +async function collectEntryMetadata( + cacheDir: string, + jsonFiles: string[] +): Promise { + const entries: EntryMetadata[] = []; + const now = Date.now(); + + await cacheIO.map(jsonFiles, async (file) => { + const filePath = join(cacheDir, file); + try { + const raw = await readFile(filePath, "utf-8"); + const entry = JSON.parse(raw) as CacheEntry; + const expired = + entry.expiresAt !== undefined + ? now >= entry.expiresAt + : now - entry.createdAt > + FALLBACK_TTL_MS[classifyUrl(entry.url ?? 
"")]; + entries.push({ file, createdAt: entry.createdAt, expired }); + } catch { + // Unparseable file — delete it + unlink(filePath).catch(() => { + // Best-effort cleanup of corrupted file + }); + } + }); + + return entries; +} + +/** Delete cache files that have expired */ +async function deleteExpiredEntries( + cacheDir: string, + entries: EntryMetadata[] +): Promise { + const expired = entries.filter((e) => e.expired); + await cacheIO.map(expired, (entry) => + unlink(join(cacheDir, entry.file)).catch(() => { + // Best-effort: file may have been deleted by another process + }) + ); +} + +/** Evict the oldest entries when over the max count */ +async function evictExcessEntries( + cacheDir: string, + entries: EntryMetadata[] +): Promise { + const remaining = entries.filter((e) => !e.expired); + if (remaining.length <= MAX_CACHE_ENTRIES) { + return; + } + + remaining.sort((a, b) => a.createdAt - b.createdAt); + const toEvict = remaining.slice(0, remaining.length - MAX_CACHE_ENTRIES); + await cacheIO.map(toEvict, (entry) => + unlink(join(cacheDir, entry.file)).catch(() => { + // Best-effort eviction + }) + ); +} diff --git a/src/lib/sentry-client.ts b/src/lib/sentry-client.ts index 977f04ec..8e3e407d 100644 --- a/src/lib/sentry-client.ts +++ b/src/lib/sentry-client.ts @@ -9,7 +9,8 @@ */ import { DEFAULT_SENTRY_URL, getUserAgent } from "./constants.js"; -import { refreshToken } from "./db/auth.js"; +import { getAuthToken, refreshToken } from "./db/auth.js"; +import { getCachedResponse, storeCachedResponse } from "./response-cache.js"; import { withHttpSpan } from "./telemetry.js"; /** Request timeout in milliseconds */ @@ -187,16 +188,20 @@ function handleFetchError( return { action: "retry" }; } -/** Extract the URL pathname for span naming */ -function extractUrlPath(input: Request | string | URL): string { - let raw: string; +/** Extract the full URL string from a fetch input */ +function extractFullUrl(input: Request | string | URL): string { if (typeof input 
=== "string") { - raw = input; - } else if (input instanceof URL) { - raw = input.href; - } else { - raw = input.url; + return input; + } + if (input instanceof URL) { + return input.href; } + return input.url; +} + +/** Extract the URL pathname for span naming */ +function extractUrlPath(input: Request | string | URL): string { + const raw = extractFullUrl(input); try { return new URL(raw).pathname; } catch { @@ -205,9 +210,102 @@ function extractUrlPath(input: Request | string | URL): string { } /** - * Create a fetch function with authentication, timeout, retry, and 401 refresh. + * Attempt to serve a GET request from the response cache. + * Returns the cached Response if valid, or undefined on miss. + * + * @param requestHeaders - Headers that were (or will be) sent with the request, + * needed for correct `Vary` handling in CachePolicy freshness checks. + */ +async function tryCacheHit( + method: string, + fullUrl: string, + requestHeaders: Record +): Promise { + if (method !== "GET") { + return; + } + return await getCachedResponse(method, fullUrl, requestHeaders); +} + +/** + * Store a successful GET response in the cache (fire-and-forget). + * Clones the response so the original body stream is preserved for the caller. + * + * @param requestHeaders - Headers sent with the request, stored in CachePolicy + * for future `Vary`-aware freshness checks. + */ +function cacheResponse( + method: string, + fullUrl: string, + requestHeaders: Record, + response: Response +): void { + if (method !== "GET" || !response.ok) { + return; + } + // Cast needed: Bun extends Response with extra properties (toJSON, count, getAll) + // that .clone() doesn't carry over, but our cache only reads standard Response API + storeCachedResponse( + method, + fullUrl, + requestHeaders, + response.clone() as Response + ).catch(() => { + // Non-fatal: cache write failures don't affect the response + }); +} + +/** Build a `{ authorization }` header map from a bearer token, or `{}` if absent. 
*/ +function authHeaders(token: string | undefined): Record { + return token ? { authorization: `Bearer ${token}` } : {}; +} + +/** + * Authenticate and execute a request with retry logic. + * + * Refreshes the auth token, then retries the request up to `MAX_RETRIES` times + * with exponential backoff on transient errors. + */ +async function fetchWithRetry( + input: Request | string | URL, + init: RequestInit | undefined, + method: string, + fullUrl: string +): Promise { + const { token } = await refreshToken(); + const headers = prepareHeaders(input, init, token); + + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + const isLastAttempt = attempt === MAX_RETRIES; + const result = await executeAttempt(input, init, headers, isLastAttempt); + + if (result.action === "done") { + // Use getAuthToken() instead of captured `token` — after a 401 refresh, + // handleUnauthorized stores a new token in the DB + cacheResponse( + method, + fullUrl, + authHeaders(getAuthToken()), + result.response + ); + return result.response; + } + if (result.action === "throw") { + throw result.error; + } + + await Bun.sleep(backoffDelay(attempt)); + } + + // Unreachable: the last attempt always returns 'done' or 'throw' + throw new Error("Exhausted all retry attempts"); +} + +/** + * Create a fetch function with authentication, timeout, retry, caching, and 401 refresh. * * This wraps the native fetch with: + * - **Response caching** for GET requests (checked before hitting the network) * - Auth token injection (Bearer token) * - Request timeout via AbortController * - Automatic retry on transient HTTP errors (408, 429, 5xx) @@ -216,6 +314,10 @@ function extractUrlPath(input: Request | string | URL): string { * - User-Agent header for API analytics * - Automatic HTTP span tracing for every request * + * Cache is checked first — on a hit, auth refresh, timeout, and retry logic are + * all skipped. 
On a miss or for non-GET methods, the full authenticated flow runs + * and successful GET responses are stored in the cache afterward. + * * @returns A fetch-compatible function for use with @sentry/api SDK functions */ function createAuthenticatedFetch(): ( @@ -231,30 +333,20 @@ function createAuthenticatedFetch(): ( const urlPath = extractUrlPath(input); return withHttpSpan(method, urlPath, async () => { - const { token } = await refreshToken(); - const headers = prepareHeaders(input, init, token); - - for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { - const isLastAttempt = attempt === MAX_RETRIES; - const result = await executeAttempt( - input, - init, - headers, - isLastAttempt - ); - - if (result.action === "done") { - return result.response; - } - if (result.action === "throw") { - throw result.error; - } - - await Bun.sleep(backoffDelay(attempt)); + const fullUrl = extractFullUrl(input); + + // Check cache before auth/retry for GET requests. + // Uses current token (no refresh) so lookups are fast but Vary-correct. + const cached = await tryCacheHit( + method, + fullUrl, + authHeaders(getAuthToken()) + ); + if (cached) { + return cached; } - // Unreachable: the last attempt always returns 'done' or 'throw' - throw new Error("Exhausted all retry attempts"); + return await fetchWithRetry(input, init, method, fullUrl); }); }; } diff --git a/src/lib/telemetry.ts b/src/lib/telemetry.ts index 649a27a6..44935575 100644 --- a/src/lib/telemetry.ts +++ b/src/lib/telemetry.ts @@ -945,3 +945,22 @@ export function withFsSpan( ): Promise { return withTracing(operation, "file", fn); } + +/** + * Wrap a cache operation with a span for tracing. + * + * Creates a child span under the current active span to track + * response cache hit/miss/store operations. 
+ * + * @param operation - Name of the operation (e.g., "cache.lookup", "cache.store") + * @param fn - The function that performs the cache operation + * @param attributes - Optional span attributes (e.g., url, cache.hit) + * @returns The result of the function + */ +export function withCacheSpan( + operation: string, + fn: () => T | Promise, + attributes?: Record +): Promise { + return withTracing(operation, "cache", fn, attributes); +} diff --git a/test/commands/project/list.test.ts b/test/commands/project/list.test.ts index dc84db25..88f22e1b 100644 --- a/test/commands/project/list.test.ts +++ b/test/commands/project/list.test.ts @@ -426,6 +426,7 @@ describe("handleExplicit", () => { await handleExplicit(writer, "test-org", "frontend", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -440,6 +441,7 @@ describe("handleExplicit", () => { await handleExplicit(writer, "test-org", "frontend", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -454,6 +456,7 @@ describe("handleExplicit", () => { await handleExplicit(writer, "test-org", "nonexistent", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -469,6 +472,7 @@ describe("handleExplicit", () => { await handleExplicit(writer, "test-org", "nonexistent", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -483,6 +487,7 @@ describe("handleExplicit", () => { limit: 30, json: false, platform: "ruby", + fresh: false, }); const text = output(); @@ -498,6 +503,7 @@ describe("handleExplicit", () => { limit: 30, json: false, platform: "javascript", + fresh: false, }); const text = output(); @@ -524,7 +530,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false }, + flags: { limit: 30, json: false, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -546,7 +552,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: 
writer, org: "test-org", - flags: { limit: 30, json: true }, + flags: { limit: 30, json: true, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -564,7 +570,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: true }, + flags: { limit: 30, json: true, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -584,7 +590,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false }, + flags: { limit: 30, json: false, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -607,7 +613,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false }, + flags: { limit: 30, json: false, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -626,7 +632,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false, platform: "rust" }, + flags: { limit: 30, json: false, platform: "rust", fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -644,7 +650,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false }, + flags: { limit: 30, json: false, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -660,7 +666,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false, platform: "rust" }, + flags: { limit: 30, json: false, platform: "rust", fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ -680,7 +686,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false }, + flags: { limit: 30, json: false, fresh: false }, contextKey: "type:org:test-org", cursor: undefined, }); @@ 
-701,7 +707,7 @@ describe("handleOrgAll", () => { await handleOrgAll({ stdout: writer, org: "test-org", - flags: { limit: 30, json: false, platform: "python" }, + flags: { limit: 30, json: false, platform: "python", fresh: false }, contextKey: "type:org:test-org:platform:python", cursor: undefined, }); @@ -730,6 +736,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "frontend", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -743,6 +750,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "frontend", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -779,6 +787,7 @@ describe("handleProjectSearch", () => { handleProjectSearch(writer, "nonexistent", { limit: 30, json: false, + fresh: false, }) ).rejects.toThrow(ContextError); }); @@ -811,6 +820,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "nonexistent", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -824,6 +834,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "frontend", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -838,6 +849,7 @@ describe("handleProjectSearch", () => { limit: 30, json: false, platform: "rust", + fresh: false, }); const text = output(); @@ -894,6 +906,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "frontend", { limit: 1, json: false, + fresh: false, }); const text = output(); @@ -950,6 +963,7 @@ describe("handleProjectSearch", () => { await handleProjectSearch(writer, "frontend", { limit: 1, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1063,7 +1077,7 @@ describe("fetchOrgProjectsSafe", () => { test("propagates AuthError when not authenticated", async () => { // Clear auth token so the API client throws AuthError before making any request - clearAuth(); + await clearAuth(); await 
expect(fetchOrgProjectsSafe("myorg")).rejects.toThrow(AuthError); }); @@ -1163,6 +1177,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -1179,6 +1194,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1194,6 +1210,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: false, + fresh: false, }); expect(output()).toContain("No projects found"); @@ -1209,6 +1226,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 2, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1225,6 +1243,7 @@ describe("handleAutoDetect", () => { limit: 30, json: true, platform: "python", + fresh: false, }); const parsed = JSON.parse(output()); @@ -1243,6 +1262,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 2, json: false, + fresh: false, }); const text = output(); @@ -1259,6 +1279,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1279,6 +1300,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: false, + fresh: false, }); const text = output(); @@ -1298,6 +1320,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1325,6 +1348,7 @@ describe("handleAutoDetect", () => { await handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: true, + fresh: false, }); const parsed = JSON.parse(output()); @@ -1335,13 +1359,14 @@ describe("handleAutoDetect", () => { 
test("fast path: AuthError still propagates", async () => { await setDefaults("test-org"); // Clear auth so getAuthToken() throws AuthError before any fetch - clearAuth(); + await clearAuth(); const { writer } = createCapture(); await expect( handleAutoDetect(writer, "/tmp/test-project", { limit: 30, json: true, + fresh: false, }) ).rejects.toThrow(AuthError); }); @@ -1356,6 +1381,7 @@ describe("handleAutoDetect", () => { limit: 30, json: true, platform: "python", + fresh: false, }); const parsed = JSON.parse(output()); diff --git a/test/lib/db/model-based.test.ts b/test/lib/db/model-based.test.ts index 72903040..4c27c3f8 100644 --- a/test/lib/db/model-based.test.ts +++ b/test/lib/db/model-based.test.ts @@ -203,7 +203,7 @@ class ClearAuthCommand implements AsyncCommand { check = () => true; async run(model: DbModel, _real: RealDb): Promise { - clearAuth(); + await clearAuth(); // Clear auth state model.auth.token = null; @@ -629,8 +629,8 @@ const allCommands = [ // Tests describe("model-based: database layer", () => { - test("random sequences of database operations maintain consistency", () => { - fcAssert( + test("random sequences of database operations maintain consistency", async () => { + await fcAssert( asyncProperty(commands(allCommands, { size: "+1" }), async (cmds) => { const cleanup = createIsolatedDbContext(); try { @@ -651,8 +651,8 @@ describe("model-based: database layer", () => { ); }); - test("clearAuth also clears org regions (key invariant)", () => { - fcAssert( + test("clearAuth also clears org regions (key invariant)", async () => { + await fcAssert( asyncProperty( array(tuple(slugArb, regionUrlArb), { minLength: 1, maxLength: 5 }), async (entries) => { @@ -670,7 +670,7 @@ describe("model-based: database layer", () => { expect(regionsBefore.size).toBe(uniqueOrgSlugs.size); // Clear auth - clearAuth(); + await clearAuth(); // Verify regions were also cleared (this is the invariant!) 
const regionsAfter = await getAllOrgRegions(); @@ -684,8 +684,8 @@ describe("model-based: database layer", () => { ); }); - test("clearAuth also clears pagination cursors (key invariant)", () => { - fcAssert( + test("clearAuth also clears pagination cursors (key invariant)", async () => { + await fcAssert( asyncProperty(tuple(slugArb, slugArb), async ([commandKey, context]) => { const cleanup = createIsolatedDbContext(); try { @@ -703,7 +703,7 @@ describe("model-based: database layer", () => { expect(before).toBe("1735689600000:100:0"); // Clear auth - clearAuth(); + await clearAuth(); // Verify pagination cursor was also cleared (this is the invariant!) const after = getPaginationCursor(commandKey, context); @@ -716,8 +716,8 @@ describe("model-based: database layer", () => { ); }); - test("alias lookup is case-insensitive", () => { - fcAssert( + test("alias lookup is case-insensitive", async () => { + await fcAssert( asyncProperty( tuple(aliasArb, slugArb, slugArb), async ([alias, org, project]) => { @@ -772,7 +772,7 @@ describe("model-based: database layer", () => { ); }); - test("fingerprint mismatch rejects alias lookup", () => { + test("fingerprint mismatch rejects alias lookup", async () => { // Combine all parameters into a single tuple to avoid parameter limit const paramsArb = tuple( aliasArb, @@ -784,7 +784,7 @@ describe("model-based: database layer", () => { nat(1000) ); - fcAssert( + await fcAssert( asyncProperty(paramsArb, async ([alias, org, project, a, b, c, d]) => { // Ensure fingerprints are different const fp1 = `${a}:${b}`; @@ -814,13 +814,13 @@ describe("model-based: database layer", () => { ); }); - test("setProjectAliases replaces all existing aliases", () => { + test("setProjectAliases replaces all existing aliases", async () => { const aliasEntryArb = array(tuple(aliasArb, slugArb, slugArb), { minLength: 1, maxLength: 3, }); - fcAssert( + await fcAssert( asyncProperty( tuple(aliasEntryArb, aliasEntryArb), async ([first, second]) => { diff 
--git a/test/lib/db/pagination.model-based.test.ts b/test/lib/db/pagination.model-based.test.ts index e43519b1..df37d55c 100644 --- a/test/lib/db/pagination.model-based.test.ts +++ b/test/lib/db/pagination.model-based.test.ts @@ -192,8 +192,8 @@ const allCommands = [setCmdArb, getCmdArb, clearCmdArb]; // Tests describe("model-based: pagination cursor storage", () => { - test("random sequences of pagination operations maintain consistency", () => { - fcAssert( + test("random sequences of pagination operations maintain consistency", async () => { + await fcAssert( asyncProperty(commands(allCommands, { size: "+1" }), async (cmds) => { const cleanup = createIsolatedDbContext(); try { diff --git a/test/lib/response-cache.property.test.ts b/test/lib/response-cache.property.test.ts new file mode 100644 index 00000000..e98a4dfd --- /dev/null +++ b/test/lib/response-cache.property.test.ts @@ -0,0 +1,249 @@ +/** + * Property-Based Tests for Response Cache + * + * Verifies properties of cache key generation, URL normalization, + * and URL classification that should hold for any valid input. 
+ */ + +import { describe, expect, test } from "bun:test"; +import { + array, + constantFrom, + assert as fcAssert, + property, + string, + tuple, +} from "fast-check"; +import { + buildCacheKey, + classifyUrl, + normalizeUrl, +} from "../../src/lib/response-cache.js"; +import { DEFAULT_NUM_RUNS } from "../model-based/helpers.js"; + +// --------------------------------------------------------------------------- +// Arbitraries +// --------------------------------------------------------------------------- + +/** Generate valid HTTP methods */ +const methodArb = constantFrom("GET", "POST", "PUT", "DELETE", "PATCH"); + +/** Generate simple path segments */ +const pathSegmentArb = string({ minLength: 1, maxLength: 20 }).filter((s) => + /^[a-zA-Z0-9_-]+$/.test(s) +); + +/** Generate URL-like strings with paths and query params */ +const sentryUrlArb = tuple( + constantFrom( + "https://us.sentry.io", + "https://de.sentry.io", + "https://sentry.io" + ), + array(pathSegmentArb, { minLength: 1, maxLength: 5 }), + array( + tuple( + string({ minLength: 1, maxLength: 10 }).filter((s) => + /^[a-zA-Z]+$/.test(s) + ), + string({ minLength: 1, maxLength: 20 }).filter((s) => + /^[a-zA-Z0-9]+$/.test(s) + ) + ), + { minLength: 0, maxLength: 4 } + ) +).map(([base, paths, params]) => { + const pathStr = `/api/0/${paths.join("/")}`; + const query = + params.length > 0 + ? 
`?${params.map(([k, v]) => `${k}=${v}`).join("&")}` + : ""; + return `${base}${pathStr}${query}`; +}); + +// --------------------------------------------------------------------------- +// Tests: buildCacheKey +// --------------------------------------------------------------------------- + +describe("property: buildCacheKey", () => { + test("produces a 64-char hex string (SHA-256)", () => { + fcAssert( + property(methodArb, sentryUrlArb, (method, url) => { + const key = buildCacheKey(method, url); + expect(key).toMatch(/^[0-9a-f]{64}$/); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("is deterministic — same inputs produce same key", () => { + fcAssert( + property(methodArb, sentryUrlArb, (method, url) => { + const key1 = buildCacheKey(method, url); + const key2 = buildCacheKey(method, url); + expect(key1).toBe(key2); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("different methods produce different keys for same URL", () => { + fcAssert( + property(sentryUrlArb, (url) => { + const getKey = buildCacheKey("GET", url); + const postKey = buildCacheKey("POST", url); + expect(getKey).not.toBe(postKey); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("query param order does not affect the key", () => { + fcAssert( + property( + constantFrom("https://us.sentry.io", "https://de.sentry.io"), + pathSegmentArb, + (base, path) => { + const url1 = `${base}/api/0/${path}?a=1&b=2&c=3`; + const url2 = `${base}/api/0/${path}?c=3&a=1&b=2`; + const key1 = buildCacheKey("GET", url1); + const key2 = buildCacheKey("GET", url2); + expect(key1).toBe(key2); + } + ), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("method comparison is case-insensitive", () => { + fcAssert( + property(sentryUrlArb, (url) => { + const key1 = buildCacheKey("get", url); + const key2 = buildCacheKey("GET", url); + expect(key1).toBe(key2); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); +}); + +// --------------------------------------------------------------------------- 
+// Tests: normalizeUrl +// --------------------------------------------------------------------------- + +describe("property: normalizeUrl", () => { + test("sorts query parameters alphabetically", () => { + const normalized = normalizeUrl("GET", "https://sentry.io/api?z=1&a=2&m=3"); + expect(normalized).toBe("GET|https://sentry.io/api?a=2&m=3&z=1"); + }); + + test("uppercases the method", () => { + fcAssert( + property( + constantFrom("get", "post", "put", "delete"), + sentryUrlArb, + (method, url) => { + const normalized = normalizeUrl(method, url); + expect(normalized.startsWith(method.toUpperCase())).toBe(true); + } + ), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("produces pipe-separated method|url format", () => { + fcAssert( + property(methodArb, sentryUrlArb, (method, url) => { + const normalized = normalizeUrl(method, url); + expect(normalized).toContain("|"); + const [m] = normalized.split("|", 1); + expect(m).toBe(method.toUpperCase()); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); +}); + +// --------------------------------------------------------------------------- +// Tests: classifyUrl +// --------------------------------------------------------------------------- + +describe("property: classifyUrl", () => { + test("always returns a valid tier", () => { + fcAssert( + property(sentryUrlArb, (url) => { + const tier = classifyUrl(url); + expect(["immutable", "stable", "volatile", "no-cache"]).toContain(tier); + }), + { numRuns: DEFAULT_NUM_RUNS } + ); + }); + + test("event detail URLs are immutable", () => { + const urls = [ + "https://us.sentry.io/api/0/projects/myorg/myproject/events/abc123/", + "https://sentry.io/api/0/projects/org/proj/events/deadbeef/?full=true", + ]; + for (const url of urls) { + expect(classifyUrl(url)).toBe("immutable"); + } + }); + + test("trace URLs with 32-char hex IDs are immutable", () => { + const traceId = "a".repeat(32); + const url = `https://us.sentry.io/api/0/organizations/myorg/trace/${traceId}/`; + 
expect(classifyUrl(url)).toBe("immutable"); + }); + + test("issue URLs are volatile (lists and detail views)", () => { + const urls = [ + "https://us.sentry.io/api/0/projects/org/proj/issues/", + "https://us.sentry.io/api/0/projects/org/proj/issues/?query=is:unresolved", + "https://us.sentry.io/api/0/issues/12345/", + "https://sentry.io/api/0/issues/67890/?format=json", + "https://us.sentry.io/api/0/organizations/org/issues/12345/hashes/", + ]; + for (const url of urls) { + expect(classifyUrl(url)).toBe("volatile"); + } + }); + + test("dataset=logs URLs are volatile", () => { + const url = + "https://us.sentry.io/api/0/organizations/org/events/?dataset=logs&query=foo"; + expect(classifyUrl(url)).toBe("volatile"); + }); + + test("dataset=transactions URLs are volatile", () => { + const url = + "https://us.sentry.io/api/0/organizations/org/events/?dataset=transactions"; + expect(classifyUrl(url)).toBe("volatile"); + }); + + test("autofix URLs are no-cache", () => { + const urls = [ + "https://us.sentry.io/api/0/organizations/org/issues/123/autofix/", + "https://sentry.io/api/0/organizations/org/issues/456/autofix/?format=json", + ]; + for (const url of urls) { + expect(classifyUrl(url)).toBe("no-cache"); + } + }); + + test("root-cause URLs are no-cache", () => { + const url = + "https://us.sentry.io/api/0/organizations/org/issues/123/root-cause/"; + expect(classifyUrl(url)).toBe("no-cache"); + }); + + test("org/project/team list URLs default to stable", () => { + const urls = [ + "https://us.sentry.io/api/0/organizations/", + "https://us.sentry.io/api/0/organizations/myorg/projects/", + "https://us.sentry.io/api/0/organizations/myorg/teams/", + ]; + for (const url of urls) { + expect(classifyUrl(url)).toBe("stable"); + } + }); +}); diff --git a/test/lib/response-cache.test.ts b/test/lib/response-cache.test.ts new file mode 100644 index 00000000..b25f6ea1 --- /dev/null +++ b/test/lib/response-cache.test.ts @@ -0,0 +1,341 @@ +/** + * Unit Tests for Response Cache + * + 
 * Tests the cache lifecycle: store, retrieve, expire, clear, and bypass. + * Uses isolated temp directories per test to avoid interference. + */ + +import { afterEach, beforeEach, describe, expect, test } from "bun:test"; +import { readdir } from "node:fs/promises"; +import { join } from "node:path"; +import { + buildCacheKey, + clearResponseCache, + getCachedResponse, + resetCacheState, + storeCachedResponse, +} from "../../src/lib/response-cache.js"; +import { useTestConfigDir } from "../helpers.js"; + +const getConfigDir = useTestConfigDir("response-cache-"); + +// Reset cache disabled state between tests +let savedNoCache: string | undefined; + +beforeEach(() => { + savedNoCache = process.env.SENTRY_NO_CACHE; + delete process.env.SENTRY_NO_CACHE; + resetCacheState(); +}); + +afterEach(() => { + if (savedNoCache !== undefined) { + process.env.SENTRY_NO_CACHE = savedNoCache; + } else { + delete process.env.SENTRY_NO_CACHE; + } + resetCacheState(); +}); + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** Create a mock Response with JSON body and optional headers */ +function mockResponse( + body: unknown, + status = 200, + headers: Record<string, string> = {} +): Response { + return new Response(JSON.stringify(body), { + status, + headers: { + "content-type": "application/json", + ...headers, + }, + }); +} + +const TEST_URL = "https://us.sentry.io/api/0/organizations/myorg/projects/"; +const TEST_METHOD = "GET"; +const TEST_BODY = { data: [{ id: 1, name: "test" }] }; + +// --------------------------------------------------------------------------- +// Store and Retrieve +// --------------------------------------------------------------------------- + +describe("store and retrieve", () => { + test("round-trip: store then retrieve returns same body", async () => { + const response = mockResponse(TEST_BODY); + await storeCachedResponse(TEST_METHOD,
TEST_URL, {}, response); + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeDefined(); + expect(cached!.status).toBe(200); + + const cachedBody = await cached!.json(); + expect(cachedBody).toEqual(TEST_BODY); + }); + + test("preserves Link header for pagination", async () => { + const linkHeader = + '<https://us.sentry.io/api/0/organizations/myorg/projects/?cursor=100:1:0>; rel="next"'; + const response = mockResponse(TEST_BODY, 200, { link: linkHeader }); + await storeCachedResponse(TEST_METHOD, TEST_URL, {}, response); + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeDefined(); + expect(cached!.headers.get("link")).toBe(linkHeader); + }); + + test("cache miss returns undefined", async () => { + const cached = await getCachedResponse( + TEST_METHOD, + "https://us.sentry.io/api/0/organizations/nonexistent/projects/", + {} + ); + expect(cached).toBeUndefined(); + }); + + test("different URLs produce different cache entries", async () => { + const url1 = "https://us.sentry.io/api/0/organizations/org1/projects/"; + const url2 = "https://us.sentry.io/api/0/organizations/org2/projects/"; + const body1 = { data: "org1" }; + const body2 = { data: "org2" }; + + await storeCachedResponse(TEST_METHOD, url1, {}, mockResponse(body1)); + await storeCachedResponse(TEST_METHOD, url2, {}, mockResponse(body2)); + + const cached1 = await getCachedResponse(TEST_METHOD, url1, {}); + const cached2 = await getCachedResponse(TEST_METHOD, url2, {}); + + expect(await cached1!.json()).toEqual(body1); + expect(await cached2!.json()).toEqual(body2); + }); + + test("query param order does not affect cache lookup", async () => { + const url1 = "https://us.sentry.io/api/0/orgs/?a=1&b=2"; + const url2 = "https://us.sentry.io/api/0/orgs/?b=2&a=1"; + + await storeCachedResponse(TEST_METHOD, url1, {}, mockResponse(TEST_BODY)); + + const cached = await getCachedResponse(TEST_METHOD, url2, {}); + expect(cached).toBeDefined(); + expect(await cached!.json()).toEqual(TEST_BODY); + }); +}); + 
+// --------------------------------------------------------------------------- +// Method isolation +// --------------------------------------------------------------------------- + +describe("method isolation", () => { + test("only GET requests are cached", async () => { + await storeCachedResponse("POST", TEST_URL, {}, mockResponse(TEST_BODY)); + + const cached = await getCachedResponse("POST", TEST_URL, {}); + expect(cached).toBeUndefined(); + }); + + test("GET lookup does not return POST-stored data", async () => { + // This is already guaranteed since POST doesn't store, but test explicitly + await storeCachedResponse("GET", TEST_URL, {}, mockResponse(TEST_BODY)); + + // GET should find it + const getResult = await getCachedResponse("GET", TEST_URL, {}); + expect(getResult).toBeDefined(); + + // POST should not even look + const postResult = await getCachedResponse("POST", TEST_URL, {}); + expect(postResult).toBeUndefined(); + }); +}); + +// --------------------------------------------------------------------------- +// Non-2xx responses +// --------------------------------------------------------------------------- + +describe("non-2xx responses", () => { + test("4xx responses are not cached", async () => { + await storeCachedResponse( + TEST_METHOD, + TEST_URL, + {}, + mockResponse({ detail: "not found" }, 404) + ); + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeUndefined(); + }); + + test("5xx responses are not cached", async () => { + await storeCachedResponse( + TEST_METHOD, + TEST_URL, + {}, + mockResponse({ detail: "server error" }, 500) + ); + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeUndefined(); + }); +}); + +// --------------------------------------------------------------------------- +// Cache-Control: no-store +// --------------------------------------------------------------------------- + +describe("Cache-Control: no-store", () => { + test("responses 
with no-store are not cached", async () => { + const response = mockResponse(TEST_BODY, 200, { + "cache-control": "no-store", + }); + await storeCachedResponse(TEST_METHOD, TEST_URL, {}, response); + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeUndefined(); + }); +}); + +// --------------------------------------------------------------------------- +// clearResponseCache +// --------------------------------------------------------------------------- + +describe("clearResponseCache", () => { + test("removes all cached entries", async () => { + const url1 = "https://us.sentry.io/api/0/orgs/a/projects/"; + const url2 = "https://us.sentry.io/api/0/orgs/b/projects/"; + + await storeCachedResponse(TEST_METHOD, url1, {}, mockResponse({ a: 1 })); + await storeCachedResponse(TEST_METHOD, url2, {}, mockResponse({ b: 2 })); + + // Verify entries exist + expect(await getCachedResponse(TEST_METHOD, url1, {})).toBeDefined(); + + await clearResponseCache(); + + // Verify all cleared + expect(await getCachedResponse(TEST_METHOD, url1, {})).toBeUndefined(); + expect(await getCachedResponse(TEST_METHOD, url2, {})).toBeUndefined(); + }); + + test("is idempotent — clearing empty cache does not throw", async () => { + await clearResponseCache(); + await clearResponseCache(); + // No error + }); +}); + +// --------------------------------------------------------------------------- +// Cache bypass +// --------------------------------------------------------------------------- + +describe("cache bypass", () => { + test("SENTRY_NO_CACHE=1 bypasses cache reads", async () => { + await storeCachedResponse( + TEST_METHOD, + TEST_URL, + {}, + mockResponse(TEST_BODY) + ); + + process.env.SENTRY_NO_CACHE = "1"; + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeUndefined(); + }); + + test("SENTRY_NO_CACHE=1 bypasses cache writes", async () => { + process.env.SENTRY_NO_CACHE = "1"; + + await storeCachedResponse( 
+ TEST_METHOD, + TEST_URL, + {}, + mockResponse(TEST_BODY) + ); + + // Remove the bypass to verify nothing was written + delete process.env.SENTRY_NO_CACHE; + + const cached = await getCachedResponse(TEST_METHOD, TEST_URL, {}); + expect(cached).toBeUndefined(); + }); +}); + +// --------------------------------------------------------------------------- +// buildCacheKey +// --------------------------------------------------------------------------- + +describe("buildCacheKey", () => { + test("produces a 64-char hex string", () => { + const key = buildCacheKey("GET", TEST_URL); + expect(key).toMatch(/^[0-9a-f]{64}$/); + }); + + test("is deterministic", () => { + const key1 = buildCacheKey("GET", TEST_URL); + const key2 = buildCacheKey("GET", TEST_URL); + expect(key1).toBe(key2); + }); + + test("different methods produce different keys", () => { + const getKey = buildCacheKey("GET", TEST_URL); + const postKey = buildCacheKey("POST", TEST_URL); + expect(getKey).not.toBe(postKey); + }); +}); + +// --------------------------------------------------------------------------- +// No-cache tier (polling endpoints) +// --------------------------------------------------------------------------- + +describe("no-cache tier", () => { + test("autofix URLs are not cached", async () => { + const autofixUrl = + "https://us.sentry.io/api/0/organizations/myorg/issues/123/autofix/"; + await storeCachedResponse( + TEST_METHOD, + autofixUrl, + {}, + mockResponse({ autofix: { status: "PROCESSING" } }) + ); + + const cached = await getCachedResponse(TEST_METHOD, autofixUrl, {}); + expect(cached).toBeUndefined(); + }); + + test("root-cause URLs are not cached", async () => { + const rootCauseUrl = + "https://us.sentry.io/api/0/organizations/myorg/issues/123/root-cause/"; + await storeCachedResponse( + TEST_METHOD, + rootCauseUrl, + {}, + mockResponse({ cause: "something" }) + ); + + const cached = await getCachedResponse(TEST_METHOD, rootCauseUrl, {}); + expect(cached).toBeUndefined(); + 
}); +}); + +// --------------------------------------------------------------------------- +// File structure +// --------------------------------------------------------------------------- + +describe("file structure", () => { + test("creates cache directory under config dir", async () => { + await storeCachedResponse( + TEST_METHOD, + TEST_URL, + {}, + mockResponse(TEST_BODY) + ); + + const cacheDir = join(getConfigDir(), "cache", "responses"); + const files = await readdir(cacheDir); + expect(files.length).toBe(1); + expect(files[0]).toMatch(/^[0-9a-f]{64}\.json$/); + }); +});