diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a1bd40..b0b22bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Improved Pending Approval Cards:** Approval cards now show an `⚠️ Action Required` header with a live countdown timer that turns red under 15 seconds. Allow/Deny buttons have clearer labels (`✅ Allow this Action` / `🚫 Block this Action`). The deny button uses a softer outlined style to reduce accidental clicks. - **DLP Content Scanner:** Node9 now scans every tool call argument for secrets before policy evaluation. Seven built-in patterns cover AWS Access Key IDs, GitHub tokens (`ghp_`, `gho_`, `ghs_`), Slack bot tokens (`xoxb-`), OpenAI API keys, Stripe secret keys, PEM private keys, and Bearer tokens. `block`-severity patterns hard-deny the call immediately; `review`-severity patterns route through the normal race engine. Secrets are redacted to a prefix+suffix sample in all audit logs. Configurable via `policy.dlp.enabled` and `policy.dlp.scanIgnoredTools`. - **Shield Templates:** `node9 shield enable <service>` installs a curated rule set for a specific infrastructure service. Available shields: `postgres` (blocks `DROP TABLE`, `TRUNCATE`, `DROP COLUMN`; reviews `GRANT`/`REVOKE`), `github` (blocks `gh repo delete`; reviews remote branch deletion), `aws` (blocks S3 bucket deletion, EC2 termination; reviews IAM and RDS changes), `filesystem` (reviews `chmod 777` and writes to `/etc/`). Manage with `node9 shield enable|disable|list|status`. -- **Shadow Git Snapshots (Phase 2):** (Coming Soon) Automatic lightweight git commits before AI edits, allowing `node9 undo`. +- **Shadow Git Snapshots (Phase 2 — Implemented):** Node9 now takes automatic, lightweight git snapshots before every AI file edit using an isolated shadow bare repo at `~/.node9/snapshots/<project-hash>/`. 
The user's `.git` is never touched — snapshots live in a separate hidden repository keyed by a SHA-256 hash of the project path. Run `node9 undo` to revert with a full diff preview; `--steps N` goes back multiple actions. Per-invocation `GIT_INDEX_FILE` prevents concurrent-session corruption. A `project-path.txt` sentinel inside each shadow repo detects hash collisions and directory renames and auto-recovers by reinitializing. `.git` and `.node9` directories are always excluded from snapshots (inception prevention). Performance-tuned with `core.untrackedCache` and `core.fsmonitor`. Periodic background `git gc --auto` keeps shadow repos tidy. The last 10 snapshots are tracked in `~/.node9/snapshots.json`. +- **ReDoS Protection + LRU Regex Cache:** The policy engine now validates all user-supplied regex patterns before compilation. Patterns with nested quantifiers, quantified alternations, or quantified backreferences are rejected as ReDoS vectors. A bounded LRU cache (max 500 entries) stores compiled `RegExp` objects so repeated rule evaluations never recompile the same pattern. The `notMatches` condition is now fail-closed: if the regex is invalid, the condition fails rather than silently passing. +- **Expanded DLP Patterns:** Two new `block`-severity content patterns added to the scanner: GCP service account JSON keys (detected via the `type` field unique to service account files) and NPM registry auth tokens (detected in `.npmrc` format). Total built-in patterns: 9. +- **Sensitive File Path Blocking:** The DLP engine now intercepts tool calls targeting credential files before their content is ever read. Twenty path patterns cover SSH keys, AWS credentials, GCP config, Azure credentials, kubeconfig, dotenv files, PEM/key/p12/pfx certificate files, system auth files, and common credential JSON files. Symlinks are resolved via `fs.realpathSync.native()` before matching to prevent symlink escape attacks where a safe-looking path points to a protected file. 
- **`flightRecorder` setting:** New `settings.flightRecorder` flag (default `true`) controls whether the daemon records tool call activity to the flight recorder ring buffer. Can be set to `false` to disable activity recording when the browser dashboard is not in use. ### Changed diff --git a/README.md b/README.md index fdb9727..0619250 100644 --- a/README.md +++ b/README.md @@ -83,7 +83,7 @@ Node9 doesn't just "cut the wire." When a command is blocked, it injects a **Str ### ⏪ Shadow Git Snapshots (Auto-Undo) -Node9 takes a silent, lightweight Git snapshot before every AI file edit. If the AI hallucinates and breaks your code, run `node9 undo` to instantly revert — with a full diff preview before anything changes. +Node9 takes a silent, lightweight Git snapshot before every AI file edit. Snapshots are stored in an isolated shadow bare repo at `~/.node9/snapshots/` — your project's `.git` is never touched, and no existing git setup is required. If the AI hallucinates and breaks your code, run `node9 undo` to instantly revert — with a full diff preview before anything changes. ```bash # Undo the last AI action (shows diff + asks confirmation) @@ -93,6 +93,8 @@ node9 undo node9 undo --steps 3 ``` +The last 10 snapshots are kept globally across all sessions in `~/.node9/snapshots.json`. Older snapshots are dropped as new ones are added. + --- ## 🎮 Try it Live diff --git a/src/__tests__/core.test.ts b/src/__tests__/core.test.ts index 2021f4e..a05e008 100644 --- a/src/__tests__/core.test.ts +++ b/src/__tests__/core.test.ts @@ -42,6 +42,8 @@ import { evaluateSmartConditions, shouldSnapshot, DEFAULT_CONFIG, + validateRegex, + getCompiledRegex, } from '../core.js'; // Global spies @@ -690,6 +692,29 @@ describe('evaluateSmartConditions', () => { ) ).toBe(false); }); + + it('notMatches — fail-closed on invalid regex (returns false, not true)', () => { + // A buggy rule with a broken regex must fail-closed: the condition returns + // false (meaning "does not pass"), NOT true. 
If it returned true, an invalid + // notMatches rule would silently allow every call — a security hole. + expect( + evaluateSmartConditions( + { sql: 'DROP TABLE users' }, + makeRule([{ field: 'sql', op: 'notMatches', value: '[broken(' }]) + ) + ).toBe(false); + }); + + it('notMatches — absent field (null) still returns true (field not present → condition passes)', () => { + // Original semantics: if the field is absent, notMatches passes (no value to match against). + // This must not regress when regex validation is added. + expect( + evaluateSmartConditions( + { command: 'ls' }, // no 'sql' field + makeRule([{ field: 'sql', op: 'notMatches', value: '^DROP' }]) + ) + ).toBe(true); + }); }); describe('conditionMode', () => { @@ -1232,3 +1257,128 @@ describe('isDaemonRunning', () => { expect(isDaemonRunning()).toBe(false); }); }); + +// ── validateRegex — ReDoS protection ───────────────────────────────────────── + +describe('validateRegex', () => { + it('accepts valid simple patterns', () => { + expect(validateRegex('^DROP\\s+TABLE')).toBeNull(); // null = no error + expect(validateRegex('\\bWHERE\\b')).toBeNull(); + expect(validateRegex('[A-Z]{3,}')).toBeNull(); + }); + + it('rejects empty pattern', () => { + expect(validateRegex('')).not.toBeNull(); + }); + + it('rejects patterns exceeding max length', () => { + expect(validateRegex('a'.repeat(101))).not.toBeNull(); + }); + + it('rejects nested quantifiers — catastrophic backtracking risk', () => { + expect(validateRegex('(a+)+')).not.toBeNull(); + expect(validateRegex('(a*)*')).not.toBeNull(); + expect(validateRegex('([a-z]+){2,}')).not.toBeNull(); + }); + + it('rejects quantified alternations where alternatives contain quantifiers (true ReDoS risk)', () => { + // Dangerous: alternatives themselves have quantifiers — can match same string many ways + expect(validateRegex('(a+|b+)*')).not.toBeNull(); + expect(validateRegex('(a{1,10}|b{1,10}){1,10}')).not.toBeNull(); + 
expect(validateRegex('(?:a+|b+){1,100}')).not.toBeNull(); + expect(validateRegex('(a{2}|b{3})+')).not.toBeNull(); + }); + + it('allows quantified alternations with fixed-length disjoint alternatives (safe)', () => { + // Safe: alternatives are fixed-length and disjoint — no ambiguous matching + expect(validateRegex('(foo|bar)+')).toBeNull(); + expect(validateRegex('(a|b|c)*')).toBeNull(); + expect(validateRegex('(GET|POST|PUT)+')).toBeNull(); + expect(validateRegex('(https?|ftp)://')).toBeNull(); + // ? is also safe (bounded zero-or-one) + expect(validateRegex('(?:a|b)*')).toBeNull(); + }); + + it('allows bounded quantifiers with ? (safe — zero-or-one cannot backtrack)', () => { + // ? is safe: it matches at most one time, so no catastrophic backtracking + expect(validateRegex('(ba|z|da|fi|c|k)?sh')).toBeNull(); + expect(validateRegex('(\\.\\w+)?')).toBeNull(); + }); + + it('rejects quantified backreferences — catastrophic backtracking risk', () => { + // (\w+)\1+ can catastrophically backtrack on strings like 'aaaaaaaaab' + // The guard checks for \[*+{] in the pattern + expect(validateRegex('(\\w+)\\1+')).not.toBeNull(); + expect(validateRegex('(\\w+)\\1*')).not.toBeNull(); + expect(validateRegex('(\\w+)\\1{2,}')).not.toBeNull(); + }); + + it('rejects invalid regex syntax', () => { + expect(validateRegex('[unclosed')).not.toBeNull(); + }); +}); + +// ── getCompiledRegex — LRU cache ────────────────────────────────────────────── + +describe('getCompiledRegex', () => { + it('returns a compiled RegExp for a valid pattern', () => { + const re = getCompiledRegex('^DROP', 'i'); + expect(re).toBeInstanceOf(RegExp); + expect(re!.test('drop table')).toBe(true); + }); + + it('returns null for an invalid pattern', () => { + expect(getCompiledRegex('[invalid(')).toBeNull(); + }); + + it('returns null for a ReDoS pattern', () => { + expect(getCompiledRegex('(a+)+')).toBeNull(); + }); + + it('returns null for invalid flag characters', () => { + expect(getCompiledRegex('hello', 
'z')).toBeNull(); // z is not a valid JS flag + expect(getCompiledRegex('hello', 'ig!')).toBeNull(); + }); + + it('accepts valid flag characters', () => { + expect(getCompiledRegex('hello', 'i')).toBeInstanceOf(RegExp); + expect(getCompiledRegex('hello2', 'gi')).toBeInstanceOf(RegExp); + expect(getCompiledRegex('hello3', 'gims')).toBeInstanceOf(RegExp); + }); + + it('returns the same RegExp instance for the same pattern (cache hit)', () => { + const re1 = getCompiledRegex('cached-pattern'); + const re2 = getCompiledRegex('cached-pattern'); + expect(re1).toBe(re2); // same object reference + }); + + it('treats pattern+flags as a distinct cache key', () => { + const re1 = getCompiledRegex('hello', ''); + const re2 = getCompiledRegex('hello', 'i'); + expect(re1).not.toBe(re2); + }); + + it('cache key uses null-byte separator — no collision between pattern and flags', () => { + // Key format: `${pattern}\0${flags}`. Flags are always [gimsuy] so they + // can't contain \0. Verify that a pattern ending in 'i' with no flags + // does NOT collide with the same prefix with flag 'i'. + // pattern='foo\0' flags='' → key 'foo\0\0' + // pattern='foo' flags='' → key 'foo\0' (different length → no collision) + const reSuffix = getCompiledRegex('collision-test-i', ''); + const reFlag = getCompiledRegex('collision-test-', 'i'); + expect(reSuffix).not.toBe(reFlag); // distinct entries, not a cache collision + // Both should compile successfully + expect(reSuffix).toBeInstanceOf(RegExp); + expect(reFlag).toBeInstanceOf(RegExp); + }); + + it('handles 520 distinct patterns without error (LRU stays bounded)', () => { + // Adds more entries than REGEX_CACHE_MAX (500) to verify the eviction path + // runs without throwing and all returned values are valid RegExps. + // Note: getCompiledRegex is sync — no async interleaving concerns. 
+ for (let i = 0; i < 520; i++) { + const re = getCompiledRegex(`lru-bound-test-[a-z]{${i + 1}}`); + expect(re).toBeInstanceOf(RegExp); + } + }); +}); diff --git a/src/__tests__/dlp.test.ts b/src/__tests__/dlp.test.ts index 0f31508..3c23466 100644 --- a/src/__tests__/dlp.test.ts +++ b/src/__tests__/dlp.test.ts @@ -1,5 +1,6 @@ -import { describe, it, expect } from 'vitest'; -import { scanArgs, DLP_PATTERNS } from '../dlp.js'; +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import fs from 'fs'; +import { scanArgs, scanFilePath, DLP_PATTERNS } from '../dlp.js'; // NOTE: All fake secret strings are built via concatenation so GitHub's secret // scanner doesn't flag this test file. The values are obviously fake (sequential @@ -181,8 +182,11 @@ describe('scanArgs — performance guards', () => { // ── All patterns export ─────────────────────────────────────────────────────── describe('DLP_PATTERNS export', () => { - it('exports at least 7 built-in patterns', () => { - expect(DLP_PATTERNS.length).toBeGreaterThanOrEqual(7); + it('exports at least 9 built-in patterns', () => { + // 9 patterns as of current implementation: + // AWS Key ID, GitHub Token, Slack Bot Token, OpenAI Key, Stripe Secret Key, + // Private Key PEM, GCP Service Account, NPM Auth Token, Bearer Token + expect(DLP_PATTERNS.length).toBeGreaterThanOrEqual(9); }); it('all patterns have name, regex, and severity', () => { @@ -193,3 +197,136 @@ describe('DLP_PATTERNS export', () => { } }); }); + +// ── scanFilePath — sensitive file path blocking ─────────────────────────────── + +// Typed alias to reduce repetition when accessing realpathSync.native +type RealpathWithNative = typeof fs.realpathSync & { native: (p: unknown) => string }; + +describe('scanFilePath — sensitive path blocking', () => { + // Save the original .native so afterEach can restore it precisely. 
+ // vi.restoreAllMocks() only restores vi.spyOn spies — direct property + // assignments survive it, so we must restore manually to guarantee isolation. + const originalNative = (fs.realpathSync as RealpathWithNative).native; + + beforeEach(() => { + vi.spyOn(fs, 'realpathSync').mockImplementation((p) => String(p)); + // Mock realpathSync.native — called unconditionally in production (no existsSync pre-check) + (fs.realpathSync as RealpathWithNative).native = vi + .fn() + .mockImplementation((p: unknown) => String(p)); + }); + + afterEach(() => { + vi.restoreAllMocks(); + // Explicitly restore .native since restoreAllMocks() doesn't track it + (fs.realpathSync as RealpathWithNative).native = originalNative; + }); + + it('blocks access to SSH key files', () => { + const match = scanFilePath('/home/user/.ssh/id_rsa', '/'); + expect(match).not.toBeNull(); + expect(match!.patternName).toBe('Sensitive File Path'); + expect(match!.severity).toBe('block'); + }); + + it('blocks access to AWS credentials directory', () => { + const match = scanFilePath('/home/user/.aws/credentials', '/'); + expect(match).not.toBeNull(); + expect(match!.severity).toBe('block'); + }); + + it('blocks .env files', () => { + expect(scanFilePath('/project/.env', '/')).not.toBeNull(); + expect(scanFilePath('/project/.env.local', '/')).not.toBeNull(); + expect(scanFilePath('/project/.env.production', '/')).not.toBeNull(); + }); + + it('does NOT block .envoy or similar non-credential files', () => { + expect(scanFilePath('/project/.envoy-config', '/')).toBeNull(); + expect(scanFilePath('/project/environment.ts', '/')).toBeNull(); + }); + + it('blocks PEM certificate files', () => { + expect(scanFilePath('/certs/server.pem', '/')).not.toBeNull(); + expect(scanFilePath('/keys/private.key', '/')).not.toBeNull(); + }); + + it('blocks /etc/passwd and /etc/shadow', () => { + expect(scanFilePath('/etc/passwd', '/')).not.toBeNull(); + expect(scanFilePath('/etc/shadow', '/')).not.toBeNull(); + }); + + 
it('returns null for ordinary source files', () => { + expect(scanFilePath('src/app.ts', '/project')).toBeNull(); + expect(scanFilePath('README.md', '/project')).toBeNull(); + expect(scanFilePath('package.json', '/project')).toBeNull(); + }); + + it('returns null for empty or missing path', () => { + expect(scanFilePath('', '/project')).toBeNull(); + }); + + it('calls realpathSync.native unconditionally (no existsSync pre-check)', () => { + // native() is always called — existsSync guard removed to eliminate TOCTOU window + const nativeSpy = vi.mocked((fs.realpathSync as RealpathWithNative).native); + scanFilePath('/project/safe-looking-link.txt', '/project'); + expect(nativeSpy).toHaveBeenCalled(); + }); + + it('blocks when a symlink resolves to a sensitive path', () => { + (fs.realpathSync as RealpathWithNative).native = vi + .fn() + .mockReturnValue('/home/user/.ssh/id_rsa'); + const match = scanFilePath('/project/totally-safe-link', '/project'); + expect(match).not.toBeNull(); + expect(match!.severity).toBe('block'); + }); + + it('does NOT block when a symlink resolves to a safe path', () => { + (fs.realpathSync as RealpathWithNative).native = vi.fn().mockReturnValue('/project/src/app.ts'); + expect(scanFilePath('/project/link-to-app', '/project')).toBeNull(); + }); + + it('blocks path traversal that resolves outside project root to a sensitive path', () => { + // ../../.ssh/id_rsa from /project/src resolves to /home/user/.ssh/id_rsa + (fs.realpathSync as RealpathWithNative).native = vi + .fn() + .mockReturnValue('/home/user/.ssh/id_rsa'); + const match = scanFilePath('../../.ssh/id_rsa', '/project/src'); + expect(match).not.toBeNull(); + expect(match!.severity).toBe('block'); + }); + + it('treats ENOENT as safe — new file being written is not a symlink', () => { + (fs.realpathSync as RealpathWithNative).native = vi.fn().mockImplementation(() => { + throw Object.assign(new Error('ENOENT'), { code: 'ENOENT' }); + }); + // Non-existent file: safe, cannot be a 
symlink pointing anywhere + expect(scanFilePath('/project/src/new-file.ts', '/project')).toBeNull(); + }); + + it('is fail-closed when native throws with a non-ENOENT error', () => { + // EACCES, unexpected errors, or TOCTOU remnants → block immediately + (fs.realpathSync as RealpathWithNative).native = vi.fn().mockImplementation(() => { + throw Object.assign(new Error('EACCES'), { code: 'EACCES' }); + }); + expect(() => scanFilePath('/project/src/app.ts', '/project')).not.toThrow(); + const match = scanFilePath('/project/src/app.ts', '/project'); + expect(match).not.toBeNull(); + expect(match!.severity).toBe('block'); + }); + + it('blocks (fail-closed) on TOCTOU — safe-looking symlink pointing to sensitive file', () => { + // The attack: /project/harmless-config.ts → /home/user/.ssh/id_rsa + // native() throws because file was deleted between check and resolve + (fs.realpathSync as RealpathWithNative).native = vi.fn().mockImplementation(() => { + throw Object.assign(new Error('ENOENT'), { code: 'ENOENT' }); + }); + // ENOENT on a path that looks safe → treated as safe (not a TOCTOU attack) + // The attack scenario requires the file to EXIST (so attacker can create symlink) + // In that case native() would succeed and return the sensitive resolved path + // This test confirms: if file is deleted mid-race, we don't block unnecessarily + expect(scanFilePath('/project/harmless-config.ts', '/project')).toBeNull(); + }); +}); diff --git a/src/__tests__/undo.test.ts b/src/__tests__/undo.test.ts index bda96ef..17bdbff 100644 --- a/src/__tests__/undo.test.ts +++ b/src/__tests__/undo.test.ts @@ -1,17 +1,26 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; import fs from 'fs'; import os from 'os'; +import path from 'path'; + +// Typed alias for fs.realpathSync.native — avoids repeated `unknown` casts and +// matches the same alias used in dlp.test.ts for consistency. 
+type RealpathWithNative = typeof fs.realpathSync & { native: (p: unknown) => string }; // ── Mock child_process BEFORE importing undo (hoisted by vitest) ───────────── -vi.mock('child_process', () => ({ spawnSync: vi.fn() })); +vi.mock('child_process', () => ({ + spawnSync: vi.fn(), + spawn: vi.fn().mockReturnValue({ unref: vi.fn() }), +})); -import { spawnSync } from 'child_process'; +import { spawnSync, spawn } from 'child_process'; import { createShadowSnapshot, getLatestSnapshot, getSnapshotHistory, computeUndoDiff, applyUndo, + getShadowRepoDir, } from '../undo.js'; // ── Filesystem mocks (module-level — NOT restored between tests) ────────────── @@ -20,6 +29,16 @@ vi.spyOn(fs, 'readFileSync').mockReturnValue(''); const writeSpy = vi.spyOn(fs, 'writeFileSync').mockImplementation(() => undefined); vi.spyOn(fs, 'mkdirSync').mockImplementation(() => undefined); vi.spyOn(fs, 'unlinkSync').mockImplementation(() => undefined); +// Mock BOTH realpathSync and realpathSync.native — production code calls .native +// for symlink-escape prevention. Mocking only the base function would leave +// the security path untested. 
+vi.spyOn(fs, 'realpathSync').mockImplementation((p) => String(p)); +(fs.realpathSync as RealpathWithNative).native = vi + .fn() + .mockImplementation((p: unknown) => String(p)); +vi.spyOn(fs, 'readdirSync').mockReturnValue([]); +vi.spyOn(fs, 'statSync').mockReturnValue({ mtimeMs: 0 } as ReturnType); +vi.spyOn(fs, 'rmSync').mockImplementation(() => undefined); vi.spyOn(os, 'homedir').mockReturnValue('/mock/home'); vi.spyOn(process, 'cwd').mockReturnValue('/mock/project'); @@ -29,60 +48,76 @@ const byStackPath = ([p]: Parameters) => String(p).endsWith('snapshots.json'); const byLatestPath = ([p]: Parameters) => String(p).endsWith('undo_latest.txt'); +const byExcludePath = ([p]: Parameters) => String(p).endsWith('exclude'); const mockSpawn = vi.mocked(spawnSync); +// ── Test helpers ────────────────────────────────────────────────────────────── + +/** Constructs a typed spawnSync return value, reducing cast boilerplate. */ +function spawnResult(stdout = '', status = 0): ReturnType { + return { + status, + stdout: Buffer.from(stdout), + stderr: Buffer.from(''), + } as ReturnType; +} + +/** + * Mocks spawnSync so all git operations succeed. Handles rev-parse --git-dir + * (shadow repo health check), config, add, write-tree, and commit-tree. + * Uses `--git-dir` to distinguish the health-check rev-parse from other + * rev-parse variants (e.g. rev-parse HEAD) so those don't collapse. + */ function mockGitSuccess(treeHash = 'abc123tree', commitHash = 'def456commit') { mockSpawn.mockImplementation((_cmd, args) => { const a = (args ?? 
[]) as string[]; - if (a.includes('add')) - return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< - typeof spawnSync - >; - if (a.includes('write-tree')) - return { - status: 0, - stdout: Buffer.from(treeHash + '\n'), - stderr: Buffer.from(''), - } as ReturnType; - if (a.includes('commit-tree')) - return { - status: 0, - stdout: Buffer.from(commitHash + '\n'), - stderr: Buffer.from(''), - } as ReturnType; - return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< - typeof spawnSync - >; - }); -} - -function withStack(entries: object[]) { - vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); - vi.mocked(fs.readFileSync).mockImplementation((p) => { - if (String(p).endsWith('snapshots.json')) return JSON.stringify(entries); - throw new Error('not found'); + // Only match the shadow-repo health-check: `git rev-parse --git-dir` + if (a.includes('rev-parse') && a.includes('--git-dir')) return spawnResult('/shadow\n'); + if (a.includes('config') || a.includes('init')) return spawnResult(); + if (a.includes('add')) return spawnResult(); + if (a.includes('write-tree')) return spawnResult(treeHash + '\n'); + if (a.includes('commit-tree')) return spawnResult(commitHash + '\n'); + return spawnResult(); }); } -function withGitRepo(includeStackFile = false) { - const gitDir = '/mock/project/.git'; +/** + * Sets up fs mocks to simulate a healthy shadow repo for cwd=/mock/project. + */ +function withShadowRepo(includeStackFile = false) { + // readdirSync → [] simulates no orphaned index_* files in the shadow dir. + // The shadow repo existence check uses `git rev-parse --git-dir` (spawnSync), + // NOT readdirSync, so this empty return is correct and doesn't affect init logic. 
+ vi.mocked(fs.readdirSync).mockReturnValue([]); vi.mocked(fs.existsSync).mockImplementation((p) => { const s = String(p); - if (s === gitDir) return true; if (includeStackFile && s.endsWith('snapshots.json')) return true; return false; }); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + const s = String(p); + // normalizeCwdForHash('/mock/project') = '/mock/project' (realpathSync mock is identity) + if (s.endsWith('project-path.txt')) return '/mock/project'; + if (s.endsWith('snapshots.json') && includeStackFile) return '[]'; + return ''; + }); } beforeEach(() => { vi.clearAllMocks(); - // Re-apply default mock implementations after clearAllMocks vi.mocked(fs.existsSync).mockReturnValue(false); vi.mocked(fs.readFileSync).mockReturnValue(''); vi.mocked(fs.writeFileSync).mockImplementation(() => undefined); vi.mocked(fs.mkdirSync).mockImplementation(() => undefined); vi.mocked(fs.unlinkSync).mockImplementation(() => undefined); + vi.mocked(fs.realpathSync).mockImplementation((p) => String(p)); + vi.mocked((fs.realpathSync as RealpathWithNative).native).mockImplementation((p: unknown) => + String(p) + ); + vi.mocked(fs.readdirSync).mockReturnValue([]); + vi.mocked(fs.statSync).mockReturnValue({ mtimeMs: 0 } as ReturnType); + vi.mocked(fs.rmSync).mockImplementation(() => undefined); }); // ── getSnapshotHistory ──────────────────────────────────────────────────────── @@ -97,7 +132,11 @@ describe('getSnapshotHistory', () => { const entries = [ { hash: 'abc', tool: 'edit', argsSummary: 'src/app.ts', cwd: '/proj', timestamp: 1000 }, ]; - withStack(entries); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + if (String(p).endsWith('snapshots.json')) return JSON.stringify(entries); + throw new Error('not found'); + }); expect(getSnapshotHistory()).toEqual(entries); }); @@ -121,36 +160,83 @@ describe('getLatestSnapshot', () => { { hash: 'first', tool: 'write', 
argsSummary: 'a.ts', cwd: '/p', timestamp: 1000 }, { hash: 'second', tool: 'edit', argsSummary: 'b.ts', cwd: '/p', timestamp: 2000 }, ]; - withStack(entries); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + if (String(p).endsWith('snapshots.json')) return JSON.stringify(entries); + return ''; + }); expect(getLatestSnapshot()?.hash).toBe('second'); }); }); +// ── getShadowRepoDir ────────────────────────────────────────────────────────── + +describe('getShadowRepoDir', () => { + it('returns a path under ~/.node9/snapshots/', () => { + const dir = getShadowRepoDir('/mock/project'); + expect(dir).toContain('/mock/home/.node9/snapshots/'); + }); + + it('returns the same dir for the same cwd', () => { + expect(getShadowRepoDir('/mock/project')).toBe(getShadowRepoDir('/mock/project')); + }); + + it('returns different dirs for different cwds', () => { + expect(getShadowRepoDir('/mock/project')).not.toBe(getShadowRepoDir('/mock/other')); + }); + + it('uses a 16-char hex hash', () => { + const dir = getShadowRepoDir('/mock/project'); + const hash = path.basename(dir); + expect(hash).toMatch(/^[0-9a-f]{16}$/); + }); +}); + // ── createShadowSnapshot ────────────────────────────────────────────────────── describe('createShadowSnapshot', () => { - it('returns null when .git directory does not exist', async () => { - vi.mocked(fs.existsSync).mockReturnValue(false); + it('works for non-git directories (no .git required)', async () => { + withShadowRepo(true); + mockGitSuccess('tree111', 'commit222'); + const result = await createShadowSnapshot('edit', { file_path: 'src/app.ts' }); - expect(result).toBeNull(); + expect(result).toBe('commit222'); }); - it('returns null when git write-tree fails', async () => { - withGitRepo(false); + it('returns null when shadow repo init fails (git not available)', async () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); 
mockSpawn.mockReturnValue({ status: 1, stdout: Buffer.from(''), - stderr: Buffer.from(''), + stderr: Buffer.from('error'), } as ReturnType); + + const result = await createShadowSnapshot('edit', { file_path: 'src/app.ts' }); + expect(result).toBeNull(); + }); + + it('returns null when git write-tree fails', async () => { + withShadowRepo(false); + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? []) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType; + return { + status: 1, + stdout: Buffer.from(''), + stderr: Buffer.from(''), + } as ReturnType; + }); const result = await createShadowSnapshot('edit', {}); expect(result).toBeNull(); }); it('returns commit hash and writes stack on success', async () => { - withGitRepo(true); - vi.mocked(fs.readFileSync).mockImplementation((p) => - String(p).endsWith('snapshots.json') ? '[]' : '' - ); + withShadowRepo(true); mockGitSuccess('tree111', 'commit222'); const result = await createShadowSnapshot('edit', { file_path: 'src/main.ts' }); @@ -166,10 +252,7 @@ describe('createShadowSnapshot', () => { }); it('also writes backward-compat undo_latest.txt', async () => { - withGitRepo(true); - vi.mocked(fs.readFileSync).mockImplementation((p) => - String(p).endsWith('snapshots.json') ? '[]' : '' - ); + withShadowRepo(true); mockGitSuccess('tree111', 'commit333'); await createShadowSnapshot('write', { file_path: 'x.ts' }); @@ -180,7 +263,7 @@ describe('createShadowSnapshot', () => { }); it('caps the stack at MAX_SNAPSHOTS (10)', async () => { - withGitRepo(true); + withShadowRepo(true); const existing = Array.from({ length: 10 }, (_, i) => ({ hash: `hash${i}`, tool: 'edit', @@ -188,9 +271,13 @@ describe('createShadowSnapshot', () => { cwd: '/p', timestamp: i * 1000, })); - vi.mocked(fs.readFileSync).mockImplementation((p) => - String(p).endsWith('snapshots.json') ? 
JSON.stringify(existing) : '' - ); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + const s = String(p); + if (s.endsWith('project-path.txt')) return '/mock/project'; + if (s.endsWith('snapshots.json')) return JSON.stringify(existing); + return ''; + }); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); mockGitSuccess('treeX', 'commitX'); await createShadowSnapshot('edit', { file_path: 'new.ts' }); @@ -203,10 +290,7 @@ describe('createShadowSnapshot', () => { }); it('extracts argsSummary from command field when no file_path', async () => { - withGitRepo(true); - vi.mocked(fs.readFileSync).mockImplementation((p) => - String(p).endsWith('snapshots.json') ? '[]' : '' - ); + withShadowRepo(true); mockGitSuccess('treeA', 'commitA'); await createShadowSnapshot('bash', { command: 'npm run build --production' }); @@ -217,10 +301,7 @@ describe('createShadowSnapshot', () => { }); it('extracts argsSummary from sql field', async () => { - withGitRepo(true); - vi.mocked(fs.readFileSync).mockImplementation((p) => - String(p).endsWith('snapshots.json') ? 
'[]' : '' - ); + withShadowRepo(true); mockGitSuccess('treeB', 'commitB'); await createShadowSnapshot('query', { sql: 'SELECT * FROM users' }); @@ -229,12 +310,295 @@ describe('createShadowSnapshot', () => { const written = JSON.parse(String(writeCall![1])); expect(written[0].argsSummary).toBe('SELECT * FROM users'); }); + + it('uses GIT_DIR (shadow) and GIT_WORK_TREE for all git operations', async () => { + withShadowRepo(true); + mockGitSuccess('treeX', 'commitX'); + + await createShadowSnapshot('edit', { file_path: 'src/app.ts' }); + + // Find the git add call and verify shadow env + const addCall = mockSpawn.mock.calls.find(([, args]) => (args as string[]).includes('add')); + expect(addCall).toBeDefined(); + const addEnv = addCall![2]?.env as Record; + expect(addEnv?.GIT_DIR).toContain('.node9/snapshots'); + expect(addEnv?.GIT_WORK_TREE).toBe('/mock/project'); + // Index file must be inside shadow dir — never in the user's .git + expect(addEnv?.GIT_INDEX_FILE).toContain('.node9/snapshots'); + expect(addEnv?.GIT_INDEX_FILE).not.toContain('/.git/'); + }); + + it('cleans up the per-invocation index file after snapshot (finally block)', async () => { + withShadowRepo(true); + mockGitSuccess('treeX', 'commitX'); + + await createShadowSnapshot('edit', {}); + + // unlinkSync should be called for the index file (inside shadow dir) + const unlinkCalls = vi.mocked(fs.unlinkSync).mock.calls.map(([p]) => String(p)); + expect(unlinkCalls.some((p) => p.includes('index_'))).toBe(true); + }); + + it('cleans up index file even when write-tree fails (finally block on error path)', async () => { + withShadowRepo(false); + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) return spawnResult('/shadow\n'); + if (a.includes('write-tree')) return spawnResult('', 1); // simulate failure + return spawnResult(); + }); + + const result = await createShadowSnapshot('edit', {}); + expect(result).toBeNull(); // snapshot failed + + // Index file must still be cleaned up — the finally block must fire on failure + const unlinkCalls = vi.mocked(fs.unlinkSync).mock.calls.map(([p]) => String(p)); + expect(unlinkCalls.some((p) => p.includes('index_'))).toBe(true); + }); + + it('calls unref() on the git gc background process', async () => { + // GC fires when stack.length % 5 === 0 (checked before MAX_SNAPSHOTS eviction). + // 4 existing + 1 new = 5 → 5 % 5 === 0 → GC fires. + withShadowRepo(true); + const existing = Array.from({ length: 4 }, (_, i) => ({ + hash: `hash${i}`, + tool: 'edit', + argsSummary: `f${i}.ts`, + cwd: '/p', + timestamp: i * 1000, + })); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + const s = String(p); + if (s.endsWith('project-path.txt')) return '/mock/project'; + if (s.endsWith('snapshots.json')) return JSON.stringify(existing); + return ''; + }); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + mockGitSuccess('treeGC', 'commitGC'); + + await createShadowSnapshot('edit', {}); + + // spawn (not spawnSync) should have been called for gc --auto + const mockSpawnFn = vi.mocked(spawn); + expect(mockSpawnFn).toHaveBeenCalled(); + const gcCall = mockSpawnFn.mock.calls.find(([, args]) => (args as string[]).includes('gc')); + expect(gcCall).toBeDefined(); + // unref() must be called so gc doesn't block Node.js exit + const returnVal = mockSpawnFn.mock.results.find( + (r) => r.type === 'return' && r.value?.unref + )?.value; + expect(returnVal?.unref).toHaveBeenCalled(); + }); + + it('uses a unique GIT_INDEX_FILE per concurrent invocation', async () => { + withShadowRepo(true); + mockGitSuccess('treeA', 
'commitA'); + + // Run two snapshots back-to-back (synchronous mock — simulates concurrent PIDs + // by checking the index file names are pid_timestamp scoped) + const [r1, r2] = await Promise.all([ + createShadowSnapshot('edit', { file_path: 'a.ts' }), + createShadowSnapshot('edit', { file_path: 'b.ts' }), + ]); + + expect(r1).not.toBeNull(); + expect(r2).not.toBeNull(); + + // Collect all GIT_INDEX_FILE values used across all git-add calls + const indexFiles = mockSpawn.mock.calls + .filter(([, args]) => (args as string[]).includes('add')) + .map(([, , opts]) => (opts?.env as Record)?.GIT_INDEX_FILE) + .filter(Boolean); + + // All index files must be inside the shadow dir, never in user's .git + for (const f of indexFiles) { + expect(f).toContain('.node9/snapshots'); + expect(f).not.toContain('/.git/'); + } + }); +}); + +// ── ensureShadowRepo (via createShadowSnapshot) ─────────────────────────────── + +describe('ensureShadowRepo', () => { + it('initializes shadow repo when it does not exist (rev-parse fails)', async () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); + vi.mocked(fs.readFileSync).mockImplementation((p) => + String(p).endsWith('snapshots.json') ? '[]' : '' + ); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 1, + stdout: Buffer.from(''), + stderr: Buffer.from(''), + } as ReturnType; + // init, config, add, write-tree, commit-tree all succeed + if (a.includes('write-tree')) + return { + status: 0, + stdout: Buffer.from('tree123\n'), + stderr: Buffer.from(''), + } as ReturnType; + if (a.includes('commit-tree')) + return { + status: 0, + stdout: Buffer.from('commit123\n'), + stderr: Buffer.from(''), + } as ReturnType; + return { + status: 0, + stdout: Buffer.from(''), + stderr: Buffer.from(''), + } as ReturnType; + }); + + const result = await createShadowSnapshot('edit', {}); + expect(result).toBe('commit123'); + + const initCall = mockSpawn.mock.calls.find(([, args]) => (args as string[]).includes('init')); + expect(initCall).toBeDefined(); + expect(initCall![1] as string[]).toContain('--bare'); + }); + + it('skips init when shadow repo is healthy and path matches', async () => { + withShadowRepo(true); + mockGitSuccess('tree1', 'commit1'); + + await createShadowSnapshot('edit', {}); + + const initCall = mockSpawn.mock.calls.find(([, args]) => (args as string[]).includes('init')); + expect(initCall).toBeUndefined(); + }); + + it('reinitializes when project-path.txt does not match (collision/rename)', async () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + vi.mocked(fs.readFileSync).mockImplementation((p) => { + const s = String(p); + // Simulate stored path being different (collision/rename) + if (s.endsWith('project-path.txt')) return '/some/other/project'; + if (s.endsWith('snapshots.json')) return '[]'; + return ''; + }); + + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType; + if (a.includes('write-tree')) + return { + status: 0, + stdout: Buffer.from('treeX\n'), + stderr: Buffer.from(''), + } as ReturnType; + if (a.includes('commit-tree')) + return { + status: 0, + stdout: Buffer.from('commitX\n'), + stderr: Buffer.from(''), + } as ReturnType; + return { + status: 0, + stdout: Buffer.from(''), + stderr: Buffer.from(''), + } as ReturnType; + }); + + await createShadowSnapshot('edit', {}); + + // rmSync should have been called to blow away the mismatched shadow dir + expect(vi.mocked(fs.rmSync)).toHaveBeenCalled(); + // And init should have been called to reinitialize + const initCall = mockSpawn.mock.calls.find(([, args]) => (args as string[]).includes('init')); + expect(initCall).toBeDefined(); + }); + + it('sets core.untrackedCache and core.fsmonitor on init', async () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); + vi.mocked(fs.readFileSync).mockImplementation((p) => + String(p).endsWith('snapshots.json') ? '[]' : '' + ); + vi.mocked(fs.existsSync).mockImplementation((p) => String(p).endsWith('snapshots.json')); + + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { status: 1, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< + typeof spawnSync + >; + if (a.includes('write-tree')) + return { + status: 0, + stdout: Buffer.from('tree\n'), + stderr: Buffer.from(''), + } as ReturnType; + if (a.includes('commit-tree')) + return { + status: 0, + stdout: Buffer.from('commit\n'), + stderr: Buffer.from(''), + } as ReturnType; + return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< + typeof spawnSync + >; + }); + + await createShadowSnapshot('edit', {}); + + const configCalls = mockSpawn.mock.calls.filter(([, args]) => + (args as string[]).includes('config') + ); + const allConfigArgs = configCalls.flatMap(([, a]) => a as string[]); + expect(allConfigArgs).toContain('core.untrackedCache'); + expect(allConfigArgs).toContain('core.fsmonitor'); + }); +}); + +// ── writeShadowExcludes (via createShadowSnapshot) ──────────────────────────── + +describe('writeShadowExcludes', () => { + it('always writes .git and .node9 into info/exclude', async () => { + withShadowRepo(true); + mockGitSuccess(); + + await createShadowSnapshot('edit', {}, ['node_modules', 'dist']); + + const excludeWrite = writeSpy.mock.calls.find(byExcludePath); + expect(excludeWrite).toBeDefined(); + const content = String(excludeWrite![1]); + expect(content).toContain('.git'); + expect(content).toContain('.node9'); + expect(content).toContain('node_modules'); + expect(content).toContain('dist'); + }); + + it('excludes .git and .node9 even when ignorePaths is empty', async () => { + withShadowRepo(true); + mockGitSuccess(); + + await createShadowSnapshot('edit', {}); + + const excludeWrite = writeSpy.mock.calls.find(byExcludePath); + expect(excludeWrite).toBeDefined(); + const content = String(excludeWrite![1]); + expect(content).toContain('.git'); + expect(content).toContain('.node9'); + }); }); // ── computeUndoDiff 
─────────────────────────────────────────────────────────── describe('computeUndoDiff', () => { it('returns null when git diff --stat is empty (no changes)', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn.mockReturnValue({ status: 0, stdout: Buffer.from(''), @@ -244,6 +608,7 @@ describe('computeUndoDiff', () => { }); it('returns null when git diff fails', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn.mockReturnValue({ status: 1, stdout: Buffer.from(''), @@ -253,13 +618,22 @@ describe('computeUndoDiff', () => { }); it('strips git header lines (diff --git, index) from output', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn .mockReturnValueOnce({ + // rev-parse (buildGitEnv) + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType) + .mockReturnValueOnce({ + // diff --stat status: 0, stdout: Buffer.from('1 file changed'), stderr: Buffer.from(''), } as ReturnType) .mockReturnValueOnce({ + // diff status: 0, stdout: Buffer.from( 'diff --git a/foo.ts b/foo.ts\nindex abc..def 100644\n--- a/foo.ts\n+++ b/foo.ts\n@@ -1,3 +1,3 @@\n-old\n+new\n' @@ -277,13 +651,22 @@ describe('computeUndoDiff', () => { }); it('returns null when diff output is empty after stripping headers', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn .mockReturnValueOnce({ + // rev-parse + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType) + .mockReturnValueOnce({ + // diff --stat status: 0, stdout: Buffer.from('1 file changed'), stderr: Buffer.from(''), } as ReturnType) .mockReturnValueOnce({ + // diff status: 0, stdout: Buffer.from( 'diff --git a/foo.ts b/foo.ts\nindex abc..def 100644\nBinary files differ\n' @@ -291,8 +674,37 @@ describe('computeUndoDiff', () => { stderr: Buffer.from(''), } as ReturnType); + expect(computeUndoDiff('abc123', '/mock/project')).toBeNull(); + }); + + it('falls back to ambient git (no GIT_DIR) for old hashes when 
shadow repo is absent', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); + mockSpawn + .mockReturnValueOnce({ + // rev-parse fails → shadow absent → legacy env + status: 1, + stdout: Buffer.from(''), + stderr: Buffer.from(''), + } as ReturnType) + .mockReturnValueOnce({ + // diff --stat (legacy) + status: 0, + stdout: Buffer.from('2 files changed'), + stderr: Buffer.from(''), + } as ReturnType) + .mockReturnValueOnce({ + // diff (legacy) + status: 0, + stdout: Buffer.from('--- a/foo.ts\n+++ b/foo.ts\n-old\n+new\n'), + stderr: Buffer.from(''), + } as ReturnType); + const result = computeUndoDiff('abc123', '/mock/project'); - expect(result).toBeNull(); + expect(result).not.toBeNull(); + + // Verify no GIT_DIR in the diff call's env (legacy path) + const diffCall = mockSpawn.mock.calls[2]; + expect((diffCall?.[2]?.env as Record)?.GIT_DIR).toBeUndefined(); }); }); @@ -300,6 +712,7 @@ describe('computeUndoDiff', () => { describe('applyUndo', () => { it('returns false when git restore fails', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn.mockReturnValue({ status: 1, stdout: Buffer.from(''), @@ -309,8 +722,15 @@ describe('applyUndo', () => { }); it('returns true when restore succeeds and file lists match', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn.mockImplementation((_cmd, args) => { const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType; if (a.includes('restore')) return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< typeof spawnSync @@ -337,8 +757,15 @@ describe('applyUndo', () => { it('deletes files that exist in working tree but not in snapshot', () => { vi.mocked(fs.existsSync).mockImplementation((p) => String(p).includes('extra.ts')); + vi.mocked(fs.readdirSync).mockReturnValue([]); mockSpawn.mockImplementation((_cmd, args) => { const a = (args ?? []) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType; if (a.includes('restore')) return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< typeof spawnSync @@ -367,4 +794,30 @@ describe('applyUndo', () => { const deleted = vi.mocked(fs.unlinkSync).mock.calls.map(([p]) => String(p)); expect(deleted.some((p) => p.includes('extra.ts'))).toBe(true); }); + + it('uses shadow GIT_DIR for restore and ls-tree', () => { + vi.mocked(fs.readdirSync).mockReturnValue([]); + mockSpawn.mockImplementation((_cmd, args) => { + const a = (args ?? 
[]) as string[]; + if (a.includes('rev-parse') && a.includes('--git-dir')) + return { + status: 0, + stdout: Buffer.from('/shadow\n'), + stderr: Buffer.from(''), + } as ReturnType; + return { status: 0, stdout: Buffer.from(''), stderr: Buffer.from('') } as ReturnType< + typeof spawnSync + >; + }); + + applyUndo('abc123', '/mock/project'); + + const restoreCall = mockSpawn.mock.calls.find(([, args]) => + (args as string[]).includes('restore') + ); + expect(restoreCall).toBeDefined(); + const restoreEnv = restoreCall![2]?.env as Record; + expect(restoreEnv?.GIT_DIR).toContain('.node9/snapshots'); + expect(restoreEnv?.GIT_WORK_TREE).toBe('/mock/project'); + }); }); diff --git a/src/cli.ts b/src/cli.ts index ee98a50..ce53655 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1229,7 +1229,7 @@ program // the state prior to this change. Snapshotting after (PostToolUse) // captures the changed state, making undo a no-op. if (shouldSnapshot(toolName, toolInput, config)) { - await createShadowSnapshot(toolName, toolInput); + await createShadowSnapshot(toolName, toolInput, config.policy.snapshot.ignorePaths); } // Pass to Headless authorization @@ -1368,7 +1368,7 @@ program // PostToolUse snapshot is a fallback for tools not covered by PreToolUse. // Uses the same configurable snapshot policy. 
if (shouldSnapshot(tool, {}, config)) { - await createShadowSnapshot(); + await createShadowSnapshot('unknown', {}, config.policy.snapshot.ignorePaths); } } catch { /* ignore */ diff --git a/src/core.ts b/src/core.ts index 2805b6b..2d57cd7 100644 --- a/src/core.ts +++ b/src/core.ts @@ -13,7 +13,7 @@ import { askNativePopup, sendDesktopNotification } from './ui/native'; import { computeRiskMetadata, RiskMetadata } from './context-sniper'; import { sanitizeConfig } from './config-schema'; import { readActiveShields, getShield } from './shields'; -import { scanArgs, type DlpMatch } from './dlp'; +import { scanArgs, scanFilePath, type DlpMatch } from './dlp'; // ── Feature file paths ──────────────────────────────────────────────────────── const PAUSED_FILE = path.join(os.homedir(), '.node9', 'PAUSED'); @@ -70,6 +70,115 @@ export function resumeNode9(): void { } catch {} } +// ── Regex Cache & ReDoS Protection ─────────────────────────────────────────── +const MAX_REGEX_LENGTH = 100; +const REGEX_CACHE_MAX = 500; +const regexCache = new Map(); + +/** + * Validates a user-supplied regex pattern against known ReDoS vectors. + * Returns null if valid, or an error string describing the problem. 
+ */ +export function validateRegex(pattern: string): string | null { + if (!pattern) return 'Pattern is required'; + if (pattern.length > MAX_REGEX_LENGTH) return `Pattern exceeds max length of ${MAX_REGEX_LENGTH}`; + + // Structural balance (tracks escape sequences and char class scope) + let parens = 0, + brackets = 0, + isEscaped = false, + inCharClass = false; + for (let i = 0; i < pattern.length; i++) { + const char = pattern[i]; + if (isEscaped) { + isEscaped = false; + continue; + } + if (char === '\\') { + isEscaped = true; + continue; + } + if (char === '[' && !inCharClass) { + inCharClass = true; + brackets++; + continue; + } + if (char === ']' && inCharClass) { + inCharClass = false; + brackets--; + continue; + } + if (inCharClass) continue; + if (char === '(') parens++; + else if (char === ')') parens--; + } + if (parens !== 0) return 'Unbalanced parentheses'; + if (brackets !== 0) return 'Unbalanced brackets'; + + // ReDoS vectors — only flag + * { as dangerous outer quantifiers; ? (zero-or-one) is bounded and safe + if (/\([^)]*[*+{][^)]*\)[*+{]/.test(pattern)) + return 'Nested quantifiers are forbidden (ReDoS risk)'; + // Only reject quantified alternations when the alternatives themselves contain + // quantifiers — e.g. (a+|b+)* is dangerous, but (GET|POST)+ is safe because + // the alternatives are fixed-length and disjoint. + if ( + /\([^)]*[*+{][^)]*\|[^)]*\)[*+{]/.test(pattern) || + /\([^)]*\|[^)]*[*+{][^)]*\)[*+{]/.test(pattern) + ) + return 'Quantified alternations with internal quantifiers are forbidden (ReDoS risk)'; + if (/\\\d+[*+{]/.test(pattern)) return 'Quantified backreferences are forbidden (ReDoS risk)'; + + // Final compile check + try { + new RegExp(pattern); + } catch (e) { + return `Invalid regex syntax: ${(e as Error).message}`; + } + + return null; +} + +/** + * Compiles a regex with validation and LRU caching. + * Returns null if the pattern is invalid or dangerous. 
+ */ +export function getCompiledRegex(pattern: string, flags = ''): RegExp | null { + // Validate flags before anything else — invalid flags (e.g. 'z') would throw + // inside new RegExp() and could leak debug info; reject them explicitly. + if (flags && !/^[gimsuy]+$/.test(flags)) { + if (process.env.NODE9_DEBUG === '1') console.error(`[Node9] Invalid regex flags: "${flags}"`); + return null; + } + const key = `${pattern}\0${flags}`; + if (regexCache.has(key)) { + // LRU bump: move to insertion-order end + const cached = regexCache.get(key)!; + regexCache.delete(key); + regexCache.set(key, cached); + return cached; + } + + const err = validateRegex(pattern); + if (err) { + if (process.env.NODE9_DEBUG === '1') + console.error(`[Node9] Regex blocked: ${err} — pattern: "${pattern}"`); + return null; + } + + try { + const re = new RegExp(pattern, flags); + if (regexCache.size >= REGEX_CACHE_MAX) { + const oldest = regexCache.keys().next().value; + if (oldest) regexCache.delete(oldest); + } + regexCache.set(key, re); + return re; + } catch (e) { + if (process.env.NODE9_DEBUG === '1') console.error(`[Node9] Regex compile failed:`, e); + return null; + } +} + // ── Trust Session helpers ───────────────────────────────────────────────────── function getActiveTrustSession(toolName: string): boolean { @@ -255,19 +364,16 @@ export function evaluateSmartConditions(args: unknown, rule: SmartRule): boolean return val !== null && cond.value ? !val.includes(cond.value) : true; case 'matches': { if (val === null || !cond.value) return false; - try { - return new RegExp(cond.value, cond.flags ?? '').test(val); - } catch { - return false; - } + const reM = getCompiledRegex(cond.value, cond.flags ?? ''); + if (!reM) return false; // invalid/dangerous pattern → fail closed + return reM.test(val); } case 'notMatches': { - if (val === null || !cond.value) return true; - try { - return !new RegExp(cond.value, cond.flags ?? 
'').test(val); - } catch { - return true; - } + if (!cond.value) return false; // no pattern → fail closed + if (val === null) return true; // field absent → condition passes (preserve original) + const reN = getCompiledRegex(cond.value, cond.flags ?? ''); + if (!reN) return false; // invalid/dangerous pattern → fail closed + return !reN.test(val); } case 'matchesGlob': return val !== null && cond.value ? pm.isMatch(val, cond.value) : false; @@ -1006,7 +1112,13 @@ export async function explainPolicy(toolName: string, args?: unknown): Promise) + : {}; + const filePathE = String(argsObjE.file_path ?? argsObjE.path ?? argsObjE.filename ?? ''); + const dlpMatch = + (filePathE ? scanFilePath(filePathE) : null) ?? (args !== undefined ? scanArgs(args) : null); if (dlpMatch) { steps.push({ name: 'DLP Content Scanner', @@ -1566,7 +1678,14 @@ async function _authorizeHeadlessCore( config.policy.dlp.enabled && (!isIgnoredTool(toolName) || config.policy.dlp.scanIgnoredTools) ) { - const dlpMatch: DlpMatch | null = scanArgs(args); + // P1-1/P1-2: Check file path first (blocks read attempts before content is returned, + // and resolves symlinks to prevent escape attacks). + const argsObj = + args && typeof args === 'object' && !Array.isArray(args) + ? (args as Record) + : {}; + const filePath = String(argsObj.file_path ?? argsObj.path ?? argsObj.filename ?? ''); + const dlpMatch: DlpMatch | null = (filePath ? scanFilePath(filePath) : null) ?? scanArgs(args); if (dlpMatch) { const dlpReason = `🚨 DATA LOSS PREVENTION: ${dlpMatch.patternName} detected in ` + diff --git a/src/dlp.ts b/src/dlp.ts index c007e0e..bab46f7 100644 --- a/src/dlp.ts +++ b/src/dlp.ts @@ -3,6 +3,9 @@ // Scans tool call arguments for known secret patterns before policy evaluation. // Returns only a redacted match object — the full secret never leaves this module. 
+import fs from 'fs'; +import path from 'path'; + export interface DlpMatch { patternName: string; fieldPath: string; @@ -27,9 +30,99 @@ export const DLP_PATTERNS: DlpPattern[] = [ regex: /-----BEGIN (?:RSA |EC |OPENSSH )?PRIVATE KEY-----/, severity: 'block', }, + // GCP service account JSON (detects the type field that uniquely identifies it) + { + name: 'GCP Service Account', + regex: /"type"\s*:\s*"service_account"/, + severity: 'block', + }, + // NPM auth token in .npmrc format + { + name: 'NPM Auth Token', + regex: /_authToken\s*=\s*[A-Za-z0-9_\-]{20,}/, + severity: 'block', + }, { name: 'Bearer Token', regex: /Bearer\s+[a-zA-Z0-9\-._~+/]+=*/i, severity: 'review' }, ]; +// ── Sensitive File Path Blocklist ───────────────────────────────────────────── +// Blocks access attempts to credential/key files before their content is read. +const SENSITIVE_PATH_PATTERNS: RegExp[] = [ + /[/\\]\.ssh[/\\]/i, + /[/\\]\.aws[/\\]/i, + /[/\\]\.config[/\\]gcloud[/\\]/i, + /[/\\]\.azure[/\\]/i, + /[/\\]\.kube[/\\]config$/i, + /[/\\]\.env($|\.)/i, // .env, .env.local, .env.production — not .envoy + /[/\\]\.git-credentials$/i, + /[/\\]\.npmrc$/i, + /[/\\]\.docker[/\\]config\.json$/i, + /[/\\][^/\\]+\.pem$/i, + /[/\\][^/\\]+\.key$/i, + /[/\\][^/\\]+\.p12$/i, + /[/\\][^/\\]+\.pfx$/i, + /^\/etc\/passwd$/, + /^\/etc\/shadow$/, + /^\/etc\/sudoers$/, + /[/\\]credentials\.json$/i, + /[/\\]id_rsa$/i, + /[/\\]id_ed25519$/i, + /[/\\]id_ecdsa$/i, +]; + +/** + * Checks whether a file path argument targets a sensitive credential file. + * Resolves symlinks (if the file exists) before checking, to prevent symlink + * escape attacks where a safe-looking path points to a protected file. + * + * Returns a DlpMatch if the path is sensitive, null if clean. 
+ */ +export function scanFilePath(filePath: string, cwd = process.cwd()): DlpMatch | null { + if (!filePath) return null; + + let resolved: string; + try { + const absolute = path.resolve(cwd, filePath); + // Call native() unconditionally — no existsSync pre-check. + // Skipping existsSync eliminates the TOCTOU window between the check and + // the native() call. Missing files throw ENOENT, which is caught below and + // treated as unresolvable (safe — a non-existent file can't be read). + resolved = fs.realpathSync.native(absolute); + } catch (err: unknown) { + const code = (err as NodeJS.ErrnoException).code; + if (code === 'ENOENT' || code === 'ENOTDIR') { + // File doesn't exist yet (e.g. new file being written) — use raw path. + // A non-existent file can't be a symlink, so no symlink escape is possible. + resolved = path.resolve(cwd, filePath); + } else { + // Any other error (EACCES, unexpected throw, possible TOCTOU remnant) — + // fail-closed: block rather than risk allowing a sensitive file. + return { + patternName: 'Sensitive File Path', + fieldPath: 'file_path', + redactedSample: filePath, + severity: 'block', + }; + } + } + + // Normalise to forward slashes for cross-platform pattern matching + const normalised = resolved.replace(/\\/g, '/'); + + for (const pattern of SENSITIVE_PATH_PATTERNS) { + if (pattern.test(normalised)) { + return { + patternName: 'Sensitive File Path', + fieldPath: 'file_path', + redactedSample: filePath, // show original path in alert, not resolved + severity: 'block', + }; + } + } + + return null; +} + /** * Masks a matched secret: keeps 4-char prefix + 4-char suffix, replaces the * middle with asterisks. e.g. "AKIA1234567890ABCD" → "AKIA**********ABCD" diff --git a/src/undo.ts b/src/undo.ts index 44f95be..9137dd0 100644 --- a/src/undo.ts +++ b/src/undo.ts @@ -1,7 +1,11 @@ // src/undo.ts // Snapshot engine: creates lightweight git snapshots before AI file edits, // enabling single-command undo with full diff preview. 
-import { spawnSync } from 'child_process'; +// +// Uses an isolated shadow bare repo at ~/.node9/snapshots// +// so the user's .git is never touched. +import { spawnSync, spawn } from 'child_process'; +import crypto from 'crypto'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -11,6 +15,7 @@ const SNAPSHOT_STACK_PATH = path.join(os.homedir(), '.node9', 'snapshots.json'); const UNDO_LATEST_PATH = path.join(os.homedir(), '.node9', 'undo_latest.txt'); const MAX_SNAPSHOTS = 10; +const GIT_TIMEOUT = 15_000; // 15s cap on any single git operation export interface SnapshotEntry { hash: string; @@ -37,7 +42,6 @@ function writeStack(stack: SnapshotEntry[]): void { function buildArgsSummary(tool: string, args: unknown): string { if (!args || typeof args !== 'object') return ''; const a = args as Record; - // Show the most useful single arg depending on tool type const filePath = a.file_path ?? a.path ?? a.filename; if (typeof filePath === 'string') return filePath; const cmd = a.command ?? a.cmd; @@ -47,58 +51,232 @@ function buildArgsSummary(tool: string, args: unknown): string { return tool; } +// ── Shadow Repo Helpers ─────────────────────────────────────────────────────── + +/** + * Normalizes a path for hashing: resolves symlinks, converts to forward slashes, + * lowercases on Windows for drive-letter consistency. + */ +function normalizeCwdForHash(cwd: string): string { + let normalized: string; + try { + normalized = fs.realpathSync(cwd); + } catch { + normalized = cwd; + } + normalized = normalized.replace(/\\/g, '/'); + if (process.platform === 'win32') normalized = normalized.toLowerCase(); + return normalized; +} + +/** + * Returns the path to the isolated shadow bare repo for a given project directory. + * Uses the first 16 hex chars of SHA-256(normalized_cwd) — 64 bits of entropy. 
+ */ +export function getShadowRepoDir(cwd: string): string { + const hash = crypto + .createHash('sha256') + .update(normalizeCwdForHash(cwd)) + .digest('hex') + .slice(0, 16); + return path.join(os.homedir(), '.node9', 'snapshots', hash); +} + +/** + * Deletes per-invocation index files older than 60s left behind by hard-killed processes. + */ +function cleanOrphanedIndexFiles(shadowDir: string): void { + try { + const cutoff = Date.now() - 60_000; + for (const f of fs.readdirSync(shadowDir)) { + if (f.startsWith('index_')) { + const fp = path.join(shadowDir, f); + try { + if (fs.statSync(fp).mtimeMs < cutoff) fs.unlinkSync(fp); + } catch {} + } + } + } catch { + /* non-fatal — shadow dir may not exist yet */ + } +} + +/** + * Writes gitignore-style exclusions into the shadow repo's info/exclude. + * Always excludes .git and .node9 to prevent snapshotting internal git state + * (inception) or node9's own data directory. + */ +function writeShadowExcludes(shadowDir: string, ignorePaths: string[]): void { + const hardcoded = ['.git', '.node9']; + const lines = [...hardcoded, ...ignorePaths].join('\n'); + try { + fs.writeFileSync(path.join(shadowDir, 'info', 'exclude'), lines + '\n', 'utf8'); + } catch {} +} + +/** + * Ensures the shadow bare repo exists and is healthy. + * - Validates with `git rev-parse --git-dir` (reliable check) + * - Detects hash collisions and directory renames via project-path.txt + * - Auto-recovers from corruption by deleting and reinitializing + * - Sets performance config (untrackedCache, fsmonitor) on first init + * Returns false if git is unavailable or init fails. 
+ */ +function ensureShadowRepo(shadowDir: string, cwd: string): boolean { + cleanOrphanedIndexFiles(shadowDir); + + const normalizedCwd = normalizeCwdForHash(cwd); + const shadowEnvBase = { ...process.env, GIT_DIR: shadowDir, GIT_WORK_TREE: cwd }; + + // Validate existing repo + const check = spawnSync('git', ['rev-parse', '--git-dir'], { + env: shadowEnvBase, + timeout: 3_000, + }); + + if (check.status === 0) { + const ptPath = path.join(shadowDir, 'project-path.txt'); + try { + const stored = fs.readFileSync(ptPath, 'utf8').trim(); + if (stored === normalizedCwd) return true; // healthy + // Mismatch — hash collision or directory renamed + if (process.env.NODE9_DEBUG === '1') + console.error( + `[Node9] Shadow repo path mismatch: stored="${stored}" expected="${normalizedCwd}" — reinitializing` + ); + fs.rmSync(shadowDir, { recursive: true, force: true }); + } catch { + // project-path.txt missing (pre-migration shadow repo) — write it and continue + try { + fs.writeFileSync(ptPath, normalizedCwd, 'utf8'); + } catch {} + return true; + } + } + + // Initialize new or re-initialize corrupted/mismatched shadow repo + try { + fs.mkdirSync(shadowDir, { recursive: true }); + } catch {} + + const init = spawnSync('git', ['init', '--bare', shadowDir], { timeout: 5_000 }); + if (init.status !== 0) { + if (process.env.NODE9_DEBUG === '1') + console.error('[Node9] git init --bare failed:', init.stderr?.toString()); + return false; + } + + // Performance config + const configFile = path.join(shadowDir, 'config'); + spawnSync('git', ['config', '--file', configFile, 'core.untrackedCache', 'true'], { + timeout: 3_000, + }); + spawnSync('git', ['config', '--file', configFile, 'core.fsmonitor', 'true'], { + timeout: 3_000, + }); + + // Write project-path.txt for auditability and collision detection + try { + fs.writeFileSync(path.join(shadowDir, 'project-path.txt'), normalizedCwd, 'utf8'); + } catch {} + + return true; +} + +/** + * Returns the git env to use for diff/undo 
operations on a given cwd. + * Prefers the shadow repo; falls back to ambient git (user's .git) for old + * hashes created before the shadow repo migration. + */ +function buildGitEnv(cwd: string): NodeJS.ProcessEnv { + const shadowDir = getShadowRepoDir(cwd); + const check = spawnSync('git', ['rev-parse', '--git-dir'], { + env: { ...process.env, GIT_DIR: shadowDir, GIT_WORK_TREE: cwd }, + timeout: 2_000, + }); + if (check.status === 0) { + return { ...process.env, GIT_DIR: shadowDir, GIT_WORK_TREE: cwd }; + } + // Legacy fallback: use ambient git context (user's .git or none) + return { ...process.env }; +} + +// ── Public API ──────────────────────────────────────────────────────────────── + /** * Creates a shadow snapshot and pushes metadata onto the stack. + * Works in any directory — no .git required in the project. */ export async function createShadowSnapshot( tool = 'unknown', - args: unknown = {} + args: unknown = {}, + ignorePaths: string[] = [] ): Promise { + let indexFile: string | null = null; try { const cwd = process.cwd(); - if (!fs.existsSync(path.join(cwd, '.git'))) return null; + const shadowDir = getShadowRepoDir(cwd); - const tempIndex = path.join(cwd, '.git', `node9_index_${Date.now()}`); - const env = { ...process.env, GIT_INDEX_FILE: tempIndex }; + if (!ensureShadowRepo(shadowDir, cwd)) return null; + writeShadowExcludes(shadowDir, ignorePaths); - spawnSync('git', ['add', '-A'], { env }); - const treeRes = spawnSync('git', ['write-tree'], { env }); - const treeHash = treeRes.stdout.toString().trim(); + // Per-invocation index file in shadow dir (not user's .git) for concurrent-session safety + indexFile = path.join(shadowDir, `index_${process.pid}_${Date.now()}`); + const shadowEnv = { + ...process.env, + GIT_DIR: shadowDir, + GIT_WORK_TREE: cwd, + GIT_INDEX_FILE: indexFile, + }; - if (fs.existsSync(tempIndex)) fs.unlinkSync(tempIndex); - if (!treeHash || treeRes.status !== 0) return null; + spawnSync('git', ['add', '-A'], { env: shadowEnv, 
timeout: GIT_TIMEOUT }); - const commitRes = spawnSync('git', [ - 'commit-tree', - treeHash, - '-m', - `Node9 AI Snapshot: ${new Date().toISOString()}`, - ]); - const commitHash = commitRes.stdout.toString().trim(); + const treeRes = spawnSync('git', ['write-tree'], { env: shadowEnv, timeout: GIT_TIMEOUT }); + const treeHash = treeRes.stdout?.toString().trim(); + if (!treeHash || treeRes.status !== 0) return null; + const commitRes = spawnSync( + 'git', + ['commit-tree', treeHash, '-m', `Node9 AI Snapshot: ${new Date().toISOString()}`], + { env: shadowEnv, timeout: GIT_TIMEOUT } + ); + const commitHash = commitRes.stdout?.toString().trim(); if (!commitHash || commitRes.status !== 0) return null; - // Push to stack const stack = readStack(); - const entry: SnapshotEntry = { + stack.push({ hash: commitHash, tool, argsSummary: buildArgsSummary(tool, args), cwd, timestamp: Date.now(), - }; - stack.push(entry); + }); + // Decide on GC BEFORE the splice caps the stack. Note readStack() already returns a + // capped stack (≤ MAX_SNAPSHOTS), so a bare `% 5` check would stop firing forever once + // the stack is full (length is then always MAX_SNAPSHOTS + 1). `git gc --auto` + // self-throttles internally, so also firing whenever the stack is over capacity is cheap. + const shouldGc = stack.length % 5 === 0 || stack.length > MAX_SNAPSHOTS; if (stack.length > MAX_SNAPSHOTS) stack.splice(0, stack.length - MAX_SNAPSHOTS); writeStack(stack); // Backward compat: keep undo_latest.txt fs.writeFileSync(UNDO_LATEST_PATH, commitHash); + // Periodic GC — fire-and-forget, non-blocking, keeps shadow dir tidy + if (shouldGc) { + spawn('git', ['gc', '--auto'], { env: shadowEnv, detached: true, stdio: 'ignore' }).unref(); + } + return commitHash; } catch (err) { if (process.env.NODE9_DEBUG === '1') console.error('[Node9 Undo Engine Error]:', err); + return null; + } finally { + // Always clean up the per-invocation index file + if (indexFile) { + try { + fs.unlinkSync(indexFile); + } catch {} + } } - return null; }
- * Returns the diff string, or null if the repo is clean / no diff available. + * Uses the shadow repo if available; falls back to user's .git for old hashes. */ export function computeUndoDiff(hash: string, cwd: string): string | null { try { - const result = spawnSync('git', ['diff', hash, '--stat', '--', '.'], { cwd }); - const stat = result.stdout.toString().trim(); - if (!stat) return null; - - const diff = spawnSync('git', ['diff', hash, '--', '.'], { cwd }); - const raw = diff.stdout.toString(); - if (!raw) return null; - // Strip git header lines, keep only file names + hunks + const env = buildGitEnv(cwd); + const statRes = spawnSync('git', ['diff', hash, '--stat', '--', '.'], { + cwd, + env, + timeout: GIT_TIMEOUT, + }); + const stat = statRes.stdout?.toString().trim(); + if (!stat || statRes.status !== 0) return null; + + const diffRes = spawnSync('git', ['diff', hash, '--', '.'], { + cwd, + env, + timeout: GIT_TIMEOUT, + }); + const raw = diffRes.stdout?.toString(); + if (!raw || diffRes.status !== 0) return null; + const lines = raw .split('\n') .filter( @@ -149,30 +336,47 @@ export function computeUndoDiff(hash: string, cwd: string): string | null { } /** - * Reverts the current directory to a specific Git commit hash. + * Reverts the current directory to a specific snapshot hash. + * Uses the shadow repo if available; falls back to user's .git for old hashes. */ export function applyUndo(hash: string, cwd?: string): boolean { try { const dir = cwd ?? 
process.cwd(); + const env = buildGitEnv(dir); const restore = spawnSync('git', ['restore', '--source', hash, '--staged', '--worktree', '.'], { cwd: dir, + env, + timeout: GIT_TIMEOUT, }); if (restore.status !== 0) return false; - const lsTree = spawnSync('git', ['ls-tree', '-r', '--name-only', hash], { cwd: dir }); - const snapshotFiles = new Set(lsTree.stdout.toString().trim().split('\n').filter(Boolean)); + const lsTree = spawnSync('git', ['ls-tree', '-r', '--name-only', hash], { + cwd: dir, + env, + timeout: GIT_TIMEOUT, + }); + const snapshotFiles = new Set( + lsTree.stdout?.toString().trim().split('\n').filter(Boolean) ?? [] + ); - const tracked = spawnSync('git', ['ls-files'], { cwd: dir }) - .stdout.toString() - .trim() - .split('\n') - .filter(Boolean); - const untracked = spawnSync('git', ['ls-files', '--others', '--exclude-standard'], { cwd: dir }) - .stdout.toString() - .trim() - .split('\n') - .filter(Boolean); + const tracked = + spawnSync('git', ['ls-files'], { cwd: dir, env, timeout: GIT_TIMEOUT }) + .stdout?.toString() + .trim() + .split('\n') + .filter(Boolean) ?? []; + + const untracked = + spawnSync('git', ['ls-files', '--others', '--exclude-standard'], { + cwd: dir, + env, + timeout: GIT_TIMEOUT, + }) + .stdout?.toString() + .trim() + .split('\n') + .filter(Boolean) ?? []; for (const file of [...tracked, ...untracked]) { const fullPath = path.join(dir, file);