From a1f749ccdb9b6e31c742e16a274eb57d5e74c091 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 10:12:38 +0100 Subject: [PATCH 01/10] specs: harden workflow semantics and operator safeguards --- openspec/specs/compatibility/spec.md | 13 +++-- openspec/specs/pull-and-validate/spec.md | 25 +++++++++ openspec/specs/push/spec.md | 51 +++++++++++++++++++ .../specs/recovery-and-maintenance/spec.md | 12 +++++ openspec/specs/workspace/spec.md | 11 ++++ 5 files changed, 109 insertions(+), 3 deletions(-) diff --git a/openspec/specs/compatibility/spec.md b/openspec/specs/compatibility/spec.md index 3a69aca..fc45a0a 100644 --- a/openspec/specs/compatibility/spec.md +++ b/openspec/specs/compatibility/spec.md @@ -21,8 +21,15 @@ The system SHALL degrade safely when the Confluence Folder API is unavailable. - GIVEN a tenant does not support folder operations needed for hierarchy writes - WHEN `push` resolves remote hierarchy -- THEN the system SHALL fall back to page-based hierarchy behavior -- AND the system SHALL emit compatibility diagnostics +- THEN the system SHALL explain that completing the push would require a folder-to-page semantic downgrade +- AND the system SHALL require explicit interactive operator acceptance before rewriting the workspace into page-based hierarchy behavior + +#### Scenario: Non-interactive push does not auto-downgrade folder semantics + +- GIVEN a tenant does not support folder operations needed for hierarchy writes +- AND the run is non-interactive +- WHEN `push` resolves remote hierarchy +- THEN the system SHALL fail closed instead of auto-downgrading the hierarchy #### Scenario: Folder fallback distinguishes incompatibility from upstream failure @@ -32,7 +39,7 @@ The system SHALL degrade safely when the Confluence Folder API is unavailable. 
#### Scenario: Push summary and structured reports surface active folder fallback -- GIVEN push runs in folder compatibility fallback mode +- GIVEN push runs after an explicitly accepted folder compatibility downgrade - WHEN the command prints its final summary or emits a structured JSON report - THEN the active fallback mode SHALL be visible in the summary/report output - AND the output SHALL preserve the distinction between unsupported capability and upstream endpoint failure diff --git a/openspec/specs/pull-and-validate/spec.md b/openspec/specs/pull-and-validate/spec.md index c50435a..50edcfb 100644 --- a/openspec/specs/pull-and-validate/spec.md +++ b/openspec/specs/pull-and-validate/spec.md @@ -63,6 +63,13 @@ The system SHALL map Confluence hierarchy into deterministic Markdown paths. - THEN the system SHALL continue with a safe fallback path - AND the system SHALL emit diagnostics describing the degraded hierarchy resolution +#### Scenario: Canonical pull paths reconcile authored slugs + +- GIVEN a tracked page was previously authored at a non-canonical local path +- WHEN pull computes the canonical local markdown path for that page +- THEN the canonical pull path SHALL win +- AND an existing workspace SHALL converge to the same path shape a fresh workspace would produce + ### Requirement: Link and attachment rewrite on pull The system SHALL rewrite same-space references to local Markdown and asset paths whenever the local targets are known. 
@@ -161,12 +168,30 @@ The system SHALL validate local Markdown with the same strict reverse-conversion - WHEN `conf validate` builds the page index - THEN the system SHALL fail validation +#### Scenario: Duplicate folder titles block validation + +- GIVEN two local directory-backed folders in the same space resolve to the same folder title under different parent paths +- WHEN `conf validate` inspects the pushable hierarchy +- THEN the system SHALL fail validation +- AND the error SHALL explain that Confluence folder titles must be unique across the space + #### Scenario: Mermaid content produces warning, not failure - GIVEN a Markdown file contains a Mermaid fenced code block - WHEN `conf validate` runs - THEN the system SHALL emit a warning indicating the content will be preserved as a code block on push +### Requirement: Diff previews new-page creation + +The system SHALL keep `diff` useful for brand-new Markdown pages that do not yet have a Confluence `id`. + +#### Scenario: Diff shows create preview for a new local page + +- GIVEN the user runs `conf diff` on a Markdown file without a Confluence `id` +- WHEN the file is a candidate for `push` +- THEN the system SHALL render a create preview instead of failing with a missing-id error +- AND the preview SHALL align with push preflight for the same file + #### Scenario: Space-scoped push validation evaluates the full space target - GIVEN a space-scoped push has one or more in-scope Markdown changes diff --git a/openspec/specs/push/spec.md b/openspec/specs/push/spec.md index 0747e11..acc9965 100644 --- a/openspec/specs/push/spec.md +++ b/openspec/specs/push/spec.md @@ -99,6 +99,50 @@ The system SHALL make remote-ahead conflict handling explicit. 
- THEN the system SHALL preserve the local edits via a clean merge, conflict markers, or explicit recoverable state - AND the system SHALL not silently discard local edits +#### Scenario: Non-interactive pull-merge requires an explicit merge-resolution policy + +- GIVEN push detects a remote-ahead conflict +- AND the policy is `pull-merge` +- AND the user passed `--non-interactive` +- WHEN no explicit merge-resolution policy was supplied +- THEN the system SHALL stop before mutating the main workspace +- AND the error SHALL instruct the operator to choose an explicit merge-resolution policy or rerun interactively + +#### Scenario: Non-interactive pull-merge uses isolated conflict exploration + +- GIVEN push detects a remote-ahead conflict +- AND the policy is `pull-merge` +- AND the user passed `--non-interactive` with an explicit merge-resolution policy +- WHEN push evaluates the pull-merge path +- THEN the system SHALL perform the exploratory merge in isolated workspace state +- AND the main workspace SHALL not be left in an unresolved merge state if the exploratory merge still conflicts + +### Requirement: Folder semantics are never downgraded silently + +The system SHALL fail closed when a directory-backed folder cannot be represented remotely without changing semantics. 
+ +#### Scenario: Push refuses silent folder-to-page downgrade + +- GIVEN a local directory represents a folder-only hierarchy segment +- WHEN push cannot create the corresponding Confluence folder because of unsupported capability, duplicate folder title, invalid parentage, or another hierarchy write constraint +- THEN the system SHALL not silently create a page in place of that folder +- AND the system SHALL surface the specific downgrade reason to the operator + +#### Scenario: Interactive push may offer explicit folder-to-page conversion + +- GIVEN push detects that continuing requires changing a directory-backed folder into a page-with-subpages node +- AND the run is interactive +- WHEN the operator explicitly accepts the downgrade +- THEN the system SHALL rewrite the local workspace into the page-with-subpages shape before continuing +- AND the resulting remote hierarchy SHALL match that rewritten local shape + +#### Scenario: Non-interactive push fails closed on required folder downgrade + +- GIVEN push detects that continuing would require changing a directory-backed folder into a page-with-subpages node +- AND the run is non-interactive +- WHEN no explicit interactive acceptance is possible +- THEN the system SHALL fail before performing the semantic downgrade + ### Requirement: Strict remote publishing The system SHALL publish Markdown to Confluence using strict conversion and explicit attachment/link resolution. @@ -148,6 +192,13 @@ The system SHALL provide safe non-write inspection modes for push. 
- THEN the system SHALL show the planned changes and validation outcome - AND the system SHALL not modify remote content or local Git state +#### Scenario: Preflight surfaces create-preview detail for new pages + +- GIVEN the target scope contains a brand-new Markdown page without a Confluence `id` +- WHEN the user runs `conf push --preflight` +- THEN the system SHALL show a create preview that includes the resolved parent, canonical target path, and planned attachment mutations +- AND the preview SHALL not require the operator to switch to a different command to inspect the create operation + #### Scenario: Preflight uses the same validation scope as real push - GIVEN the target scope contains a validation failure, including one introduced by planned deletions outside the directly changed file set diff --git a/openspec/specs/recovery-and-maintenance/spec.md b/openspec/specs/recovery-and-maintenance/spec.md index 35c613b..e46f390 100644 --- a/openspec/specs/recovery-and-maintenance/spec.md +++ b/openspec/specs/recovery-and-maintenance/spec.md @@ -24,6 +24,18 @@ The system SHALL let operators inspect retained push recovery artifacts without - AND the system SHALL show a concrete discard command for each retained run - AND the system SHALL show the general `conf recover --discard-all --yes` cleanup command +### Requirement: Status can inspect attachment drift + +The system SHALL let operators inspect attachment-only drift without dropping into Git internals. 
+ +#### Scenario: Attachment-aware status reports local and remote asset drift + +- GIVEN a space has attachment additions, deletions, or orphaned local asset files while Markdown page content is otherwise unchanged +- WHEN the user runs attachment-aware status inspection +- THEN the system SHALL summarize local attachment additions and deletions +- AND the system SHALL summarize remote attachment additions and deletions +- AND the system SHALL identify orphaned local asset files + ### Requirement: Doctor surfaces active workspace sync locks The system SHALL let operators inspect leftover repository sync locks that can block new mutating commands. diff --git a/openspec/specs/workspace/spec.md b/openspec/specs/workspace/spec.md index 697bd7d..8e7ae8f 100644 --- a/openspec/specs/workspace/spec.md +++ b/openspec/specs/workspace/spec.md @@ -64,6 +64,17 @@ The system SHALL maintain one directory per managed Confluence space. - WHEN `pull`, `push`, `validate`, or `status` run for that space - THEN the system SHALL reuse the tracked directory rather than renaming it opportunistically +### Requirement: Canonical markdown paths converge within a tracked space + +The system SHALL keep page paths inside a tracked space aligned with the canonical pull hierarchy. + +#### Scenario: Pull renames tracked markdown to canonical path + +- GIVEN a tracked page currently lives at a non-canonical authored path inside the space directory +- WHEN `pull` or `pull --force` recomputes the page layout +- THEN the tracked markdown path SHALL move to the canonical pull path +- AND the state file SHALL be updated to the canonical path + ### Requirement: Safety confirmation The system SHALL require explicit confirmation before large or destructive operations proceed. 
From c964f46fe13063c842bb51332928659bad8c2f98 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 10:16:14 +0100 Subject: [PATCH 02/10] sync: enforce canonical paths and folder title validation --- cmd/validate.go | 16 ++++++ cmd/validate_test.go | 51 ++++++++++++++++++ internal/sync/folder_conflicts.go | 86 +++++++++++++++++++++++++++++++ internal/sync/pull_paths.go | 30 ++--------- internal/sync/pull_paths_test.go | 62 ++++++++++++++++++++-- 5 files changed, 215 insertions(+), 30 deletions(-) create mode 100644 internal/sync/folder_conflicts.go diff --git a/cmd/validate.go b/cmd/validate.go index 2fbc574..5cbe94f 100644 --- a/cmd/validate.go +++ b/cmd/validate.go @@ -190,6 +190,22 @@ func runValidateTargetWithContextReportWithOverride(ctx context.Context, out io. return result, fmt.Errorf("validation failed: duplicate page IDs detected - rename each file to have a unique id or remove the duplicate id") } + if folderConflicts := syncflow.DetectFolderTitleConflicts(targetCtx.spaceDir, targetCtx.files); len(folderConflicts) > 0 { + for _, conflict := range folderConflicts { + msg := fmt.Sprintf( + "folder title %q is used by multiple directory-backed folders: %s β€” Confluence folder titles must be unique across the space; rename or convert one path into a page-backed directory", + conflict.Title, + strings.Join(conflict.Paths, ", "), + ) + _, _ = fmt.Fprintf(out, "Validation failed: %s\n", msg) + result.Diagnostics = append(result.Diagnostics, commandRunReportDiagnostic{ + Code: "duplicate_folder_title", + Message: msg, + }) + } + return result, fmt.Errorf("validation failed: duplicate folder titles detected - rename or convert the conflicting folders before retrying") + } + globalIndex, err := buildWorkspaceGlobalPageIndex(targetCtx.spaceDir) if err != nil { return result, fmt.Errorf("failed to build global page index: %w", err) diff --git a/cmd/validate_test.go b/cmd/validate_test.go index 87b8114..89885bd 100644 --- a/cmd/validate_test.go +++ 
b/cmd/validate_test.go @@ -556,6 +556,57 @@ func TestRunValidateTarget_BlocksDuplicatePageIDs(t *testing.T) { } } +func TestRunValidateTarget_BlocksDuplicateFolderTitles(t *testing.T) { + runParallelCommandTest(t) + repo := t.TempDir() + setupGitRepo(t, repo) + setupEnv(t) + + spaceDir := filepath.Join(repo, "Engineering (ENG)") + if err := os.MkdirAll(filepath.Join(spaceDir, "API"), 0o750); err != nil { + t.Fatalf("mkdir API dir: %v", err) + } + if err := os.MkdirAll(filepath.Join(spaceDir, "Guides", "API"), 0o750); err != nil { + t.Fatalf("mkdir nested API dir: %v", err) + } + + writeMarkdown(t, filepath.Join(spaceDir, "API", "One.md"), fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{Title: "One"}, + Body: "one\n", + }) + writeMarkdown(t, filepath.Join(spaceDir, "Guides", "Guides.md"), fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{Title: "Guides"}, + Body: "guides\n", + }) + writeMarkdown(t, filepath.Join(spaceDir, "Guides", "API", "Two.md"), fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{Title: "Two"}, + Body: "two\n", + }) + if err := fs.SaveState(spaceDir, fs.SpaceState{SpaceKey: "ENG"}); err != nil { + t.Fatalf("save state: %v", err) + } + + runGitForTest(t, repo, "add", ".") + runGitForTest(t, repo, "commit", "-m", "baseline") + + chdirRepo(t, repo) + out := &bytes.Buffer{} + err := runValidateTargetWithContext(context.Background(), out, config.Target{Mode: config.TargetModeSpace, Value: "Engineering (ENG)"}) + if err == nil { + t.Fatal("expected validate to fail for duplicate folder titles") + } + got := out.String() + if !strings.Contains(got, "folder title \"API\"") { + t.Fatalf("expected duplicate folder title in output, got:\n%s", got) + } + if !strings.Contains(got, "API, Guides/API") { + t.Fatalf("expected both conflicting folder paths in output, got:\n%s", got) + } + if !strings.Contains(got, "must be unique across the space") { + t.Fatalf("expected Confluence uniqueness guidance, got:\n%s", got) + } +} + func 
TestDetectDuplicatePageIDs_ReturnsNilForUniqueIDs(t *testing.T) { t.Parallel() index := map[string]string{ diff --git a/internal/sync/folder_conflicts.go b/internal/sync/folder_conflicts.go new file mode 100644 index 0000000..361b5ad --- /dev/null +++ b/internal/sync/folder_conflicts.go @@ -0,0 +1,86 @@ +package sync + +import ( + "path/filepath" + "sort" + "strings" +) + +// FolderTitleConflict describes directory-backed folders that would collide on +// Confluence's space-wide folder-title uniqueness constraint. +type FolderTitleConflict struct { + Title string + Paths []string +} + +// DetectFolderTitleConflicts returns duplicate pure-folder titles within the +// provided validation scope. A "pure folder" is a directory segment that does +// not already have a page-backed index file at /.md. +func DetectFolderTitleConflicts(spaceDir string, files []string) []FolderTitleConflict { + markdownPaths := map[string]struct{}{} + for _, file := range files { + path := strings.TrimSpace(file) + if path == "" { + continue + } + if !filepath.IsAbs(path) { + path = filepath.Join(spaceDir, filepath.FromSlash(path)) + } + relPath, err := filepath.Rel(spaceDir, path) + if err != nil { + continue + } + normalized := normalizeRelPath(relPath) + if normalized == "" { + continue + } + markdownPaths[normalized] = struct{}{} + } + + pathsByTitle := map[string]map[string]struct{}{} + for relPath := range markdownPaths { + currentDir := normalizeRelPath(filepath.ToSlash(filepath.Dir(filepath.FromSlash(relPath)))) + for currentDir != "" && currentDir != "." { + if indexPath := indexPagePathForDir(currentDir); indexPath != "" { + if _, exists := markdownPaths[indexPath]; exists { + currentDir = normalizeRelPath(filepath.ToSlash(filepath.Dir(filepath.FromSlash(currentDir)))) + continue + } + } + + title := strings.TrimSpace(filepath.Base(filepath.FromSlash(currentDir))) + if title != "" && title != "." 
{ + if pathsByTitle[title] == nil { + pathsByTitle[title] = map[string]struct{}{} + } + pathsByTitle[title][currentDir] = struct{}{} + } + + nextDir := normalizeRelPath(filepath.ToSlash(filepath.Dir(filepath.FromSlash(currentDir)))) + if nextDir == currentDir { + break + } + currentDir = nextDir + } + } + + conflicts := make([]FolderTitleConflict, 0) + for title, pathSet := range pathsByTitle { + if len(pathSet) <= 1 { + continue + } + paths := sortedStringKeys(pathSet) + conflicts = append(conflicts, FolderTitleConflict{ + Title: title, + Paths: paths, + }) + } + + sort.Slice(conflicts, func(i, j int) bool { + if conflicts[i].Title == conflicts[j].Title { + return strings.Join(conflicts[i].Paths, "|") < strings.Join(conflicts[j].Paths, "|") + } + return conflicts[i].Title < conflicts[j].Title + }) + return conflicts +} diff --git a/internal/sync/pull_paths.go b/internal/sync/pull_paths.go index 9af9bd9..03edc4e 100644 --- a/internal/sync/pull_paths.go +++ b/internal/sync/pull_paths.go @@ -12,10 +12,10 @@ import ( "github.com/rgonek/confluence-markdown-sync/internal/fs" ) -// PlanPagePaths builds deterministic markdown paths for remote pages. +// PlanPagePaths builds deterministic canonical markdown paths for remote pages. // -// It preserves previously mapped paths from page_path_index when possible, -// then allocates unique sanitized filenames for newly discovered pages. +// It always recomputes the canonical pull path from the current remote +// hierarchy, then allocates unique sanitized filenames if needed. 
func PlanPagePaths( spaceDir string, previousPageIndex map[string]string, @@ -46,21 +46,6 @@ func PlanPagePaths( } } } - previousPathByID := map[string]string{} - for _, previousPath := range sortedStringKeys(previousPageIndex) { - pageID := previousPageIndex[previousPath] - if _, exists := pageByID[pageID]; !exists { - continue - } - normalized := normalizeRelPath(previousPath) - if normalized == "" { - continue - } - if _, exists := previousPathByID[pageID]; !exists { - previousPathByID[pageID] = normalized - } - } - absByID := map[string]string{} relByID := map[string]string{} usedRelPaths := map[string]struct{}{} @@ -72,9 +57,6 @@ func PlanPagePaths( plans := make([]pagePathPlan, 0, len(pages)) for _, page := range pages { baseRelPath := plannedPageRelPath(page, pageByID, folderByID, hasChildren) - if previousPath := previousPathByID[page.ID]; previousPath != "" && sameParentDirectory(previousPath, baseRelPath) { - baseRelPath = previousPath - } plans = append(plans, pagePathPlan{ ID: page.ID, @@ -187,12 +169,6 @@ func ancestorPathSegments(parentID string, parentType string, pageByID map[strin return segments, true } -func sameParentDirectory(pathA, pathB string) bool { - dirA := normalizeRelPath(filepath.Dir(pathA)) - dirB := normalizeRelPath(filepath.Dir(pathB)) - return dirA == dirB -} - func ensureUniqueMarkdownPath(baseName string, used map[string]struct{}) string { baseName = normalizeRelPath(baseName) if baseName == "" { diff --git a/internal/sync/pull_paths_test.go b/internal/sync/pull_paths_test.go index 7495c71..abd01bb 100644 --- a/internal/sync/pull_paths_test.go +++ b/internal/sync/pull_paths_test.go @@ -1,6 +1,8 @@ package sync import ( + "path/filepath" + "strings" "testing" "github.com/rgonek/confluence-markdown-sync/internal/confluence" @@ -60,7 +62,7 @@ func TestPlanPagePaths_UsesFolderHierarchy(t *testing.T) { } } -func TestPlanPagePaths_PreservesExistingPathWhenTitleChangesInSameParent(t *testing.T) { +func 
TestPlanPagePaths_ReconcilesExistingPathToCanonicalTitle(t *testing.T) { spaceDir := t.TempDir() pages := []confluence.Page{ @@ -72,8 +74,62 @@ func TestPlanPagePaths_PreservesExistingPathWhenTitleChangesInSameParent(t *test _, relByID := PlanPagePaths(spaceDir, previousPageIndex, pages, nil) - if got := relByID["1"]; got != "custom-title.md" { - t.Fatalf("preserved path = %q, want custom-title.md", got) + if got := relByID["1"]; got != "Renamed-Page.md" { + t.Fatalf("canonical path = %q, want Renamed-Page.md", got) + } +} + +func TestPlanPagePaths_ReconcilesShortSlugToCanonicalPath(t *testing.T) { + spaceDir := t.TempDir() + + pages := []confluence.Page{ + {ID: "1", Title: "Cross Space Target 2026-03-11-0712", ParentPageID: "10"}, + {ID: "10", Title: "Software Development"}, + } + previousPageIndex := map[string]string{ + "Software-Development/XT-20260311-0712.md": "1", + "Software-Development/Software-Development.md": "10", + } + + _, relByID := PlanPagePaths(spaceDir, previousPageIndex, pages, nil) + + if got := relByID["1"]; got != "Software-Development/Cross-Space-Target-2026-03-11-0712.md" { + t.Fatalf("canonical child path = %q, want Software-Development/Cross-Space-Target-2026-03-11-0712.md", got) + } +} + +func TestDetectFolderTitleConflicts_FindsDuplicatePureFolders(t *testing.T) { + spaceDir := t.TempDir() + + conflicts := DetectFolderTitleConflicts(spaceDir, []string{ + filepath.Join(spaceDir, "API", "One.md"), + filepath.Join(spaceDir, "Guides", "API", "Two.md"), + filepath.Join(spaceDir, "Guides", "Guides.md"), + }) + + if len(conflicts) != 1 { + t.Fatalf("conflicts = %+v, want 1 duplicate title", conflicts) + } + if conflicts[0].Title != "API" { + t.Fatalf("conflict title = %q, want API", conflicts[0].Title) + } + if got := strings.Join(conflicts[0].Paths, ","); got != "API,Guides/API" { + t.Fatalf("conflict paths = %q, want API,Guides/API", got) + } +} + +func TestDetectFolderTitleConflicts_IgnoresPageBackedDirectories(t *testing.T) { + spaceDir := 
t.TempDir() + + conflicts := DetectFolderTitleConflicts(spaceDir, []string{ + filepath.Join(spaceDir, "API", "API.md"), + filepath.Join(spaceDir, "API", "One.md"), + filepath.Join(spaceDir, "Guides", "API", "API.md"), + filepath.Join(spaceDir, "Guides", "API", "Two.md"), + }) + + if len(conflicts) != 0 { + t.Fatalf("conflicts = %+v, want none for page-backed directories", conflicts) } } From 42679cf8b5dbb27d9b5053fb5aaaf25d0854ff0d Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 10:29:06 +0100 Subject: [PATCH 03/10] cmd: add create previews and attachment-aware status --- cmd/create_preview.go | 151 +++++++++++++++++++++++++++ cmd/diff.go | 84 ++++++++++++--- cmd/diff_test.go | 41 ++++++-- cmd/more_tests_test.go | 2 +- cmd/push_dryrun_test.go | 3 + cmd/push_preflight.go | 45 +++++--- cmd/push_refactor_test.go | 29 +++++- cmd/status.go | 213 +++++++++++++++++++++++++++++++++++++- cmd/status_e2e_test.go | 2 +- cmd/status_run_test.go | 69 ++++++++++-- cmd/status_test.go | 8 ++ 11 files changed, 591 insertions(+), 56 deletions(-) create mode 100644 cmd/create_preview.go diff --git a/cmd/create_preview.go b/cmd/create_preview.go new file mode 100644 index 0000000..0000b12 --- /dev/null +++ b/cmd/create_preview.go @@ -0,0 +1,151 @@ +package cmd + +import ( + "context" + "encoding/json" + "fmt" + "path/filepath" + "strings" + + "github.com/rgonek/confluence-markdown-sync/internal/converter" + "github.com/rgonek/confluence-markdown-sync/internal/fs" + syncflow "github.com/rgonek/confluence-markdown-sync/internal/sync" +) + +type localCreatePreview struct { + RelPath string + Title string + ResolvedParent string + CanonicalTargetPath string + AttachmentUploads []string + ADFBytes int + ADFTopLevelNodes int +} + +func buildLocalCreatePreview( + ctx context.Context, + spaceDir string, + relPath string, + domain string, + attachmentIndex map[string]string, + globalIndex syncflow.GlobalPageIndex, +) (localCreatePreview, error) { + relPath = 
normalizeRepoRelPath(relPath) + if relPath == "" { + return localCreatePreview{}, fmt.Errorf("relative path is required") + } + + absPath := filepath.Join(spaceDir, filepath.FromSlash(relPath)) + doc, err := fs.ReadMarkdownDocument(absPath) + if err != nil { + return localCreatePreview{}, err + } + + title := strings.TrimSpace(doc.Frontmatter.Title) + if title == "" { + title = strings.TrimSuffix(filepath.Base(relPath), filepath.Ext(relPath)) + } + + pageIndex, err := syncflow.BuildPageIndex(spaceDir) + if err != nil { + return localCreatePreview{}, err + } + if err := syncflow.SeedPendingPageIDsForFiles(spaceDir, pageIndex, []string{absPath}); err != nil { + return localCreatePreview{}, err + } + + referencedAssets, err := syncflow.CollectReferencedAssetPaths(spaceDir, absPath, doc.Body) + if err != nil { + return localCreatePreview{}, err + } + + strictAttachmentIndex, _, err := syncflow.BuildStrictAttachmentIndex(spaceDir, absPath, doc.Body, attachmentIndex) + if err != nil { + return localCreatePreview{}, err + } + preparedBody, err := syncflow.PrepareMarkdownForAttachmentConversion(spaceDir, absPath, doc.Body, strictAttachmentIndex) + if err != nil { + return localCreatePreview{}, err + } + linkHook := syncflow.NewReverseLinkHookWithGlobalIndex(spaceDir, pageIndex, globalIndex, domain) + mediaHook := syncflow.NewReverseMediaHook(spaceDir, strictAttachmentIndex) + reverseResult, err := converter.Reverse(ctx, []byte(preparedBody), converter.ReverseConfig{ + LinkHook: linkHook, + MediaHook: mediaHook, + Strict: true, + }, absPath) + if err != nil { + return localCreatePreview{}, err + } + + return localCreatePreview{ + RelPath: relPath, + Title: title, + ResolvedParent: resolvePreviewParent(relPath, doc.Frontmatter.ConfluenceParentPageID, pageIndex), + CanonicalTargetPath: canonicalCreatePreviewPath(relPath, title), + AttachmentUploads: referencedAssets, + ADFBytes: len(reverseResult.ADF), + ADFTopLevelNodes: adfTopLevelNodeCount(reverseResult.ADF), + }, nil +} + 
+func resolvePreviewParent(relPath, fallbackParentID string, pageIndex syncflow.PageIndex) string { + dirPath := normalizeRepoRelPath(filepath.Dir(relPath)) + if dirPath == "" || dirPath == "." { + if parentID := strings.TrimSpace(fallbackParentID); parentID != "" { + return "page " + parentID + } + return "space root" + } + + for currentDir := dirPath; currentDir != "" && currentDir != "."; { + indexPath := previewIndexPathForDir(currentDir) + if indexPath != "" && normalizeRepoRelPath(indexPath) != normalizeRepoRelPath(relPath) { + if pageID := strings.TrimSpace(pageIndex[indexPath]); pageID != "" { + return fmt.Sprintf("page %s (%s)", pageID, indexPath) + } + } + + nextDir := normalizeRepoRelPath(filepath.ToSlash(filepath.Dir(filepath.FromSlash(currentDir)))) + if nextDir == currentDir { + break + } + currentDir = nextDir + } + + if parentID := strings.TrimSpace(fallbackParentID); parentID != "" { + return "page " + parentID + } + return "space root" +} + +func previewIndexPathForDir(dirPath string) string { + dirPath = normalizeRepoRelPath(dirPath) + if dirPath == "" || dirPath == "." { + return "" + } + dirBase := strings.TrimSpace(filepath.Base(filepath.FromSlash(dirPath))) + if dirBase == "" || dirBase == "." { + return "" + } + return normalizeRepoRelPath(filepath.ToSlash(filepath.Join(dirPath, dirBase+".md"))) +} + +func canonicalCreatePreviewPath(relPath, title string) string { + dirPath := normalizeRepoRelPath(filepath.Dir(relPath)) + fileName := fs.SanitizeMarkdownFilename(title) + if dirPath == "" || dirPath == "." 
{ + return fileName + } + return normalizeRepoRelPath(filepath.ToSlash(filepath.Join(dirPath, fileName))) +} + +func adfTopLevelNodeCount(adf []byte) int { + var parsed struct { + Content []json.RawMessage `json:"content"` + } + if err := json.Unmarshal(adf, &parsed); err != nil { + return 0 + } + return len(parsed.Content) +} diff --git a/cmd/diff.go b/cmd/diff.go index f690c78..c91b9a4 100644 --- a/cmd/diff.go +++ b/cmd/diff.go @@ -92,10 +92,7 @@ func runDiff(cmd *cobra.Command, target config.Target) (runErr error) { if err := ensureWorkspaceSyncReady("diff"); err != nil { return err } - if err := ensureDiffTargetSupportsRemoteComparison(target); err != nil { - return err - } - initialCtx, err := resolveInitialPullContext(target) + initialCtx, err := resolveInitialDiffContext(target) if err != nil { return err } @@ -143,6 +140,35 @@ func runDiff(cmd *cobra.Command, target config.Target) (runErr error) { return fmt.Errorf("load state: %w", err) } + if target.IsFile() { + absPath, err := filepath.Abs(target.Value) + if err != nil { + return err + } + doc, err := fs.ReadMarkdownDocument(absPath) + if err != nil { + return fmt.Errorf("read target file %s: %w", target.Value, err) + } + if strings.TrimSpace(doc.Frontmatter.ID) == "" { + globalPageIndex, buildErr := buildWorkspaceGlobalPageIndex(diffCtx.spaceDir) + if buildErr != nil { + return fmt.Errorf("build global page index: %w", buildErr) + } + preview, previewErr := buildLocalCreatePreview(ctx, diffCtx.spaceDir, diffDisplayRelPath(diffCtx.spaceDir, absPath), cfg.Domain, state.AttachmentIndex, globalPageIndex) + if previewErr != nil { + return fmt.Errorf("build create preview for %s: %w", target.Value, previewErr) + } + printDiffCreatePreview(out, preview) + report.MutatedFiles = append(report.MutatedFiles, preview.RelPath) + report.MutatedPages = append(report.MutatedPages, commandRunReportPage{ + Operation: "create-preview", + Path: preview.RelPath, + Title: preview.Title, + }) + return nil + } + } + pages, err 
:= listAllDiffPages(ctx, remote, confluence.PageListOptions{ SpaceID: space.ID, SpaceKey: space.Key, @@ -212,29 +238,38 @@ func runDiff(cmd *cobra.Command, target config.Target) (runErr error) { return err } -func ensureDiffTargetSupportsRemoteComparison(target config.Target) error { +func resolveInitialDiffContext(target config.Target) (initialPullContext, error) { if !target.IsFile() { - return nil + return resolveInitialPullContext(target) } absPath, err := filepath.Abs(target.Value) if err != nil { - return err + return initialPullContext{}, err } - doc, err := fs.ReadMarkdownDocument(absPath) if err != nil { - return fmt.Errorf("read target file %s: %w", target.Value, err) + return initialPullContext{}, fmt.Errorf("read target file %s: %w", target.Value, err) + } + + spaceDir := findSpaceDirFromFile(absPath, "") + spaceKey := "" + if state, stateErr := fs.LoadState(spaceDir); stateErr == nil { + spaceKey = strings.TrimSpace(state.SpaceKey) + } + if spaceKey == "" { + spaceKey = inferSpaceKeyFromDirName(spaceDir) } - if strings.TrimSpace(doc.Frontmatter.ID) != "" { - return nil + if spaceKey == "" { + return initialPullContext{}, fmt.Errorf("target file %s missing tracked space context; run pull with a space target first", target.Value) } - return fmt.Errorf( - "target file %s has no id, so diff cannot compare it to an existing remote page; for a brand-new page preview, run `conf push --preflight %s`", - target.Value, - target.Value, - ) + return initialPullContext{ + spaceKey: spaceKey, + spaceDir: spaceDir, + targetPageID: strings.TrimSpace(doc.Frontmatter.ID), + fixedDir: true, + }, nil } func runDiffFileMode( @@ -349,6 +384,23 @@ func runDiffFileMode( return result, err } +func printDiffCreatePreview(out io.Writer, preview localCreatePreview) { + _, _ = fmt.Fprintln(out, "new page preview:") + _, _ = fmt.Fprintf(out, " operation: create page %q\n", preview.Title) + _, _ = fmt.Fprintf(out, " source file: %s\n", preview.RelPath) + _, _ = fmt.Fprintf(out, " 
resolved parent: %s\n", preview.ResolvedParent) + _, _ = fmt.Fprintf(out, " canonical target path: %s\n", preview.CanonicalTargetPath) + if len(preview.AttachmentUploads) == 0 { + _, _ = fmt.Fprintln(out, " attachment operations: none") + } else { + _, _ = fmt.Fprintf(out, " attachment operations: upload %d asset(s)\n", len(preview.AttachmentUploads)) + for _, asset := range preview.AttachmentUploads { + _, _ = fmt.Fprintf(out, " - %s\n", asset) + } + } + _, _ = fmt.Fprintf(out, " ADF summary: %d byte(s), %d top-level node(s)\n", preview.ADFBytes, preview.ADFTopLevelNodes) +} + func runDiffSpaceMode( ctx context.Context, out io.Writer, diff --git a/cmd/diff_test.go b/cmd/diff_test.go index eb04799..6f8b15c 100644 --- a/cmd/diff_test.go +++ b/cmd/diff_test.go @@ -147,8 +147,11 @@ func TestRunDiff_SpaceModeNoDifferences(t *testing.T) { } got := out.String() - if !strings.Contains(got, "diff completed with no differences") { - t.Fatalf("expected no-diff message, got:\n%s", got) + if !strings.Contains(got, "[PAGE_PATH_MOVED]") { + t.Fatalf("expected canonical path move diagnostic, got:\n%s", got) + } + if !strings.Contains(got, "root.md to Root.md") { + t.Fatalf("expected canonical root rename detail, got:\n%s", got) } } @@ -826,7 +829,7 @@ func TestRunDiff_RespectsCanceledContext(t *testing.T) { } } -func TestRunDiff_FileModeNewPageWithoutIDPointsToPreflight(t *testing.T) { +func TestRunDiff_FileModeNewPageWithoutIDShowsCreatePreview(t *testing.T) { runParallelCommandTest(t) repo := t.TempDir() setupGitRepo(t, repo) @@ -852,17 +855,33 @@ func TestRunDiff_FileModeNewPageWithoutIDPointsToPreflight(t *testing.T) { chdirRepo(t, repo) cmd := &cobra.Command{} - cmd.SetOut(&bytes.Buffer{}) + out := &bytes.Buffer{} + cmd.SetOut(out) + + fake := &cmdFakePullRemote{ + space: confluence.Space{ID: "space-1", Key: "ENG", Name: "Engineering"}, + attachments: map[string][]byte{}, + } + oldFactory := newDiffRemote + newDiffRemote = func(_ *config.Config) (syncflow.PullRemote, error) 
{ return fake, nil } + t.Cleanup(func() { newDiffRemote = oldFactory }) - err := runDiff(cmd, config.Target{Mode: config.TargetModeFile, Value: newFile}) - if err == nil { - t.Fatal("expected diff guidance error for brand-new file") + if err := runDiff(cmd, config.Target{Mode: config.TargetModeFile, Value: newFile}); err != nil { + t.Fatalf("expected create preview for brand-new file, got: %v", err) + } + + got := out.String() + if !strings.Contains(got, "new page preview:") { + t.Fatalf("expected create preview header, got:\n%s", got) + } + if !strings.Contains(got, `operation: create page "New Page"`) { + t.Fatalf("expected create operation detail, got:\n%s", got) } - if !strings.Contains(err.Error(), "has no id") { - t.Fatalf("expected missing-id guidance, got: %v", err) + if !strings.Contains(got, "canonical target path: New-Page.md") { + t.Fatalf("expected canonical target path, got:\n%s", got) } - if !strings.Contains(err.Error(), "conf push --preflight") { - t.Fatalf("expected preflight guidance, got: %v", err) + if !strings.Contains(got, "resolved parent: space root") { + t.Fatalf("expected resolved parent detail, got:\n%s", got) } } diff --git a/cmd/more_tests_test.go b/cmd/more_tests_test.go index c8081a6..b99aeb0 100644 --- a/cmd/more_tests_test.go +++ b/cmd/more_tests_test.go @@ -16,7 +16,7 @@ func TestBuildStatusReport_SkipGit(t *testing.T) { mock := &mockStatusRemote{err: confluence.ErrNotFound} // Set targetRelPath to something to avoid remoteAdded logic for now - _, _ = buildStatusReport(context.Background(), mock, config.Target{}, initialPullContext{}, fs.SpaceState{}, "SPACE", "path") + _, _ = buildStatusReport(context.Background(), mock, config.Target{}, initialPullContext{}, fs.SpaceState{}, "SPACE", "path", false) } func TestPrintStatusList_Empty(t *testing.T) { diff --git a/cmd/push_dryrun_test.go b/cmd/push_dryrun_test.go index 62bd0ce..aad6094 100644 --- a/cmd/push_dryrun_test.go +++ b/cmd/push_dryrun_test.go @@ -371,6 +371,9 @@ func 
TestRunPush_PreflightUsesExistingRemotePageForContentStatusProbe(t *testing if !strings.Contains(text, "content-status metadata sync disabled for this push") { t.Fatalf("preflight output missing degraded-mode detail for new-page probe:\n%s", text) } + if !strings.Contains(text, "create new-page.md (parent: space root, canonical path: New-page.md)") { + t.Fatalf("preflight output missing create preview detail:\n%s", text) + } } func TestRunPush_PreflightHonorsExplicitForceConflictPolicy(t *testing.T) { diff --git a/cmd/push_preflight.go b/cmd/push_preflight.go index 7952076..71fc202 100644 --- a/cmd/push_preflight.go +++ b/cmd/push_preflight.go @@ -21,6 +21,8 @@ type pushPreflightContext struct { state fs.SpaceState remotePageByID map[string]confluence.Page concerns []string + domain string + globalIndex syncflow.GlobalPageIndex } type pushDeletePreview struct { @@ -63,8 +65,8 @@ func runPushPreflight( } printPushPreflightConcerns(out, preflightCtx.concerns) - printPushPreflightPageMutations(out, spaceDir, syncChanges, onConflict, preflightCtx) - printPushPreflightAttachmentMutations(out, spaceDir, syncChanges, preflightCtx.state) + printPushPreflightPageMutations(ctx, out, spaceDir, syncChanges, onConflict, preflightCtx) + printPushPreflightAttachmentMutations(ctx, out, spaceDir, syncChanges, preflightCtx) addCount, modifyCount, deleteCount := summarizePushChanges(syncChanges) _, _ = fmt.Fprintf(out, "changes: %d (A:%d M:%d D:%d)\n", len(syncChanges), addCount, modifyCount, deleteCount) @@ -113,11 +115,14 @@ func buildPushPreflightContext(ctx context.Context, spaceKey, spaceDir string, s } state, _ := fs.LoadState(spaceDir) + globalIndex, _ := buildWorkspaceGlobalPageIndex(spaceDir) return pushPreflightContext{ state: state, remotePageByID: remotePageByID, concerns: concerns, + domain: cfg.Domain, + globalIndex: globalIndex, }, nil } @@ -179,6 +184,7 @@ func normalizePreflightPageLifecycleState(state string) string { } func printPushPreflightPageMutations( + ctx 
context.Context, out io.Writer, spaceDir string, syncChanges []syncflow.PushFileChange, @@ -190,25 +196,29 @@ func printPushPreflightPageMutations( _, _ = fmt.Fprintf( out, " %s\n", - preflightPageMutationLine(spaceDir, change, onConflict, preflightCtx.state, preflightCtx.remotePageByID), + preflightPageMutationLine(ctx, spaceDir, change, onConflict, preflightCtx), ) } } func preflightPageMutationLine( + ctx context.Context, spaceDir string, change syncflow.PushFileChange, onConflict string, - state fs.SpaceState, - remotePageByID map[string]confluence.Page, + preflightCtx pushPreflightContext, ) string { switch change.Type { case syncflow.PushChangeAdd: - return fmt.Sprintf("add %s", change.Path) + preview, err := buildLocalCreatePreview(ctx, spaceDir, change.Path, preflightCtx.domain, preflightCtx.state.AttachmentIndex, preflightCtx.globalIndex) + if err != nil { + return fmt.Sprintf("add %s", change.Path) + } + return fmt.Sprintf("create %s (parent: %s, canonical path: %s)", change.Path, preview.ResolvedParent, preview.CanonicalTargetPath) case syncflow.PushChangeDelete: - return resolvePushDeletePreview(spaceDir, state, remotePageByID, change.Path).preflightMutationLine() + return resolvePushDeletePreview(spaceDir, preflightCtx.state, preflightCtx.remotePageByID, change.Path).preflightMutationLine() case syncflow.PushChangeModify: - return preflightModifyMutationLine(spaceDir, change.Path, onConflict, remotePageByID) + return preflightModifyMutationLine(spaceDir, change.Path, onConflict, preflightCtx.remotePageByID) default: return change.Path } @@ -243,8 +253,8 @@ func preflightModifyMutationLine( return fmt.Sprintf("update %s (page %s, %q, version %d)", relPath, pageID, remotePage.Title, plannedVersion) } -func printPushPreflightAttachmentMutations(out io.Writer, spaceDir string, syncChanges []syncflow.PushFileChange, state fs.SpaceState) { - uploads, deletes := preflightAttachmentMutations(spaceDir, syncChanges, state) +func 
printPushPreflightAttachmentMutations(ctx context.Context, out io.Writer, spaceDir string, syncChanges []syncflow.PushFileChange, preflightCtx pushPreflightContext) { + uploads, deletes := preflightAttachmentMutations(ctx, spaceDir, syncChanges, preflightCtx) if len(uploads) == 0 && len(deletes) == 0 { return } @@ -292,7 +302,7 @@ func isPreflightCapabilityProbeError(err error) bool { } } -func preflightAttachmentMutations(spaceDir string, syncChanges []syncflow.PushFileChange, state fs.SpaceState) (uploads, deletes []string) { +func preflightAttachmentMutations(_ context.Context, spaceDir string, syncChanges []syncflow.PushFileChange, preflightCtx pushPreflightContext) (uploads, deletes []string) { plannedUploadKeys := map[string]struct{}{} for _, change := range syncChanges { @@ -307,25 +317,28 @@ func preflightAttachmentMutations(spaceDir string, syncChanges []syncflow.PushFi } pageID := strings.TrimSpace(doc.Frontmatter.ID) - if pageID == "" { + referencedPaths, err := syncflow.CollectReferencedAssetPaths(spaceDir, absPath, doc.Body) + if err != nil { continue } - referencedPaths, err := syncflow.CollectReferencedAssetPaths(spaceDir, absPath, doc.Body) - if err != nil { + if pageID == "" { + for _, assetPath := range referencedPaths { + uploads = append(uploads, fmt.Sprintf("%s (new page: %s)", assetPath, change.Path)) + } continue } for _, assetPath := range referencedPaths { plannedKey := filepath.ToSlash(filepath.Join("assets", pageID, filepath.Base(assetPath))) plannedUploadKeys[plannedKey] = struct{}{} - if strings.TrimSpace(state.AttachmentIndex[plannedKey]) == "" { + if strings.TrimSpace(preflightCtx.state.AttachmentIndex[plannedKey]) == "" { uploads = append(uploads, plannedKey) } } prefix := "assets/" + pageID + "/" - for stateKey := range state.AttachmentIndex { + for stateKey := range preflightCtx.state.AttachmentIndex { if !strings.HasPrefix(stateKey, prefix) { continue } diff --git a/cmd/push_refactor_test.go b/cmd/push_refactor_test.go index 
b11cef9..817b7cb 100644 --- a/cmd/push_refactor_test.go +++ b/cmd/push_refactor_test.go @@ -1,6 +1,7 @@ package cmd import ( + "context" "os" "path/filepath" "reflect" @@ -77,10 +78,10 @@ func TestPreflightAttachmentMutations_ScopesDeletesPerPage(t *testing.T) { }, } - uploads, deletes := preflightAttachmentMutations(spaceDir, []syncflow.PushFileChange{ + uploads, deletes := preflightAttachmentMutations(context.Background(), spaceDir, []syncflow.PushFileChange{ {Type: syncflow.PushChangeModify, Path: "root.md"}, {Type: syncflow.PushChangeModify, Path: "other.md"}, - }, state) + }, pushPreflightContext{state: state}) if !reflect.DeepEqual(uploads, []string{"assets/1/new.png"}) { t.Fatalf("uploads = %#v", uploads) @@ -90,6 +91,30 @@ func TestPreflightAttachmentMutations_ScopesDeletesPerPage(t *testing.T) { } } +func TestPreflightAttachmentMutations_ShowsNewPageUploads(t *testing.T) { + t.Parallel() + + spaceDir := t.TempDir() + writeMarkdown(t, filepath.Join(spaceDir, "new-page.md"), fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{Title: "New Page"}, + Body: "![diagram](diagram.png)\n", + }) + if err := os.WriteFile(filepath.Join(spaceDir, "diagram.png"), []byte("png"), 0o600); err != nil { + t.Fatalf("write diagram: %v", err) + } + + uploads, deletes := preflightAttachmentMutations(context.Background(), spaceDir, []syncflow.PushFileChange{ + {Type: syncflow.PushChangeAdd, Path: "new-page.md"}, + }, pushPreflightContext{state: fs.SpaceState{AttachmentIndex: map[string]string{}}}) + + if !reflect.DeepEqual(uploads, []string{"diagram.png (new page: new-page.md)"}) { + t.Fatalf("uploads = %#v", uploads) + } + if len(deletes) != 0 { + t.Fatalf("deletes = %#v, want none", deletes) + } +} + func TestToSyncPushChanges_FiltersScopeAndNonMarkdownPaths(t *testing.T) { t.Parallel() diff --git a/cmd/status.go b/cmd/status.go index 0120779..a4d8a45 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "io" + "os" "path/filepath" "sort" 
"strings" @@ -23,6 +24,7 @@ type StatusRemote interface { ListPages(ctx context.Context, opts confluence.PageListOptions) (confluence.PageListResult, error) GetPage(ctx context.Context, pageID string) (confluence.Page, error) GetFolder(ctx context.Context, folderID string) (confluence.Folder, error) + ListAttachments(ctx context.Context, pageID string) ([]confluence.Attachment, error) } // StatusReport contains the results of a sync drift inspection. @@ -36,9 +38,16 @@ type StatusReport struct { PlannedPathMoves []syncflow.PlannedPagePathMove ConflictAhead []string // pages that are both locally modified AND ahead on remote MaxVersionDrift int + LocalAttachmentAdded []string + LocalAttachmentDeleted []string + RemoteAttachmentAdded []string + RemoteAttachmentDeleted []string + OrphanedLocalAssets []string } -const statusScopeNote = "Scope: markdown/page drift only; attachment-only drift is excluded from `conf status` output. Use `git status` for local asset changes or `conf diff` for attachment-aware remote inspection. There is no attachment-aware `conf status` mode yet." +const statusScopeNote = "Scope: markdown/page drift by default. Use `conf status --attachments` to inspect local and remote attachment drift from the same command." + +var flagStatusAttachments bool var newStatusRemote = func(cfg *config.Config) (StatusRemote, error) { return newConfluenceClientFromConfig(cfg) @@ -51,7 +60,7 @@ func newStatusCmd() *cobra.Command { Short: "Inspect local and remote sync drift", Long: `status prints a high-level sync summary without mutating local files or remote content. -Status scope: markdown/page drift only; attachment-only drift is excluded from ` + "`conf status`" + ` output. Use ` + "`git status`" + ` for local asset changes or ` + "`conf diff`" + ` for attachment-aware remote inspection. There is no attachment-aware ` + "`conf status`" + ` mode yet. +Status scope defaults to markdown/page drift. 
Add ` + "`--attachments`" + ` to include local and remote attachment drift plus orphaned local asset files. TARGET follows the standard rule: - .md suffix => file mode (space inferred from file) @@ -65,6 +74,7 @@ TARGET follows the standard rule: return runStatus(cmd, config.ParseTarget(raw)) }, } + cmd.Flags().BoolVar(&flagStatusAttachments, "attachments", false, "Include attachment drift and orphaned local asset inspection") return cmd } @@ -126,7 +136,7 @@ func runStatus(cmd *cobra.Command, target config.Target) error { targetRelPath = normalizeRepoRelPath(rel) } - report, err := buildStatusReport(ctx, remote, target, initialCtx, state, spaceKey, targetRelPath) + report, err := buildStatusReport(ctx, remote, target, initialCtx, state, spaceKey, targetRelPath, flagStatusAttachments) if err != nil { return err } @@ -152,6 +162,12 @@ func runStatus(cmd *cobra.Command, target config.Target) error { _, _ = fmt.Fprintln(out, "\nVersion drift: no remote-ahead tracked pages") } + if flagStatusAttachments { + printStatusSection(out, "Attachment drift", report.LocalAttachmentAdded, nil, report.LocalAttachmentDeleted) + printStatusSection(out, "Remote attachment drift", report.RemoteAttachmentAdded, nil, report.RemoteAttachmentDeleted) + printStatusList(out, "orphaned local assets", report.OrphanedLocalAssets) + } + return nil } @@ -163,6 +179,7 @@ func buildStatusReport( state fs.SpaceState, spaceKey string, targetRelPath string, + includeAttachments bool, ) (StatusReport, error) { localAdded, localModified, localDeleted, err := collectLocalStatusChanges(target, initialCtx.spaceDir, spaceKey) if err != nil { @@ -281,6 +298,28 @@ func buildStatusReport( conflictAhead := computeConflictAhead(localModified, remoteModified) sort.Strings(conflictAhead) + localAttachmentAdded := []string{} + localAttachmentDeleted := []string{} + remoteAttachmentAdded := []string{} + remoteAttachmentDeleted := []string{} + orphanedLocalAssets := []string{} + + if includeAttachments { + var 
attachmentErr error + localAttachmentAdded, localAttachmentDeleted, remoteAttachmentAdded, remoteAttachmentDeleted, orphanedLocalAssets, attachmentErr = collectAttachmentStatus( + ctx, + remote, + target, + initialCtx.spaceDir, + state, + trackedPathByID, + targetRelPath, + ) + if attachmentErr != nil { + return StatusReport{}, attachmentErr + } + } + return StatusReport{ LocalAdded: localAdded, LocalModified: localModified, @@ -291,9 +330,177 @@ func buildStatusReport( PlannedPathMoves: plannedPathMoves, ConflictAhead: conflictAhead, MaxVersionDrift: maxVersionDrift, + LocalAttachmentAdded: localAttachmentAdded, + LocalAttachmentDeleted: localAttachmentDeleted, + RemoteAttachmentAdded: remoteAttachmentAdded, + RemoteAttachmentDeleted: remoteAttachmentDeleted, + OrphanedLocalAssets: orphanedLocalAssets, }, nil } +func collectAttachmentStatus( + ctx context.Context, + remote StatusRemote, + target config.Target, + spaceDir string, + state fs.SpaceState, + trackedPathByID map[string]string, + targetRelPath string, +) ([]string, []string, []string, []string, []string, error) { + targetCtx, err := resolveValidateTargetContext(target, spaceDir) + if err != nil { + return nil, nil, nil, nil, nil, fmt.Errorf("resolve attachment status target: %w", err) + } + + trackedStateByPageID := map[string]map[string]string{} + attachmentIDToPath := map[string]string{} + for relPath, attachmentID := range state.AttachmentIndex { + pageID := pageIDFromAttachmentPath(relPath) + if pageID == "" { + continue + } + if targetRelPath != "" { + if trackedPath, ok := trackedPathByID[pageID]; !ok || trackedPath != targetRelPath { + continue + } + } + if trackedStateByPageID[pageID] == nil { + trackedStateByPageID[pageID] = map[string]string{} + } + trackedStateByPageID[pageID][normalizeRepoRelPath(relPath)] = strings.TrimSpace(attachmentID) + attachmentIDToPath[strings.TrimSpace(attachmentID)] = normalizeRepoRelPath(relPath) + } + + localAdded := map[string]struct{}{} + localDeleted := 
map[string]struct{}{} + referencedByPage := map[string]map[string]struct{}{} + for _, file := range targetCtx.files { + relPath, relErr := filepath.Rel(spaceDir, file) + if relErr != nil { + continue + } + relPath = normalizeRepoRelPath(relPath) + doc, readErr := fs.ReadMarkdownDocument(file) + if readErr != nil { + continue + } + pageID := strings.TrimSpace(doc.Frontmatter.ID) + if pageID == "" { + pageID = strings.TrimSpace(state.PagePathIndex[relPath]) + } + if pageID == "" { + continue + } + if referencedByPage[pageID] == nil { + referencedByPage[pageID] = map[string]struct{}{} + } + referencedPaths, refErr := syncflow.CollectReferencedAssetPaths(spaceDir, file, doc.Body) + if refErr != nil { + continue + } + for _, assetPath := range referencedPaths { + plannedKey := normalizeRepoRelPath(filepath.ToSlash(filepath.Join("assets", pageID, filepath.Base(assetPath)))) + referencedByPage[pageID][plannedKey] = struct{}{} + if strings.TrimSpace(state.AttachmentIndex[plannedKey]) == "" { + localAdded[plannedKey] = struct{}{} + continue + } + if _, statErr := os.Stat(filepath.Join(spaceDir, filepath.FromSlash(plannedKey))); os.IsNotExist(statErr) { + localDeleted[plannedKey] = struct{}{} + } + } + } + + for pageID, statePaths := range trackedStateByPageID { + referencedSet := referencedByPage[pageID] + for relPath := range statePaths { + if referencedSet == nil { + localDeleted[relPath] = struct{}{} + continue + } + if _, exists := referencedSet[relPath]; !exists { + localDeleted[relPath] = struct{}{} + } + } + } + + remoteAdded := map[string]struct{}{} + remoteDeleted := map[string]struct{}{} + for pageID, trackedState := range trackedStateByPageID { + remoteAttachments, listErr := remote.ListAttachments(ctx, pageID) + if listErr != nil { + return nil, nil, nil, nil, nil, fmt.Errorf("list remote attachments for page %s: %w", pageID, listErr) + } + remoteIDs := map[string]confluence.Attachment{} + for _, attachment := range remoteAttachments { + attachmentID := 
strings.TrimSpace(attachment.ID) + if attachmentID == "" { + continue + } + remoteIDs[attachmentID] = attachment + if _, tracked := attachmentIDToPath[attachmentID]; !tracked { + remoteAdded[normalizeRepoRelPath(filepath.ToSlash(filepath.Join("assets", pageID, attachmentID+"-"+strings.TrimSpace(attachment.Filename))))] = struct{}{} + } + } + for relPath, attachmentID := range trackedState { + if _, exists := remoteIDs[attachmentID]; !exists { + remoteDeleted[relPath] = struct{}{} + } + } + } + + orphaned := []string{} + assetsRoot := filepath.Join(spaceDir, "assets") + if _, statErr := os.Stat(assetsRoot); statErr == nil { + walkErr := filepath.WalkDir(assetsRoot, func(path string, d os.DirEntry, walkErr error) error { + if walkErr != nil || d.IsDir() { + return walkErr + } + relPath, relErr := filepath.Rel(spaceDir, path) + if relErr != nil { + return relErr + } + normalized := normalizeRepoRelPath(relPath) + if strings.TrimSpace(state.AttachmentIndex[normalized]) == "" { + orphaned = append(orphaned, normalized) + } + return nil + }) + if walkErr != nil { + return nil, nil, nil, nil, nil, fmt.Errorf("walk local assets: %w", walkErr) + } + } + + return sortedStatusSet(localAdded), sortedStatusSet(localDeleted), sortedStatusSet(remoteAdded), sortedStatusSet(remoteDeleted), dedupeSortedStatusPaths(orphaned), nil +} + +func dedupeSortedStatusPaths(paths []string) []string { + if len(paths) == 0 { + return nil + } + sort.Strings(paths) + out := make([]string, 0, len(paths)) + for _, path := range paths { + if len(out) > 0 && out[len(out)-1] == path { + continue + } + out = append(out, path) + } + return out +} + +func sortedStatusSet(values map[string]struct{}) []string { + if len(values) == 0 { + return nil + } + out := make([]string, 0, len(values)) + for value := range values { + out = append(out, value) + } + sort.Strings(out) + return out +} + func listAllPagesForStatus(ctx context.Context, remote StatusRemote, opts confluence.PageListOptions) ([]confluence.Page, 
error) { pages := make([]confluence.Page, 0) cursor := opts.Cursor diff --git a/cmd/status_e2e_test.go b/cmd/status_e2e_test.go index 747a963..50ac37e 100644 --- a/cmd/status_e2e_test.go +++ b/cmd/status_e2e_test.go @@ -133,7 +133,7 @@ func TestBuildStatusReport_Success(t *testing.T) { spaceKey: "TEST", } - report, err := buildStatusReport(context.Background(), mock, target, initialCtx, state, "TEST", "") + report, err := buildStatusReport(context.Background(), mock, target, initialCtx, state, "TEST", "", false) if err != nil { t.Fatalf("unexpected error: %v", err) } diff --git a/cmd/status_run_test.go b/cmd/status_run_test.go index 0c00e42..581d407 100644 --- a/cmd/status_run_test.go +++ b/cmd/status_run_test.go @@ -48,7 +48,7 @@ func TestBuildStatusReport(t *testing.T) { target := config.Target{Value: "TEST", Mode: config.TargetModeSpace} ctx := context.Background() - _, _ = buildStatusReport(ctx, mock, target, initialPullContext{}, fs.SpaceState{}, "TEST", "filterPath") + _, _ = buildStatusReport(ctx, mock, target, initialPullContext{}, fs.SpaceState{}, "TEST", "filterPath", false) } func TestRunStatus_Integration(t *testing.T) { @@ -195,7 +195,7 @@ func TestBuildStatusReport_Drift(t *testing.T) { mock := &mockStatusRemote{} target := config.Target{Value: "TEST", Mode: config.TargetModeSpace} ctx := context.Background() - _, _ = buildStatusReport(ctx, mock, target, initialPullContext{}, fs.SpaceState{}, "TEST", "filterPath") + _, _ = buildStatusReport(ctx, mock, target, initialPullContext{}, fs.SpaceState{}, "TEST", "filterPath", false) } func TestRunStatus_ExplainsMarkdownOnlyScopeForAssetDrift(t *testing.T) { @@ -282,11 +282,11 @@ func TestStatusCmdHelp_ExplainsMarkdownPageScope(t *testing.T) { runParallelCommandTest(t) cmd := newStatusCmd() - if !strings.Contains(cmd.Long, "markdown/page drift only") { - t.Fatalf("expected long help to explain markdown/page-only scope, got:\n%s", cmd.Long) + if !strings.Contains(cmd.Long, "markdown/page drift") { + 
t.Fatalf("expected long help to explain markdown/page scope, got:\n%s", cmd.Long) } - if !strings.Contains(cmd.Long, "attachment-only drift is excluded") { - t.Fatalf("expected long help to explain excluded attachment-only drift, got:\n%s", cmd.Long) + if !strings.Contains(cmd.Long, "--attachments") { + t.Fatalf("expected long help to advertise --attachments mode, got:\n%s", cmd.Long) } } @@ -456,6 +456,63 @@ func TestRunStatus_ShowsPlannedPathMoves(t *testing.T) { } } +func TestRunStatus_AttachmentsFlagReportsAttachmentOnlyDrift(t *testing.T) { + runParallelCommandTest(t) + repo, spaceDir := setupStatusScopeRepo(t) + if err := os.Remove(filepath.Join(spaceDir, "assets", "1", "file.png")); err != nil { + t.Fatalf("remove asset: %v", err) + } + + oldAttachments := flagStatusAttachments + t.Cleanup(func() { flagStatusAttachments = oldAttachments }) + + mock := &mockStatusRemote{ + space: confluence.Space{ID: "space-1", Key: "TEST"}, + pages: confluence.PageListResult{ + Pages: []confluence.Page{ + {ID: "1", Title: "Page", Version: 1}, + }, + }, + page: confluence.Page{ID: "1", SpaceID: "space-1", Status: "current"}, + attachments: map[string][]confluence.Attachment{ + "1": { + {ID: "att-1", Filename: "file.png"}, + {ID: "att-2", Filename: "remote-extra.png"}, + }, + }, + } + + oldNewStatusRemote := newStatusRemote + newStatusRemote = func(cfg *config.Config) (StatusRemote, error) { + return mock, nil + } + t.Cleanup(func() { newStatusRemote = oldNewStatusRemote }) + + chdirRepo(t, repo) + cmd := newStatusCmd() + flagStatusAttachments = true + out := &bytes.Buffer{} + cmd.SetOut(out) + + if err := runStatus(cmd, config.Target{Value: "TEST", Mode: config.TargetModeSpace}); err != nil { + t.Fatalf("runStatus() error: %v", err) + } + + got := out.String() + if !strings.Contains(got, "Attachment drift:") { + t.Fatalf("expected attachment drift section, got:\n%s", got) + } + if !strings.Contains(got, "deleted (1):") || !strings.Contains(got, "assets/1/file.png") { + 
t.Fatalf("expected local attachment deletion, got:\n%s", got) + } + if !strings.Contains(got, "Remote attachment drift:") { + t.Fatalf("expected remote attachment drift section, got:\n%s", got) + } + if !strings.Contains(got, "added (1):") || !strings.Contains(got, "assets/1/att-2-remote-extra.png") { + t.Fatalf("expected remote attachment addition, got:\n%s", got) + } +} + func setupStatusScopeRepo(t *testing.T) (string, string) { t.Helper() diff --git a/cmd/status_test.go b/cmd/status_test.go index 7d0fc52..2c45b17 100644 --- a/cmd/status_test.go +++ b/cmd/status_test.go @@ -53,6 +53,7 @@ type mockStatusRemote struct { pages confluence.PageListResult page confluence.Page folders map[string]confluence.Folder + attachments map[string][]confluence.Attachment err error } @@ -82,6 +83,13 @@ func (m *mockStatusRemote) GetFolder(_ context.Context, folderID string) (conflu return folder, nil } +func (m *mockStatusRemote) ListAttachments(_ context.Context, pageID string) ([]confluence.Attachment, error) { + if m.err != nil { + return nil, m.err + } + return append([]confluence.Attachment(nil), m.attachments[pageID]...), nil +} + func TestListAllPagesForStatus(t *testing.T) { mock := &mockStatusRemote{ pages: confluence.PageListResult{ From 341c07c6a3f8280031aca92416873e0ca9fd1d05 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 11:13:59 +0100 Subject: [PATCH 04/10] push: require explicit merge policy and folder downgrade confirmation --- README.md | 10 +- cmd/diff_pages.go | 10 +- cmd/e2e_test.go | 179 ++++++++++++- cmd/folder_fallback.go | 86 ++++++ cmd/folder_fallback_test.go | 77 ++++++ cmd/pull_stash.go | 15 +- cmd/pull_stash_test.go | 50 ++++ cmd/pull_test.go | 10 +- cmd/push.go | 23 +- cmd/push_conflict_test.go | 71 +++++ cmd/push_target_test.go | 40 +++ cmd/push_worktree.go | 70 +++-- cmd/report_push_test.go | 8 +- docs/automation.md | 16 +- docs/compatibility.md | 36 ++- ...c-09-workflow-hardening-and-operator-ux.md | 247 ++++++++++++++++++ 
docs/usage.md | 17 +- internal/sync/folder_fallback.go | 33 ++- internal/sync/pull.go | 66 ++++- internal/sync/pull_pages.go | 6 +- internal/sync/pull_state_test.go | 24 +- internal/sync/push_hierarchy.go | 64 +---- internal/sync/push_types.go | 21 ++ internal/sync/tenant_capabilities_test.go | 56 ++-- 24 files changed, 1067 insertions(+), 168 deletions(-) create mode 100644 cmd/folder_fallback.go create mode 100644 cmd/folder_fallback_test.go create mode 100644 docs/plans/2026-03-11-live-sync-09-workflow-hardening-and-operator-ux.md diff --git a/README.md b/README.md index 64d9316..2cc9917 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ Write docs like code. Publish to Confluence with confidence. ✍️ ## Why teams use `conf` ✨ - πŸ“ Markdown-first authoring with Confluence as the destination. - πŸ›‘οΈ Safe sync model with validation before remote writes. -- πŸ‘€ Clear preview step via `conf diff` for tracked pages and `conf push --preflight` for brand-new files. +- πŸ‘€ Clear preview step via `conf diff` for tracked pages and brand-new files, plus `conf push --preflight` for full push planning. - πŸ”Ž Local full-text search across synced Markdown with SQLite or Bleve backends. - πŸ€– Works in local repos and automation pipelines. @@ -38,7 +38,7 @@ conf init `conf init` prepares Git metadata, `.gitignore`, and `.env` scaffolding, and creates an initial commit when it initializes a new Git repository. If `ATLASSIAN_*` or legacy `CONFLUENCE_*` credentials are already set in the environment, `conf init` writes `.env` from them without prompting. -`conf pull` mirrors Confluence hierarchy locally by placing folders and child pages in nested directories. Pages with children use `/.md` so they are distinct from pure folders. Incremental pulls reconcile remote creates, updates, and deletes without requiring `--force`. 
Leaf-page title renames can keep the existing Markdown path when the effective parent directory is unchanged, but pages that own subtree directories move when their self-owned directory segment changes. Hierarchy moves and ancestor/path-segment sanitization changes are surfaced as `PAGE_PATH_MOVED` notes in `conf pull`/`conf diff`, and `conf status` previews tracked moves before the next pull. +`conf pull` mirrors Confluence hierarchy locally by placing folders and child pages in nested directories. Pages with children use `/.md` so they are distinct from pure folders. Incremental pulls reconcile remote creates, updates, and deletes without requiring `--force`. Canonical pull paths always win, so older authored slugs are reconciled into the same path shape a fresh workspace would get. Hierarchy moves and ancestor/path-segment sanitization changes are surfaced as `PAGE_PATH_MOVED` notes in `conf pull`/`conf diff`, and `conf status` previews tracked moves before the next pull. ## Quick flow πŸ”„ > ⚠️ **IMPORTANT**: If you are developing `conf` itself, NEVER run sync commands against real Confluence spaces in the repository root. This prevents accidental commits of synced documentation. Use a separate sandbox folder. 
@@ -57,7 +57,7 @@ conf validate ENG conf diff ENG # Preview a brand-new file before its first push -conf push .\ENG\New-Page.md --preflight +conf diff .\ENG\New-Page.md # 4) Push local changes conf push ENG --on-conflict=cancel @@ -73,7 +73,9 @@ conf push ENG --on-conflict=cancel - Removing tracked Markdown pages archives the corresponding remote page; follow-up pull removes the archived page from tracked local state - `pull` and `push` are serialized per repository with a workspace lock, so concurrent mutating runs fail fast with a clear lock message - `push` failures retain recovery refs and print exact `conf recover`, `git switch`, and cleanup commands for the retained run -- Status scope: `conf status` reports Markdown page drift only; use `git status` for local asset changes or `conf diff` for attachment-aware remote inspection. There is no attachment-aware `conf status` mode yet +- Status scope: `conf status` reports Markdown page drift by default; add `--attachments` to inspect attachment-only drift and orphaned local assets from the same command +- Non-interactive `push --on-conflict=pull-merge` requires `--merge-resolution=fail|keep-local|keep-remote|keep-both` +- Push never silently converts a directory-backed folder into a page; interactive runs require explicit confirmation before any folder-to-page downgrade - Label rules: labels are trimmed, lowercased, deduplicated, and sorted; empty labels and labels containing whitespace are rejected - Search filters: `--space`, repeatable `--label`, `--heading`, `--created-by`, `--updated-by`, date bounds, and `--result-detail` - Git remote is optional (local Git is enough) diff --git a/cmd/diff_pages.go b/cmd/diff_pages.go index e528c5f..6bd91d5 100644 --- a/cmd/diff_pages.go +++ b/cmd/diff_pages.go @@ -147,7 +147,15 @@ func shouldIgnoreFolderHierarchyError(err error) bool { return true } var apiErr *confluence.APIError - return errors.As(err, &apiErr) + if errors.As(err, &apiErr) { + switch apiErr.StatusCode { + 
case 400, 409: + return false + default: + return true + } + } + return false } func diffDisplayRelPath(spaceDir, path string) string { diff --git a/cmd/e2e_test.go b/cmd/e2e_test.go index 1c7efd5..f5256b7 100644 --- a/cmd/e2e_test.go +++ b/cmd/e2e_test.go @@ -253,7 +253,7 @@ func TestWorkflow_PushAutoPullMerge(t *testing.T) { // 4. Run push with --on-conflict=pull-merge // This should trigger the automatic pull - pcmd := exec.Command(confBin, "push", simplePath, "--on-conflict=pull-merge", "--yes", "--non-interactive") + pcmd := exec.Command(confBin, "push", simplePath, "--on-conflict=pull-merge", "--merge-resolution=keep-both", "--yes", "--non-interactive") pcmd.Dir = tmpDir out, err := pcmd.CombinedOutput() // It might fail with a content conflict error (which is what happens in this test setup) @@ -286,6 +286,89 @@ func TestWorkflow_PushAutoPullMerge(t *testing.T) { } } +func TestWorkflow_PushAutoPullMergeFailDoesNotMutateWorkspace(t *testing.T) { + e2eCfg := requireE2EConfig(t) + spaceKey := e2eCfg.PrimarySpaceKey + + ctx := context.Background() + cfg, err := config.Load("") + if err != nil { + t.Fatalf("Load config: %v", err) + } + client, _ := confluence.NewClient(confluence.ClientConfig{ + BaseURL: cfg.Domain, + Email: cfg.Email, + APIToken: cfg.APIToken, + }) + + rootDir := projectRootFromWD(t) + confBin := confBinaryForOS(rootDir) + + tmpDir, err := os.MkdirTemp("", "conf-e2e-autopull-fail-*") + if err != nil { + t.Fatalf("MkdirTemp: %v", err) + } + defer os.RemoveAll(tmpDir) + + runCMS := func(args ...string) string { + cmd := exec.Command(confBin, args...) 
+ cmd.Dir = tmpDir + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("Command conf %v failed: %v\n%s", args, err, string(out)) + } + return string(out) + } + + runCMS("init") + runCMS("pull", spaceKey, "--yes", "--non-interactive") + + spaceDir := findPulledSpaceDir(t, tmpDir) + simplePath, pageID := prepareE2EConflictTarget(t, spaceDir, client, runCMS) + doc, _ := fs.ReadMarkdownDocument(simplePath) + originalVersion := doc.Frontmatter.Version + doc.Body += "\n\nLocal change for auto pull-merge fail test" + if err := fs.WriteMarkdownDocument(simplePath, doc); err != nil { + t.Fatalf("WriteMarkdown: %v", err) + } + + remotePage, err := client.GetPage(ctx, pageID) + if err != nil { + t.Fatalf("GetPage: %v", err) + } + if _, err := client.UpdatePage(ctx, pageID, confluence.PageUpsertInput{ + SpaceID: remotePage.SpaceID, + ParentPageID: remotePage.ParentPageID, + Title: remotePage.Title, + Version: remotePage.Version + 1, + BodyADF: []byte(`{"version":1,"type":"doc","content":[{"type":"paragraph","content":[{"type":"text","text":"Remote update for auto pull-merge fail"}]}]}`), + }); err != nil { + t.Fatalf("Remote update (conflict simulation) failed: %v", err) + } + time.Sleep(5 * time.Second) + + cmd := exec.Command(confBin, "push", simplePath, "--on-conflict=pull-merge", "--merge-resolution=fail", "--yes", "--non-interactive") + cmd.Dir = tmpDir + out, err := cmd.CombinedOutput() + if err == nil { + t.Fatalf("push should fail closed when --merge-resolution=fail is selected\n%s", string(out)) + } + if !strings.Contains(string(out), "--merge-resolution=fail") { + t.Fatalf("expected fail-before-mutate guidance, got:\n%s", string(out)) + } + + docAfter, err := fs.ReadMarkdownDocument(simplePath) + if err != nil { + t.Fatalf("ReadMarkdown after failed auto pull-merge: %v", err) + } + if docAfter.Frontmatter.Version != originalVersion { + t.Fatalf("local version changed unexpectedly: got %d want %d", docAfter.Frontmatter.Version, originalVersion) + } + if 
!strings.Contains(docAfter.Body, "Local change for auto pull-merge fail test") { + t.Fatalf("expected local edit to remain untouched, body=\n%s", docAfter.Body) + } +} + func TestWorkflow_AgenticFullCycle(t *testing.T) { e2eCfg := requireE2EConfig(t) spaceKey := e2eCfg.PrimarySpaceKey @@ -338,6 +421,100 @@ func TestWorkflow_AgenticFullCycle(t *testing.T) { fmt.Printf("Agentic full cycle succeeded\n") } +func TestWorkflow_DiffPreviewForNewPage(t *testing.T) { + e2eCfg := requireE2EConfig(t) + spaceKey := e2eCfg.PrimarySpaceKey + + rootDir := projectRootFromWD(t) + confBin := confBinaryForOS(rootDir) + + tmpDir, err := os.MkdirTemp("", "conf-e2e-diff-preview-*") + if err != nil { + t.Fatalf("MkdirTemp: %v", err) + } + defer os.RemoveAll(tmpDir) + + runCMS := func(args ...string) string { + cmd := exec.Command(confBin, args...) + cmd.Dir = tmpDir + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("Command conf %v failed: %v\n%s", args, err, string(out)) + } + return string(out) + } + + runCMS("init") + runCMS("pull", spaceKey, "--yes", "--non-interactive") + + spaceDir := findPulledSpaceDirBySpaceKey(t, tmpDir, spaceKey) + filePath := filepath.Join(spaceDir, "diff-preview-e2e.md") + if err := fs.WriteMarkdownDocument(filePath, fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{Title: "Diff Preview E2E"}, + Body: "preview body\n", + }); err != nil { + t.Fatalf("WriteMarkdown: %v", err) + } + + cmd := exec.Command(confBin, "diff", filePath) + cmd.Dir = tmpDir + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("conf diff failed: %v\n%s", err, string(out)) + } + text := string(out) + if !strings.Contains(text, "new page preview:") { + t.Fatalf("expected new page preview output, got:\n%s", text) + } + if !strings.Contains(text, "canonical target path: Diff-Preview-E2E.md") { + t.Fatalf("expected canonical target path in preview, got:\n%s", text) + } +} + +func TestWorkflow_StatusAttachmentsFlagShowsOrphanedAsset(t *testing.T) { + e2eCfg := 
requireE2EConfig(t) + spaceKey := e2eCfg.PrimarySpaceKey + + rootDir := projectRootFromWD(t) + confBin := confBinaryForOS(rootDir) + + tmpDir, err := os.MkdirTemp("", "conf-e2e-status-attachments-*") + if err != nil { + t.Fatalf("MkdirTemp: %v", err) + } + defer os.RemoveAll(tmpDir) + + runCMS := func(args ...string) string { + cmd := exec.Command(confBin, args...) + cmd.Dir = tmpDir + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("Command conf %v failed: %v\n%s", args, err, string(out)) + } + return string(out) + } + + runCMS("init") + runCMS("pull", spaceKey, "--yes", "--non-interactive") + + spaceDir := findPulledSpaceDirBySpaceKey(t, tmpDir, spaceKey) + orphanPath := filepath.Join(spaceDir, "assets", "orphan-page", "ghost.txt") + if err := os.MkdirAll(filepath.Dir(orphanPath), 0o750); err != nil { + t.Fatalf("MkdirAll orphan asset dir: %v", err) + } + if err := os.WriteFile(orphanPath, []byte("ghost"), 0o600); err != nil { + t.Fatalf("WriteFile orphan asset: %v", err) + } + + out := runCMS("status", spaceKey, "--attachments") + if !strings.Contains(out, "orphaned local assets (1):") { + t.Fatalf("expected orphaned local assets section, got:\n%s", out) + } + if !strings.Contains(out, "assets/orphan-page/ghost.txt") { + t.Fatalf("expected orphaned asset path, got:\n%s", out) + } +} + func TestWorkflow_SandboxBaselineDiagnosticsAllowlist(t *testing.T) { e2eCfg := requireE2EConfig(t) diff --git a/cmd/folder_fallback.go b/cmd/folder_fallback.go new file mode 100644 index 0000000..8b75b33 --- /dev/null +++ b/cmd/folder_fallback.go @@ -0,0 +1,86 @@ +package cmd + +import ( + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "github.com/charmbracelet/huh" + "github.com/rgonek/confluence-markdown-sync/internal/fs" + syncflow "github.com/rgonek/confluence-markdown-sync/internal/sync" +) + +func handleFolderPageFallback(in io.Reader, out io.Writer, spaceDir string, folderErr *syncflow.FolderPageFallbackRequiredError) (string, bool, error) { + if 
folderErr == nil { + return "", false, nil + } + if flagNonInteractive { + return "", false, fmt.Errorf("folder %q requires explicit interactive confirmation before converting it into a page-backed hierarchy; rerun without --non-interactive and accept the fallback or rename the folder", folderErr.Path) + } + + title := fmt.Sprintf("Convert %q into a page-with-subpages node?", folderErr.Path) + description := fmt.Sprintf("Confluence cannot keep %q as a pure folder (%s). Accepting will add %s locally and continue the push.", folderErr.Path, folderErr.Reason, previewIndexPathForDir(folderErr.Path)) + accepted := false + + if outputSupportsProgress(out) { + form := huh.NewForm( + huh.NewGroup( + huh.NewConfirm(). + Title(title). + Description(description). + Value(&accepted), + ), + ).WithOutput(out) + if err := form.Run(); err != nil { + return "", false, err + } + } else { + if _, err := fmt.Fprintf(out, "%s\n%s [y/N]: ", title, description); err != nil { + return "", false, err + } + choice, err := readPromptLine(in) + if err != nil { + return "", false, err + } + choice = strings.ToLower(strings.TrimSpace(choice)) + accepted = choice == "y" || choice == "yes" + } + + if !accepted { + return "", false, fmt.Errorf("folder downgrade for %q was not accepted; rename or restructure locally before retrying", folderErr.Path) + } + + indexRelPath, err := materializeFolderAsPage(spaceDir, folderErr.Path) + if err != nil { + return "", false, err + } + _, _ = fmt.Fprintf(out, "created page-backed hierarchy node %s to preserve local intent\n", indexRelPath) + return indexRelPath, true, nil +} + +func materializeFolderAsPage(spaceDir, dirPath string) (string, error) { + dirPath = normalizeRepoRelPath(dirPath) + if dirPath == "" || dirPath == "." 
{ + return "", fmt.Errorf("folder path is required") + } + + indexRelPath := previewIndexPathForDir(dirPath) + indexAbsPath := filepath.Join(spaceDir, filepath.FromSlash(indexRelPath)) + if _, err := os.Stat(indexAbsPath); err == nil { + return indexRelPath, nil + } + if err := os.MkdirAll(filepath.Dir(indexAbsPath), 0o750); err != nil { + return "", err + } + if err := fs.WriteMarkdownDocument(indexAbsPath, fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{ + Title: filepath.Base(filepath.FromSlash(dirPath)), + }, + Body: "", + }); err != nil { + return "", err + } + return indexRelPath, nil +} diff --git a/cmd/folder_fallback_test.go b/cmd/folder_fallback_test.go new file mode 100644 index 0000000..ed15a69 --- /dev/null +++ b/cmd/folder_fallback_test.go @@ -0,0 +1,77 @@ +package cmd + +import ( + "bytes" + "path/filepath" + "strings" + "testing" + + "github.com/rgonek/confluence-markdown-sync/internal/fs" + syncflow "github.com/rgonek/confluence-markdown-sync/internal/sync" +) + +func TestHandleFolderPageFallbackAcceptsAndMaterializesPageNode(t *testing.T) { + spaceDir := t.TempDir() + + oldNonInteractive := flagNonInteractive + flagNonInteractive = false + t.Cleanup(func() { flagNonInteractive = oldNonInteractive }) + + out := &bytes.Buffer{} + indexRelPath, accepted, err := handleFolderPageFallback( + strings.NewReader("y\n"), + out, + spaceDir, + &syncflow.FolderPageFallbackRequiredError{ + Path: "Parent/Child", + Reason: "unsupported tenant capability", + }, + ) + if err != nil { + t.Fatalf("handleFolderPageFallback() error: %v", err) + } + if !accepted { + t.Fatal("expected fallback acceptance") + } + if indexRelPath != "Parent/Child/Child.md" { + t.Fatalf("index path = %q, want Parent/Child/Child.md", indexRelPath) + } + + doc, readErr := fs.ReadMarkdownDocument(filepath.Join(spaceDir, filepath.FromSlash(indexRelPath))) + if readErr != nil { + t.Fatalf("read materialized page node: %v", readErr) + } + if doc.Frontmatter.Title != "Child" { + 
t.Fatalf("materialized title = %q, want Child", doc.Frontmatter.Title) + } + if !strings.Contains(out.String(), "created page-backed hierarchy node Parent/Child/Child.md") { + t.Fatalf("expected confirmation output, got:\n%s", out.String()) + } +} + +func TestHandleFolderPageFallbackNonInteractiveFailsClosed(t *testing.T) { + spaceDir := t.TempDir() + + oldNonInteractive := flagNonInteractive + flagNonInteractive = true + t.Cleanup(func() { flagNonInteractive = oldNonInteractive }) + + _, accepted, err := handleFolderPageFallback( + strings.NewReader(""), + &bytes.Buffer{}, + spaceDir, + &syncflow.FolderPageFallbackRequiredError{ + Path: "Parent", + Reason: "folder semantic conflict", + }, + ) + if err == nil { + t.Fatal("handleFolderPageFallback() expected non-interactive refusal") + } + if accepted { + t.Fatal("expected fallback to remain unaccepted") + } + if !strings.Contains(err.Error(), "explicit interactive confirmation") { + t.Fatalf("expected interactive-confirmation guidance, got: %v", err) + } +} diff --git a/cmd/pull_stash.go b/cmd/pull_stash.go index 657ceaf..1e247b6 100644 --- a/cmd/pull_stash.go +++ b/cmd/pull_stash.go @@ -146,9 +146,18 @@ func handlePullConflict(repoRoot, stashRef, scopePath string, in io.Reader, out } if flagNonInteractive || flagYes { - return fmt.Errorf( - "a sync conflict needs your choice (keep local, keep website, or keep both), but interactive input is disabled. 
rerun without --non-interactive to continue", - ) + switch strings.TrimSpace(flagMergeResolution) { + case "keep-local": + return applyPullConflictChoice("local", repoRoot, stashRef, scopePath, conflictedPaths, out) + case "keep-remote": + return applyPullConflictChoice("remote", repoRoot, stashRef, scopePath, conflictedPaths, out) + case "keep-both": + return applyPullConflictChoice("both", repoRoot, stashRef, scopePath, conflictedPaths, out) + default: + return fmt.Errorf( + "a sync conflict needs your choice (keep local, keep website, or keep both), but interactive input is disabled. rerun without --non-interactive or pass --merge-resolution=keep-local|keep-remote|keep-both", + ) + } } const ( diff --git a/cmd/pull_stash_test.go b/cmd/pull_stash_test.go index c598df9..cf1e59d 100644 --- a/cmd/pull_stash_test.go +++ b/cmd/pull_stash_test.go @@ -85,6 +85,56 @@ func TestApplyAndDropStash_KeepBothCreatesSideBySideConflictCopy(t *testing.T) { } } +func TestApplyAndDropStash_NonInteractiveKeepBothUsesMergeResolution(t *testing.T) { + runParallelCommandTest(t) + + repo := t.TempDir() + setupGitRepo(t, repo) + + spaceDir := filepath.Join(repo, "Engineering (ENG)") + if err := os.MkdirAll(spaceDir, 0o750); err != nil { + t.Fatalf("mkdir space dir: %v", err) + } + + repoPath := filepath.ToSlash(filepath.Join("Engineering (ENG)", "Page.md")) + mainFile := filepath.Join(spaceDir, "Page.md") + if err := os.WriteFile(mainFile, []byte("base\n"), 0o600); err != nil { + t.Fatalf("write base file: %v", err) + } + + runGitForTest(t, repo, "add", ".") + runGitForTest(t, repo, "commit", "-m", "baseline") + + if err := os.WriteFile(mainFile, []byte("local edit\n"), 0o600); err != nil { + t.Fatalf("write local edit: %v", err) + } + runGitForTest(t, repo, "stash", "push", "--include-untracked", "-m", "local", "--", repoPath) + stashRef := strings.TrimSpace(runGitForTest(t, repo, "stash", "list", "-1", "--format=%gd")) + if stashRef == "" { + t.Fatal("expected stash ref") + } + + if err 
:= os.WriteFile(mainFile, []byte("website edit\n"), 0o600); err != nil { + t.Fatalf("write website edit: %v", err) + } + runGitForTest(t, repo, "add", repoPath) + runGitForTest(t, repo, "commit", "-m", "website update") + + oldMergeResolution := flagMergeResolution + flagMergeResolution = "keep-both" + t.Cleanup(func() { flagMergeResolution = oldMergeResolution }) + setAutomationFlags(t, false, true) + + out := &bytes.Buffer{} + if err := applyAndDropStash(repo, stashRef, filepath.ToSlash(filepath.Base(spaceDir)), strings.NewReader(""), out); err != nil { + t.Fatalf("applyAndDropStash() error: %v", err) + } + + if _, err := os.Stat(filepath.Join(spaceDir, "Page (My Local Changes).md")); err != nil { + t.Fatalf("expected side-by-side backup under non-interactive keep-both: %v", err) + } +} + func TestRunPull_DiscardLocalFailureRestoresLocalChanges(t *testing.T) { runParallelCommandTest(t) diff --git a/cmd/pull_test.go b/cmd/pull_test.go index 123e349..79088b5 100644 --- a/cmd/pull_test.go +++ b/cmd/pull_test.go @@ -447,16 +447,16 @@ func TestRunPull_DraftSpaceListing(t *testing.T) { t.Fatalf("runPull() error: %v", err) } - // draft.md should NOT be deleted, and should be updated from remote - doc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "draft.md")) + // Pull should reconcile the tracked draft to its canonical path and update it from remote. 
+ doc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "Draft-Page.md")) if err != nil { - t.Fatalf("read draft.md: %v", err) + t.Fatalf("read Draft-Page.md: %v", err) } if !strings.Contains(doc.Body, "remote draft body") { - t.Errorf("draft.md not updated from remote, body = %q", doc.Body) + t.Errorf("Draft-Page.md not updated from remote, body = %q", doc.Body) } if doc.Frontmatter.State != "draft" { - t.Errorf("draft.md status = %q, want draft", doc.Frontmatter.State) + t.Errorf("Draft-Page.md status = %q, want draft", doc.Frontmatter.State) } } diff --git a/cmd/push.go b/cmd/push.go index e7ac446..d91f9e4 100644 --- a/cmd/push.go +++ b/cmd/push.go @@ -35,11 +35,11 @@ var flagPushPreflight bool var flagPushKeepOrphanAssets bool var flagArchiveTaskTimeout = confluence.DefaultArchiveTaskTimeout var flagArchiveTaskPollInterval = confluence.DefaultArchiveTaskPollInterval +var flagMergeResolution string func newPushCmd() *cobra.Command { var onConflict string var dryRun bool - cmd := &cobra.Command{ Use: "push [TARGET]", Short: "Push local Markdown changes to Confluence", @@ -70,6 +70,7 @@ It uses an isolated worktree and a temporary branch to ensure safety.`, cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Auto-approve safety confirmations") cmd.Flags().BoolVar(&flagNonInteractive, "non-interactive", false, "Disable prompts; fail fast when a decision is required") cmd.Flags().StringVar(&onConflict, "on-conflict", "", "Non-interactive conflict policy: pull-merge|force|cancel") + cmd.Flags().StringVar(&flagMergeResolution, "merge-resolution", "", "Non-interactive merge resolution for pull-merge conflicts: fail|keep-local|keep-remote|keep-both") addReportJSONFlag(cmd) return cmd } @@ -86,6 +87,18 @@ func validateOnConflict(v string) error { } } +func validateMergeResolution(v string) error { + if v == "" { + return nil + } + switch v { + case "fail", "keep-local", "keep-remote", "keep-both": + return nil + default: + return fmt.Errorf("invalid 
--merge-resolution value %q: must be fail, keep-local, keep-remote, or keep-both", v) + } +} + func runPush(cmd *cobra.Command, target config.Target, onConflict string, dryRun bool) (runErr error) { ctx := getCommandContext(cmd) actualOut := ensureSynchronizedCmdOutput(cmd) @@ -134,6 +147,9 @@ func runPush(cmd *cobra.Command, target config.Target, onConflict string, dryRun if preflight && dryRun { return errors.New("--preflight and --dry-run cannot be used together") } + if err := validateMergeResolution(flagMergeResolution); err != nil { + return err + } if !preflight { resolvedPolicy, err := resolvePushConflictPolicy(cmd.InOrStdin(), out, onConflict, target.IsSpace()) if err != nil { @@ -141,6 +157,9 @@ func runPush(cmd *cobra.Command, target config.Target, onConflict string, dryRun } onConflict = resolvedPolicy telemetryConflictPolicy = resolvedPolicy + if !dryRun && flagNonInteractive && onConflict == OnConflictPullMerge && strings.TrimSpace(flagMergeResolution) == "" { + return errors.New("--non-interactive with --on-conflict=pull-merge requires --merge-resolution=fail|keep-local|keep-remote|keep-both") + } } initialCtx, err := resolveInitialPushContext(target) @@ -333,7 +352,7 @@ func runPush(cmd *cobra.Command, target config.Target, onConflict string, dryRun } }() - outcome, err := runPushInWorktree(ctx, cmd, out, target, spaceKey, spaceDir, onConflict, tsStr, + outcome, err := runPushInWorktree(ctx, cmd, out, target, spaceKey, spaceDir, onConflict, flagMergeResolution, tsStr, gitClient, spaceScopePath, changeScopePath, worktreeDir, syncBranchName, snapshotName, &stashRef) report.Diagnostics = append(report.Diagnostics, reportDiagnosticsFromPush(outcome.Result.Diagnostics, spaceDir)...) 
for _, commit := range outcome.Result.Commits { diff --git a/cmd/push_conflict_test.go b/cmd/push_conflict_test.go index 93c33cc..536d98e 100644 --- a/cmd/push_conflict_test.go +++ b/cmd/push_conflict_test.go @@ -258,3 +258,74 @@ func TestRunPush_PullMergeDoesNotForceDiscardLocalDuringPull(t *testing.T) { t.Fatalf("expected pull-merge completion guidance, got:\n%s", out.String()) } } + +func TestRunPush_PullMergeFailResolutionStopsBeforeMutatingWorkspace(t *testing.T) { + runParallelCommandTest(t) + + repo := t.TempDir() + spaceDir := preparePushRepoWithBaseline(t, repo) + rootPath := filepath.Join(spaceDir, "root.md") + + writeMarkdown(t, rootPath, fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{ + Title: "Root", + ID: "1", + + Version: 1, + ConfluenceLastModified: "2026-02-01T10:00:00Z", + }, + Body: "local uncommitted content\n", + }) + + fake := newCmdFakePushRemote(3) + oldPushFactory := newPushRemote + oldPullFactory := newPullRemote + newPushRemote = func(_ *config.Config) (syncflow.PushRemote, error) { return fake, nil } + newPullRemote = func(_ *config.Config) (syncflow.PullRemote, error) { return fake, nil } + t.Cleanup(func() { + newPushRemote = oldPushFactory + newPullRemote = oldPullFactory + }) + + pullCalled := false + oldRunPullForPush := runPullForPush + runPullForPush = func(_ *cobra.Command, _ config.Target) (commandRunReport, error) { + pullCalled = true + return commandRunReport{}, nil + } + t.Cleanup(func() { + runPullForPush = oldRunPullForPush + }) + + oldMergeResolution := flagMergeResolution + flagMergeResolution = "fail" + t.Cleanup(func() { flagMergeResolution = oldMergeResolution }) + + setAutomationFlags(t, false, true) + setupEnv(t) + chdirRepo(t, spaceDir) + + cmd := &cobra.Command{} + cmd.SetOut(&bytes.Buffer{}) + err := runPush(cmd, config.Target{Mode: config.TargetModeSpace, Value: ""}, OnConflictPullMerge, false) + if err == nil { + t.Fatal("runPush() expected explicit fail-before-mutate error") + } + if 
!strings.Contains(err.Error(), "--merge-resolution=fail") { + t.Fatalf("expected fail-resolution guidance, got: %v", err) + } + if pullCalled { + t.Fatal("automatic pull-merge should not run when --merge-resolution=fail is selected") + } + + doc, readErr := fs.ReadMarkdownDocument(rootPath) + if readErr != nil { + t.Fatalf("read root markdown: %v", readErr) + } + if !strings.Contains(doc.Body, "local uncommitted content") { + t.Fatalf("expected local file to remain untouched, got body %q", doc.Body) + } + if stashList := strings.TrimSpace(runGitForTest(t, repo, "stash", "list")); stashList != "" { + t.Fatalf("expected stash to be empty when pull-merge is not attempted, got:\n%s", stashList) + } +} diff --git a/cmd/push_target_test.go b/cmd/push_target_test.go index db06675..3aa1176 100644 --- a/cmd/push_target_test.go +++ b/cmd/push_target_test.go @@ -198,6 +198,9 @@ func TestRunPush_SpaceModeAssumesPullMerge(t *testing.T) { setupEnv(t) chdirRepo(t, spaceDir) setAutomationFlags(t, false, true) // non-interactive + oldMergeResolution := flagMergeResolution + flagMergeResolution = "keep-both" + t.Cleanup(func() { flagMergeResolution = oldMergeResolution }) cmd := &cobra.Command{} cmd.SetOut(&bytes.Buffer{}) @@ -213,3 +216,40 @@ func TestRunPush_SpaceModeAssumesPullMerge(t *testing.T) { t.Fatal("expected remote factory to be called") } } + +func TestRunPush_SpaceModePullMergeRequiresMergeResolutionInNonInteractiveMode(t *testing.T) { + runParallelCommandTest(t) + + repo := t.TempDir() + spaceDir := preparePushRepoWithBaseline(t, repo) + + writeMarkdown(t, filepath.Join(spaceDir, "root.md"), fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{ + Title: "Root", + ID: "1", + Version: 1, + ConfluenceLastModified: "2026-02-01T10:00:00Z", + }, + Body: "Updated local content\n", + }) + runGitForTest(t, repo, "add", ".") + runGitForTest(t, repo, "commit", "-m", "local change") + + setupEnv(t) + chdirRepo(t, spaceDir) + setAutomationFlags(t, false, true) + oldMergeResolution := 
flagMergeResolution + flagMergeResolution = "" + t.Cleanup(func() { flagMergeResolution = oldMergeResolution }) + + cmd := &cobra.Command{} + cmd.SetOut(&bytes.Buffer{}) + + err := runPush(cmd, config.Target{Mode: config.TargetModeSpace, Value: ""}, "", false) + if err == nil { + t.Fatal("runPush() expected merge-resolution requirement error") + } + if !strings.Contains(err.Error(), "--merge-resolution") { + t.Fatalf("expected merge-resolution guidance, got: %v", err) + } +} diff --git a/cmd/push_worktree.go b/cmd/push_worktree.go index 6cd587a..48c2707 100644 --- a/cmd/push_worktree.go +++ b/cmd/push_worktree.go @@ -22,7 +22,7 @@ func runPushInWorktree( cmd *cobra.Command, out io.Writer, target config.Target, - spaceKey, spaceDir, onConflict, tsStr string, + spaceKey, spaceDir, onConflict, mergeResolution, tsStr string, gitClient *git.Client, spaceScopePath, changeScopePath string, worktreeDir, syncBranchName, snapshotRefName string, @@ -123,21 +123,48 @@ func runPushInWorktree( progress = newConsoleProgress(out, "Syncing to Confluence") } - result, err := syncflow.Push(ctx, remote, syncflow.PushOptions{ - SpaceKey: spaceKey, - SpaceDir: wtSpaceDir, - Domain: cfg.Domain, - State: state, - GlobalPageIndex: globalPageIndex, - Changes: syncChanges, - ConflictPolicy: toSyncConflictPolicy(onConflict), - KeepOrphanAssets: flagPushKeepOrphanAssets, - ArchiveTimeout: normalizedArchiveTaskTimeout(), - ArchivePollInterval: normalizedArchiveTaskPollInterval(), - Progress: progress, - }) - outcome.Result = result - if err != nil { + var result syncflow.PushResult + for { + nextResult, pushErr := syncflow.Push(ctx, remote, syncflow.PushOptions{ + SpaceKey: spaceKey, + SpaceDir: wtSpaceDir, + Domain: cfg.Domain, + State: state, + GlobalPageIndex: globalPageIndex, + Changes: syncChanges, + ConflictPolicy: toSyncConflictPolicy(onConflict), + KeepOrphanAssets: flagPushKeepOrphanAssets, + ArchiveTimeout: normalizedArchiveTaskTimeout(), + ArchivePollInterval: 
normalizedArchiveTaskPollInterval(), + Progress: progress, + }) + result = nextResult + outcome.Result = result + err = pushErr + if err == nil { + break + } + var folderErr *syncflow.FolderPageFallbackRequiredError + if errors.As(err, &folderErr) { + indexRelPath, accepted, handleErr := handleFolderPageFallback(cmd.InOrStdin(), out, wtSpaceDir, folderErr) + if handleErr != nil { + return outcome, handleErr + } + if accepted { + if target.IsFile() && strings.TrimSpace(indexRelPath) != "" { + syncChanges = append(syncChanges, syncflow.PushFileChange{Type: syncflow.PushChangeAdd, Path: indexRelPath}) + } else { + syncChanges, err = collectPushChangesForTarget(wtClient, baselineRef, target, spaceScopePath, changeScopePath) + if err != nil { + return outcome, err + } + } + if err := runPushValidation(ctx, out, config.Target{Mode: config.TargetModeSpace, Value: wtSpaceDir}, wtSpaceDir, "pre-push validate failed"); err != nil { + return outcome, err + } + continue + } + } var conflictErr *syncflow.PushConflictError if errors.As(err, &conflictErr) { slog.Warn("push_conflict_detected", @@ -148,10 +175,20 @@ func runPushInWorktree( "policy", conflictErr.Policy, ) if onConflict == OnConflictPullMerge { + if flagNonInteractive && strings.EqualFold(strings.TrimSpace(mergeResolution), "fail") { + outcome.ConflictResolution = &commandRunReportConflictResolution{ + Policy: OnConflictPullMerge, + Status: "failed", + } + return outcome, errors.New("automatic pull-merge stopped before mutating the main workspace because --merge-resolution=fail was requested") + } slog.Info("push_conflict_resolution", "strategy", OnConflictPullMerge, "action", "run_pull") _, _ = fmt.Fprintf(out, "conflict detected for %s; policy is %s, attempting automatic pull-merge...\n", conflictErr.Path, onConflict) + previousMergeResolution := flagMergeResolution + flagMergeResolution = mergeResolution if strings.TrimSpace(*stashRef) != "" { if err := restorePushStash(gitClient, *stashRef, spaceScopePath, nil); 
err != nil { + flagMergeResolution = previousMergeResolution return outcome, fmt.Errorf("restore local workspace before automatic pull-merge: %w", err) } *stashRef = "" @@ -167,6 +204,7 @@ func runPushInWorktree( AttachmentOperations: append([]commandRunReportAttachmentOp(nil), pullReport.AttachmentOperations...), FallbackModes: append([]string(nil), pullReport.FallbackModes...), } + flagMergeResolution = previousMergeResolution flagPullDiscardLocal = prevDiscardLocal if pullErr != nil { outcome.ConflictResolution.Status = "failed" diff --git a/cmd/report_push_test.go b/cmd/report_push_test.go index 4810f5e..ff71870 100644 --- a/cmd/report_push_test.go +++ b/cmd/report_push_test.go @@ -168,11 +168,11 @@ func TestRunPush_ReportJSONPullMergeEmitsSingleObjectAndCapturesPullMergeReport( if report.ConflictResolution.Status != "completed" { t.Fatalf("conflict resolution status = %q, want completed", report.ConflictResolution.Status) } - if !containsString(report.ConflictResolution.MutatedFiles, "root.md") { - t.Fatalf("conflict resolution mutated files = %v, want root.md", report.ConflictResolution.MutatedFiles) + if !containsString(report.ConflictResolution.MutatedFiles, "Root.md") { + t.Fatalf("conflict resolution mutated files = %v, want Root.md", report.ConflictResolution.MutatedFiles) } - if !containsString(report.MutatedFiles, "root.md") { - t.Fatalf("outer mutated files = %v, want root.md from pull-merge", report.MutatedFiles) + if !containsString(report.MutatedFiles, "Root.md") { + t.Fatalf("outer mutated files = %v, want Root.md from pull-merge", report.MutatedFiles) } } diff --git a/docs/automation.md b/docs/automation.md index 2748c6f..0b05724 100644 --- a/docs/automation.md +++ b/docs/automation.md @@ -28,6 +28,10 @@ Additional push flag: - `--on-conflict=pull-merge|force|cancel` - required with `push --non-interactive` for file targets. - optional for space targets (defaults to `pull-merge`). 
+- `--merge-resolution=fail|keep-local|keep-remote|keep-both` + - used with `--on-conflict=pull-merge --non-interactive`. + - `fail` stops before mutating the main workspace. + - `keep-local`, `keep-remote`, and `keep-both` apply the corresponding pull-conflict resolution automatically. ## Safety Confirmation Rules @@ -50,7 +54,11 @@ When remote versions are ahead: - `force`: overwrite based on remote head. - `cancel`: stop without remote writes. -In non-interactive usage, set one explicitly. +In non-interactive usage: + +- file targets still require an explicit `--on-conflict` policy, +- `pull-merge` also requires an explicit `--merge-resolution`, +- `--merge-resolution=fail` aborts before the automatic pull touches the main workspace. If `pull-merge` stops after preserving local edits, the CLI now prints the expected recovery sequence explicitly: @@ -79,7 +87,7 @@ Recommended operator flow: 2. Resolve markers and run `conf validate `. 3. Commit the merge-resolution result before the next `conf push`. -For automation (`--non-interactive`), conflicts fail fast and require manual follow-up. +For automation (`--non-interactive`), set `--merge-resolution=keep-local|keep-remote|keep-both` if you want deterministic conflict handling, or `--merge-resolution=fail` if you want the command to stop before mutating the workspace. ## Push Rollback Expectations @@ -108,8 +116,8 @@ If `ARCHIVE_TASK_STILL_RUNNING` appears, Confluence did not finish within the cu Asset drift note: -- `conf status` remains page-only. -- Use `git status` for local asset changes and `conf diff` when automation needs attachment-aware remote inspection. +- `conf status` defaults to page drift only, but `conf status --attachments` adds attachment-only drift and orphaned local asset inspection. +- Use `git status` when you want raw Git detail in addition to the sync-aware status view. 
- The first push for locally sourced assets may emit `ATTACHMENT_PATH_NORMALIZED` because `conf` relocates files into the managed `assets/<page>/...` hierarchy. That rename is expected and stable after the next pull. ## Dry-Run Behavior (`push --dry-run`) diff --git a/docs/compatibility.md b/docs/compatibility.md index b26a8c3..6dae1d6 100644 --- a/docs/compatibility.md +++ b/docs/compatibility.md @@ -9,7 +9,7 @@ when a dependency is unavailable. | Feature | Support Level | Tenant Dependency | Degraded Fallback | |---------|--------------|-------------------|-------------------| | Page sync (pull/push) | Full | None | — | -| Page hierarchy (folders) | Full | Folder API | Page-based hierarchy when folder API returns any API error (`FOLDER_COMPATIBILITY_MODE` / `FOLDER_LOOKUP_UNAVAILABLE`), with diagnostics, summaries, and JSON reports distinguishing unsupported capability from upstream endpoint failure | +| Page hierarchy (folders) | Full | Folder API | Pull/diff can fall back to page-based hierarchy lookup when folder lookup is unavailable; push fails closed and requires explicit interactive downgrade before converting a folder into a page-with-subpages node | | Content status (lozenges) | Full | Content Status API | Status sync disabled when API returns 404/405/501 (`CONTENT_STATUS_COMPATIBILITY_MODE`) | | Labels | Full | None | — | | Attachments (images/files) | Full | None | — | @@ -27,22 +27,22 @@ when a dependency is unavailable. ## Compatibility Mode Details -### Folder API (`FOLDER_COMPATIBILITY_MODE` / `FOLDER_LOOKUP_UNAVAILABLE`) +### Folder API (`FOLDER_LOOKUP_UNAVAILABLE`) `conf` uses the Confluence Folder API to resolve page hierarchy during `pull` -and `push`. If the tenant does not expose this API (any API-level error is -returned), `conf` automatically falls back to page-based hierarchy: +and `push`. -- **Pull**: hierarchy is derived from page parent relationships only; folder - nodes are treated as regular parent pages.
Emits `FOLDER_LOOKUP_UNAVAILABLE`. -- **Push**: folder creation is skipped; pages are nested under page parents - instead. Emits `FOLDER_COMPATIBILITY_MODE`. +- **Pull / diff**: if folder lookup is unavailable, hierarchy is derived from + page parent relationships only and the run emits `FOLDER_LOOKUP_UNAVAILABLE`. +- **Push**: if folder creation would require changing a pure folder into a page, + the CLI fails closed. Interactive runs can ask the operator to accept a + folder-to-page semantic downgrade by rewriting the local workspace into a + page-with-subpages shape. Non-interactive runs do not auto-downgrade. -No configuration change is needed. The mode is detected automatically on the -first folder lookup attempt each run. Diagnostics should make it clear whether -the fallback was triggered by an unsupported tenant capability ("tenant does -not support the folder API") or by an upstream endpoint failure ("folder API -endpoint failed upstream"). +Compatibility diagnostics still distinguish unsupported capability from lookup +or endpoint failures on the read path. On the write path, unsupported tenant +capability and semantic conflicts are surfaced as explicit push errors instead +of silent fallback. ### Content Status API (`CONTENT_STATUS_COMPATIBILITY_MODE`) @@ -105,9 +105,7 @@ validate any workflow that relies on raw ADF preservation. Running `conf push --preflight` probes the remote tenant before any write and reports which compatibility modes are active. When the pending push needs -folder hierarchy writes, preflight surfaces whether fallback is caused by an -unsupported tenant capability or by an upstream folder endpoint failure. It -also probes content-status compatibility ahead of time so operators can decide -whether to proceed. The final push summary and JSON report surface the active -fallback mode as well, so degraded folder behavior is still visible after the -run completes. 
+folder hierarchy writes, preflight surfaces whether native folder behavior is +blocked by unsupported capability, semantic conflicts, or another folder API +failure. It also probes content-status compatibility ahead of time so operators +can decide whether to proceed. diff --git a/docs/plans/2026-03-11-live-sync-09-workflow-hardening-and-operator-ux.md b/docs/plans/2026-03-11-live-sync-09-workflow-hardening-and-operator-ux.md new file mode 100644 index 0000000..496e367 --- /dev/null +++ b/docs/plans/2026-03-11-live-sync-09-workflow-hardening-and-operator-ux.md @@ -0,0 +1,247 @@ +# Live Sync Follow-Up 09: Workflow Hardening and Operator UX + +**Goal:** Turn the March 11 live-sync findings into concrete production-hardening work for hierarchy correctness, canonical path reconciliation, pre-push inspection, conflict UX, and attachment visibility. + +**Covers:** `F-001`, `F-002`, `F-003` from the March 11 live test log, plus operator smoothness gaps confirmed during the real `TD2` / `SD2` run + +**Specs to update first if behavior changes:** +- `openspec/specs/push/spec.md` +- `openspec/specs/pull-and-validate/spec.md` +- `openspec/specs/compatibility/spec.md` +- `openspec/specs/workspace/spec.md` +- `openspec/specs/recovery-and-maintenance/spec.md` + +**Likely files:** +- `internal/sync/push_hierarchy.go` +- `internal/sync/folder_fallback.go` +- `internal/sync/tenant_capabilities.go` +- `internal/sync/pull_paths.go` +- `internal/sync/pull_pages.go` +- `internal/sync/status.go` +- `cmd/diff.go` +- `cmd/diff_pages.go` +- `cmd/push.go` +- `cmd/pull.go` +- `cmd/report.go` +- `cmd/inline_status.go` +- `cmd/progress.go` +- `cmd/e2e_test.go` +- `internal/sync/push_hierarchy_test.go` +- `internal/sync/pull_paths_test.go` +- `internal/sync/status_test.go` +- `docs/usage.md` +- `docs/automation.md` +- `docs/compatibility.md` + +## Implementation status + +- [x] Specs updated for canonical pull paths, explicit folder downgrade handling, and non-interactive merge policy 
requirements. +- [x] Canonical pull paths now reconcile authored slugs to the canonical hierarchy, and `validate` blocks duplicate directory-backed folder titles across a space. +- [x] `conf diff` now previews brand-new pages, and `conf status --attachments` reports attachment-only drift plus orphaned local assets. +- [x] Push now requires explicit non-interactive merge resolution, stops before mutating the main workspace for `--merge-resolution=fail`, and never silently downgrades folders into pages. +- [x] Added unit/integration coverage for canonical-path reconciliation, explicit folder fallback handling, merge-resolution behavior, create preview, and attachment-aware status. +- [x] Added live E2E coverage for new-page diff preview, attachment-aware status, and non-interactive `pull-merge` fail-closed behavior. + +## Required outcomes + +1. A local directory that represents a folder must never silently become a Confluence page unless the operator explicitly opts into that semantic downgrade. +2. Workspaces that authored short slugs must reconcile to the same canonical pull path as fresh workspaces, or the product must explicitly define and document stable author-path behavior. +3. `conf diff` for new pages must show a useful create preview instead of failing because the file has no `id`. +4. Non-interactive conflict handling must be deterministic and must not surprise-mutate the main workspace into an unresolved merge without an explicit policy. +5. `conf status` must be able to report attachment-only drift so operators can answer β€œis this synced?” from one command. +6. Interactive TTY workflows may be richer, but must remain optional and must never block automation or `--non-interactive` usage. 
+ +## Findings to address + +### P0: Fail closed on folder-creation fallback + +Live finding: + +- During the March 11 TD2 push, the CLI attempted `POST /wiki/api/v2/folders`, received HTTP `400`, switched to compatibility mode, and created a page named `API` instead of preserving a pure folder node. +- The current push path in `internal/sync/push_hierarchy.go` treats any folder-related `APIError` as ignorable and falls back to `CreatePage(...)` for the directory segment. +- The tenant behavior appears to enforce folder-title uniqueness across the whole space, not just among siblings. That means `/test` and `/API/test` cannot both exist as folders if they share the same title. + +Required outcome: + +1. Folder creation errors must be classified narrowly. +2. β€œFolder becomes page” must not happen silently. +3. Broken or incomplete local state should fail explicitly instead of triggering a broad compatibility path. +4. Validation must detect space-wide folder-title conflicts before push mutates remote state. +5. Any semantic downgrade from folder to page-with-subpages must be explicit and interactive. + +Suggested tasks: + +1. Audit `shouldIgnoreFolderHierarchyError` and all call sites that currently treat any `APIError` as fallback-worthy. +2. Split the current behavior into: + - unsupported capability / endpoint unavailable + - transient upstream failure + - semantic conflict such as duplicate title or bad parent +3. Add validation coverage for duplicate folder titles at space scope, even when the local directories are under different parent paths. +4. Report that validation result as a hard error explaining that Confluence folder titles must be unique across the space. +5. In interactive push mode only, offer an explicit fallback choice: + - keep failing and rename/restructure locally + - convert the conflicting directory segment into a page with subpages +6. 
If the operator accepts fallback, perform the conversion intentionally and mirror it both: + - remotely, by creating a page and placing child pages under it + - locally, by rewriting the workspace into the page-with-subpages shape +7. In `--non-interactive`, fail closed instead of falling back. +8. Add E2E regressions for: + - validation error on duplicate folder title + - interactive acceptance of page fallback + - non-interactive refusal to downgrade semantics + +### P0: Reconcile canonical paths across workspaces + +Live finding: + +- A page authored locally as `Software-Development/XT-20260311-0712.md` stayed pinned to that path in the original workspace even after successful pulls and a force pull. +- Fresh workspaces pulled the same page as `Software-Development/Cross-Space-Target-2026-03-11-0712.md`. + +Required outcome: + +1. The sync engine must have one coherent rule for canonical local paths. +2. Original authoring workspaces and fresh pull workspaces must converge to the same path shape, unless specs deliberately say otherwise. + +Suggested tasks: + +1. Audit how `pull` computes canonical paths versus how `push` preserves tracked paths in `.confluence-state.json`. +2. Decide the product rule explicitly: + - canonical pull paths always win, or + - author-created paths are stable and preserved +3. Update specs first to reflect that decision. +4. Add regression tests covering: + - local short slug then push then pull + - fresh workspace pull of the same page + - force pull reconciliation + +### P1: Unify new-page `diff` with preflight + +Live finding: + +- `conf diff` on a brand-new file failed with β€œfile has no id” and told the operator to use `conf push --preflight`. + +Required outcome: + +1. `diff` must remain useful for new pages. +2. Operators should not have to switch mental models between `diff` and `preflight` for the simplest create flow. + +Suggested tasks: + +1. Detect β€œnew page with no id” early in `diff`. +2. 
Reuse the existing preflight planning pipeline. +3. Render a create preview that includes: + - operation type + - resolved parent + - canonical target path + - attachment operations + - ADF summary or optional raw ADF view +4. Keep machine-readable reporting aligned between `diff --report-json` and `push --preflight --report-json`. + +### P1: Make non-interactive conflict handling explicit + +Live finding: + +- `--on-conflict=pull-merge --non-interactive` performed a real pull, updated the working tree, and then stopped with conflict markers in `UU` state because no merge choice could be made automatically. + +Required outcome: + +1. Non-interactive conflict behavior must be deterministic. +2. The default non-interactive path should fail safely without surprising workspace mutation. +3. Operators who do want automatic conflict handling must opt into an explicit merge policy. + +Suggested tasks: + +1. Add an explicit merge-resolution policy for non-interactive use, for example: + - `fail` + - `keep-local` + - `keep-remote` + - `keep-both` +2. If no policy is provided, stop before mutating the main workspace. +3. Move the exploratory pull-merge attempt into an isolated temp worktree. +4. If a merge conflict remains unresolved, present artifacts and next steps without leaving the main workspace half-merged. +5. Extend recovery metadata and JSON reporting with conflict-state detail. + +### P1: Add attachment-aware status reporting + +Live finding: + +- `conf status` explicitly reported clean page drift while attachment work still required `git status` or `conf diff`. + +Required outcome: + +1. Operators need one command that answers whether the workspace is fully synced, not just markdown/page synced. +2. Attachment-only drift must be visible without dropping into Git internals. + +Suggested tasks: + +1. 
Extend `status` to summarize: + - local attachment additions + - local attachment deletions + - remote attachment additions + - remote attachment deletions + - orphaned local assets +2. If full attachment inspection is too expensive for the default path, add `conf status --attachments` and make the default output point to it explicitly. +3. Add unit and E2E coverage for attachment-only drift where page markdown is unchanged. + +### P2: Use Bubble Tea for richer interactive flows, but keep it optional + +Observation: + +- The repo already depends on `bubbletea`, `bubbles`, and `lipgloss`. +- That makes richer TTY-only workflows practical without adding a new UI stack. + +Required outcome: + +1. Interactive flows should be easier for humans. +2. Automation and `--non-interactive` behavior must remain plain, deterministic, and scriptable. + +Suggested tasks: + +1. Evaluate a TTY-only diff / preflight viewer for: + - new-page create preview + - attachment operations + - hierarchy changes +2. Evaluate a conflict-resolution picker for interactive `pull` / `push` runs: + - keep local + - keep remote + - keep both +3. Evaluate a folder-fallback confirmation flow for interactive `push` runs when validation or preflight detects a folder-title conflict: + - show the conflicting local path and the Confluence constraint + - explain that the fallback changes semantics from folder to page + - require explicit confirmation before any local or remote rewrite +4. Evaluate a recovery browser for retained runs from `conf recover`. +5. Keep all Bubble Tea flows behind TTY detection and/or an explicit `--interactive-ui` flag. +6. Preserve a plain-text fallback for every interactive surface. + +## Suggested implementation order + +### Task 1: Fix semantic correctness + +1. Remove or narrow push-side page fallback for folder creation. +2. Add regression coverage proving directory-backed folder paths stay folder-backed. +3. Decide and codify canonical path rules. 
+ +### Task 2: Fix operator correctness signals + +1. Reconcile canonical paths during pull. +2. Add attachment-aware status reporting. +3. Align machine-readable reports with the new behavior. + +### Task 3: Improve operator smoothness + +1. Unify new-page diff and preflight. +2. Add explicit non-interactive merge policy handling. +3. Add optional Bubble Tea interactive views for human-in-the-loop usage. + +## Verification + +1. `go test ./internal/sync/... -run "Folder|Hierarchy|Path|Status"` +2. `go test ./cmd/... -run "Diff|Push|Pull|Recover|Status"` +3. `make test` +4. `make test-e2e` + +## Commit + +Use one section commit, for example: + +`docs(plan): capture workflow hardening and operator ux follow-ups` diff --git a/docs/usage.md b/docs/usage.md index 8cfff21..38b5a83 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -95,8 +95,7 @@ Highlights: - page files follow Confluence hierarchy (folders and parent/child pages become nested directories), - pages that have children are written as `/.md` so they are distinguishable from folders, - incremental pulls reconcile remote page creates, updates, and deletes without requiring `--force`, -- leaf-page title renames can keep the existing Markdown path when the effective parent directory is unchanged, -- pages that own subtree directories move when their self-owned directory segment changes, +- canonical pull paths always win, so previously authored short slugs are renamed into the same path shape a fresh workspace would get, - hierarchy moves and ancestor/path-segment sanitization changes move the Markdown file and emit `PAGE_PATH_MOVED` notes with old/new paths, - same-space links rewritten to relative Markdown links, - cross-space links preserved as readable remote URLs/references instead of being rewritten to local Markdown paths, @@ -116,6 +115,7 @@ Checks include: - frontmatter schema, - immutable metadata integrity, +- duplicate directory-backed folder titles that Confluence would reject across the space, - 
link/asset resolution, - strict Markdown -> ADF conversion compatibility. @@ -125,16 +125,17 @@ Use this before major pushes or in CI. ### `conf status [TARGET]` -Shows a high-level sync summary for Markdown pages. +Shows a high-level sync summary for Markdown pages, with optional attachment drift inspection. Highlights: - compares local Markdown drift against the last sync baseline, - checks whether tracked remote pages are ahead, missing, or newly added, - surfaces planned tracked-page path relocations that would happen on the next pull, -- focuses on Markdown page files only. +- defaults to Markdown page drift only for speed, +- `--attachments` adds local attachment additions/deletions, remote attachment additions/deletions, and orphaned local assets. -Attachment-only changes are intentionally excluded from `conf status`. Use `git status` for local asset changes or `conf diff` for attachment-aware remote inspection. There is no attachment-aware `conf status` mode yet. +The default output calls out that page-only scope explicitly. Use `conf status --attachments` when the operator question is β€œis this fully synced, including assets?” ### `conf diff [TARGET]` @@ -149,7 +150,7 @@ Highlights: - strips read-only author/timestamp metadata so the diff stays focused on actionable drift, - compares using `git diff --no-index`, - supports both file and space targets, -- requires an `id` for file-mode remote comparison; for a brand-new local file without `id`, use `conf push --preflight` instead. +- renders a create preview for brand-new local files without `id`, including resolved parent, canonical target path, attachment uploads, and an ADF summary. 
### `conf init agents [TARGET]` @@ -187,7 +188,11 @@ Highlights: - failed pushes print concrete `recover`, branch inspection, and cleanup commands for the retained run, - space-scoped push, `--preflight`, and `--dry-run` validate the full target space whenever there are in-scope changes, - `--preflight` uses the same validation scope and strictness as a real push, +- `--preflight` and `conf diff <file>` share the same create-preview model for brand-new pages, - `--on-conflict=pull-merge` restores local edits before running `pull` and preserves them via merge results, conflict markers, or retained recovery state instead of silently discarding them, +- `--non-interactive --on-conflict=pull-merge` requires `--merge-resolution=fail|keep-local|keep-remote|keep-both`, +- `--merge-resolution=fail` stops before mutating the main workspace, while the other merge-resolution values apply the requested pull-conflict choice deterministically, +- push never silently downgrades a directory-backed folder into a page; if Confluence cannot represent the folder natively, interactive push requires explicit operator confirmation before rewriting the workspace to a page-with-subpages shape, - when `--on-conflict=pull-merge` stops after a conflict-preserving pull, the CLI prints explicit next steps to resolve files, `git add` them, and rerun push, - removing tracked Markdown pages archives the corresponding remote page and follow-up pull removes it from tracked local state, - tracked page removals are previewed and summarized as remote archive operations rather than hard deletes, diff --git a/internal/sync/folder_fallback.go b/internal/sync/folder_fallback.go index 5772d14..3a9d023 100644 --- a/internal/sync/folder_fallback.go +++ b/internal/sync/folder_fallback.go @@ -160,14 +160,21 @@ type folderFallbackCauseKind string const ( folderFallbackCauseUnsupportedCapability folderFallbackCauseKind = "unsupported tenant capability" folderFallbackCauseUpstreamFailure folderFallbackCauseKind = 
"upstream endpoint failure" + folderFallbackCauseSemanticConflict folderFallbackCauseKind = "folder semantic conflict" + folderFallbackCauseMissingFolder folderFallbackCauseKind = "missing folder" ) func folderFallbackCause(err error) folderFallbackCauseKind { + var apiErr *confluence.APIError switch { case err == nil: return folderFallbackCauseUpstreamFailure - case errors.Is(err, confluence.ErrNotFound), isCompatibilityProbeError(err): + case errors.Is(err, confluence.ErrNotFound): + return folderFallbackCauseMissingFolder + case isCompatibilityProbeError(err): return folderFallbackCauseUnsupportedCapability + case errors.As(err, &apiErr) && (apiErr.StatusCode == 400 || apiErr.StatusCode == 409): + return folderFallbackCauseSemanticConflict default: return folderFallbackCauseUpstreamFailure } @@ -176,3 +183,27 @@ func folderFallbackCause(err error) folderFallbackCauseKind { func folderFallbackCauseLabel(err error) string { return string(folderFallbackCause(err)) } + +func shouldDegradeFolderLookupError(err error) bool { + switch folderFallbackCause(err) { + case folderFallbackCauseMissingFolder, folderFallbackCauseUnsupportedCapability, folderFallbackCauseUpstreamFailure: + return true + default: + return false + } +} + +func isFolderDowngradeCandidate(err error) bool { + switch folderFallbackCause(err) { + case folderFallbackCauseUnsupportedCapability: + return true + case folderFallbackCauseSemanticConflict: + lower := strings.ToLower(strings.TrimSpace(err.Error())) + return strings.Contains(lower, "title already exists") || + strings.Contains(lower, "already exists") || + strings.Contains(lower, "duplicate") || + strings.Contains(lower, "conflict") + default: + return false + } +} diff --git a/internal/sync/pull.go b/internal/sync/pull.go index fb8f0fb..f34cba7 100644 --- a/internal/sync/pull.go +++ b/internal/sync/pull.go @@ -574,6 +574,15 @@ func Pull(ctx context.Context, remote PullRemote, opts PullOptions) (PullResult, if !ok { return PullResult{}, 
fmt.Errorf("planned path missing for page %s", page.ID) } + if previousRelPath, tracked := trackedPathForPageID(state.PagePathIndex, page.ID); tracked { + plannedRelPath := pagePathByIDRel[page.ID] + if sameRelPathDifferentCase(previousRelPath, plannedRelPath) { + previousAbsPath := filepath.Join(spaceDir, filepath.FromSlash(previousRelPath)) + if err := renameCaseOnlyPath(previousAbsPath, outputPath); err != nil { + return PullResult{}, fmt.Errorf("rename markdown %s to %s: %w", previousRelPath, plannedRelPath, err) + } + } + } if opts.Progress != nil { opts.Progress.SetCurrentItem(filepath.Base(outputPath)) @@ -660,7 +669,14 @@ func Pull(ctx context.Context, remote PullRemote, opts PullOptions) (PullResult, deletedMarkdownSet := map[string]struct{}{} for oldPath, pageID := range state.PagePathIndex { newPath, exists := pagePathByIDRel[pageID] - if !exists || normalizeRelPath(oldPath) != normalizeRelPath(newPath) { + if !exists { + deletedMarkdownSet[normalizeRelPath(oldPath)] = struct{}{} + continue + } + if sameRelPathDifferentCase(oldPath, newPath) { + continue + } + if normalizeRelPath(oldPath) != normalizeRelPath(newPath) { deletedMarkdownSet[normalizeRelPath(oldPath)] = struct{}{} } } @@ -846,6 +862,54 @@ func removeEmptyParentDirs(startDir, stopDir string) error { } } +func trackedPathForPageID(pagePathIndex map[string]string, pageID string) (string, bool) { + pageID = strings.TrimSpace(pageID) + if pageID == "" { + return "", false + } + for relPath, trackedPageID := range pagePathIndex { + if strings.TrimSpace(trackedPageID) != pageID { + continue + } + normalized := normalizeRelPath(relPath) + if normalized == "" { + continue + } + return normalized, true + } + return "", false +} + +func sameRelPathDifferentCase(a, b string) bool { + normalizedA := normalizeRelPath(a) + normalizedB := normalizeRelPath(b) + return normalizedA != "" && normalizedB != "" && normalizedA != normalizedB && strings.EqualFold(normalizedA, normalizedB) +} + +func 
renameCaseOnlyPath(oldPath, newPath string) error { + if !sameRelPathDifferentCase(oldPath, newPath) { + return nil + } + if _, err := os.Stat(oldPath); err != nil { + if os.IsNotExist(err) { + return nil + } + return err + } + if err := os.MkdirAll(filepath.Dir(newPath), 0o750); err != nil { + return err + } + tempPath := newPath + ".case-rename-tmp" + if err := os.Rename(oldPath, tempPath); err != nil { + return err + } + if err := os.Rename(tempPath, newPath); err != nil { + _ = os.Rename(tempPath, oldPath) + return err + } + return nil +} + func isSubpathOrSame(root, candidate string) bool { rel, err := filepath.Rel(root, candidate) if err != nil { diff --git a/internal/sync/pull_pages.go b/internal/sync/pull_pages.go index 997c1f2..0ae447c 100644 --- a/internal/sync/pull_pages.go +++ b/internal/sync/pull_pages.go @@ -195,11 +195,7 @@ func pageMatchesExpectedState(page confluence.Page, expectedVersion int, expecte } func shouldIgnoreFolderHierarchyError(err error) bool { - if errors.Is(err, confluence.ErrNotFound) { - return true - } - var apiErr *confluence.APIError - return errors.As(err, &apiErr) + return shouldDegradeFolderLookupError(err) } func listAllPages(ctx context.Context, remote PullRemote, opts confluence.PageListOptions, progress Progress) ([]confluence.Page, error) { diff --git a/internal/sync/pull_state_test.go b/internal/sync/pull_state_test.go index 2923aa0..b781e70 100644 --- a/internal/sync/pull_state_test.go +++ b/internal/sync/pull_state_test.go @@ -84,16 +84,16 @@ func TestPull_ForceFullPullsAllPagesWithoutIncrementalChanges(t *testing.T) { if err != nil { t.Fatalf("Pull() without force error: %v", err) } - if len(resultNoForce.UpdatedMarkdown) != 1 || resultNoForce.UpdatedMarkdown[0] != "root.md" { + if len(resultNoForce.UpdatedMarkdown) != 1 || resultNoForce.UpdatedMarkdown[0] != "Root.md" { t.Fatalf("expected incremental pull to update root.md without force, got %+v", resultNoForce.UpdatedMarkdown) } - rootNoForce, err := 
fs.ReadMarkdownDocument(filepath.Join(spaceDir, "root.md")) + rootNoForce, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "Root.md")) if err != nil { - t.Fatalf("read root.md without force: %v", err) + t.Fatalf("read Root.md without force: %v", err) } if !strings.Contains(rootNoForce.Body, "new body") { - t.Fatalf("root.md should be updated without force when the remote version advances") + t.Fatalf("Root.md should be updated without force when the remote version advances") } forceRemote := &fakePullRemote{ @@ -120,12 +120,12 @@ func TestPull_ForceFullPullsAllPagesWithoutIncrementalChanges(t *testing.T) { t.Fatalf("expected one updated markdown with force, got %+v", resultForce.UpdatedMarkdown) } - rootForce, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "root.md")) + rootForce, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "Root.md")) if err != nil { - t.Fatalf("read root.md with force: %v", err) + t.Fatalf("read Root.md with force: %v", err) } if !strings.Contains(rootForce.Body, "new body") { - t.Fatalf("root.md should be updated with force; got body:\n%s", rootForce.Body) + t.Fatalf("Root.md should be updated with force; got body:\n%s", rootForce.Body) } } @@ -232,22 +232,22 @@ func TestPull_DraftRecovery(t *testing.T) { // Draft page should be preserved, not deleted foundDraft := false for _, p := range res.UpdatedMarkdown { - if p == "draft.md" { + if p == "Draft-Page.md" { foundDraft = true break } } if !foundDraft { - t.Errorf("draft.md not found in updated markdown, was it erroneously deleted?") + t.Errorf("Draft-Page.md not found in updated markdown, was it erroneously deleted?") } // Verify draft frontmatter - doc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "draft.md")) + doc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "Draft-Page.md")) if err != nil { - t.Fatalf("read draft.md: %v", err) + t.Fatalf("read Draft-Page.md: %v", err) } if doc.Frontmatter.State != "draft" { - t.Errorf("draft.md status = %q, 
want draft", doc.Frontmatter.State) + t.Errorf("Draft-Page.md status = %q, want draft", doc.Frontmatter.State) } } diff --git a/internal/sync/push_hierarchy.go b/internal/sync/push_hierarchy.go index 21137f4..6a9d54f 100644 --- a/internal/sync/push_hierarchy.go +++ b/internal/sync/push_hierarchy.go @@ -4,7 +4,6 @@ import ( "context" "errors" "fmt" - "log/slog" "path/filepath" "sort" "strings" @@ -103,29 +102,6 @@ func ensureFolderHierarchy( continue } - if opts != nil && opts.folderMode == tenantFolderModePageFallback { - pageCreated, pageErr := remote.CreatePage(ctx, confluence.PageUpsertInput{ - SpaceID: spaceID, - ParentPageID: parentID, - Title: seg, - Status: "current", - BodyADF: []byte(`{"version":1,"type":"doc","content":[]}`), - }) - if pageErr != nil { - return nil, fmt.Errorf("create compatibility hierarchy page %q: %w", currentPath, pageErr) - } - - createdID := strings.TrimSpace(pageCreated.ID) - if createdID == "" { - return nil, fmt.Errorf("create compatibility hierarchy page %q returned empty page ID", currentPath) - } - - folderIDByPath[currentPath] = createdID - parentID = createdID - parentType = "page" - continue - } - createInput := confluence.FolderCreateInput{ SpaceID: spaceID, Title: seg, @@ -137,35 +113,15 @@ func ensureFolderHierarchy( created, err := remote.CreateFolder(ctx, createInput) if err != nil { - if shouldIgnoreFolderHierarchyError(err) { - if opts != nil { - opts.folderMode = tenantFolderModePageFallback - if opts.folderListTracker != nil { - opts.folderListTracker.Report(currentPath, err) - } - } - appendPushFolderCompatibilityDiagnosticOnce(diagnostics, err) - slog.Warn("folder_api_unavailable_falling_back_to_page", "path", currentPath, "error", err.Error()) - pageCreated, pageErr := remote.CreatePage(ctx, confluence.PageUpsertInput{ - SpaceID: spaceID, - ParentPageID: parentID, - Title: seg, - Status: "current", - BodyADF: []byte(`{"version":1,"type":"doc","content":[]}`), - }) - if pageErr != nil { - return nil, 
fmt.Errorf("create compatibility hierarchy page %q after folder fallback: %w", currentPath, pageErr) + if isFolderDowngradeCandidate(err) { + return nil, &FolderPageFallbackRequiredError{ + Path: currentPath, + Reason: folderFallbackCauseLabel(err), + Message: fmt.Sprintf("folder %q cannot be created as a Confluence folder without changing semantics (%s): %v", currentPath, folderFallbackCauseLabel(err), err), + Cause: err, } - created = confluence.Folder{ - ID: pageCreated.ID, - SpaceID: pageCreated.SpaceID, - Title: pageCreated.Title, - ParentID: pageCreated.ParentPageID, - ParentType: pageCreated.ParentType, - } - } else { - return nil, fmt.Errorf("create folder %q: %w", currentPath, err) } + return nil, fmt.Errorf("create folder %q: %w", currentPath, err) } createdID := strings.TrimSpace(created.ID) @@ -175,11 +131,7 @@ func ensureFolderHierarchy( folderIDByPath[currentPath] = createdID parentID = createdID - if opts != nil && opts.folderMode == tenantFolderModePageFallback { - parentType = "page" - } else { - parentType = "folder" - } + parentType = "folder" if diagnostics != nil { *diagnostics = append(*diagnostics, PushDiagnostic{ diff --git a/internal/sync/push_types.go b/internal/sync/push_types.go index a659cf7..a327d49 100644 --- a/internal/sync/push_types.go +++ b/internal/sync/push_types.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "strings" "time" "github.com/rgonek/confluence-markdown-sync/internal/confluence" @@ -177,3 +178,23 @@ func (e *PushConflictError) Error() string { e.Policy, ) } + +// FolderPageFallbackRequiredError reports that continuing a push would require +// rewriting a local directory-backed folder into a page-with-subpages node. 
+type FolderPageFallbackRequiredError struct { + Path string + Reason string + Message string + Cause error +} + +func (e *FolderPageFallbackRequiredError) Error() string { + if strings.TrimSpace(e.Message) != "" { + return e.Message + } + return fmt.Sprintf("folder %s requires explicit page fallback: %v", e.Path, e.Cause) +} + +func (e *FolderPageFallbackRequiredError) Unwrap() error { + return e.Cause +} diff --git a/internal/sync/tenant_capabilities_test.go b/internal/sync/tenant_capabilities_test.go index 1ad62e6..9a9ceb2 100644 --- a/internal/sync/tenant_capabilities_test.go +++ b/internal/sync/tenant_capabilities_test.go @@ -2,6 +2,7 @@ package sync import ( "context" + "errors" "path/filepath" "strings" "testing" @@ -596,7 +597,7 @@ func TestPush_ContentStatusRealErrorsSurfaceForNewPageInEmptySpace(t *testing.T) } } -func TestPush_FolderCapabilityFallbackUsesPageHierarchyMode(t *testing.T) { +func TestPush_FolderCapabilityFallbackFailsClosedOnUpstreamError(t *testing.T) { spaceDir := t.TempDir() nestedDir := filepath.Join(spaceDir, "Parent") if err := fs.WriteMarkdownDocument(filepath.Join(nestedDir, "Child.md"), fs.MarkdownDocument{ @@ -622,30 +623,24 @@ func TestPush_FolderCapabilityFallbackUsesPageHierarchyMode(t *testing.T) { ConflictPolicy: PushConflictPolicyCancel, Changes: []PushFileChange{{Type: PushChangeAdd, Path: "Parent/Child.md"}}, }) - if err != nil { - t.Fatalf("Push() unexpected error: %v", err) + if err == nil { + t.Fatal("Push() error = nil, want explicit folder create failure") } - if remote.createFolderCalls != 1 { - t.Fatalf("create folder calls = %d, want 1 attempted native folder creation before fallback", remote.createFolderCalls) + t.Fatalf("create folder calls = %d, want 1 attempted native folder creation", remote.createFolderCalls) } - if remote.createPageCalls < 2 { - t.Fatalf("create page calls = %d, want at least 2 for parent compatibility page + child", remote.createPageCalls) + if remote.createPageCalls != 0 { + 
t.Fatalf("create page calls = %d, want 0 without explicit fallback acceptance", remote.createPageCalls) } - - foundMode := false - for _, diag := range result.Diagnostics { - if diag.Code == "FOLDER_COMPATIBILITY_MODE" && strings.Contains(diag.Message, "folder API endpoint failed upstream") { - foundMode = true - break - } + if !strings.Contains(err.Error(), `create folder "Parent"`) { + t.Fatalf("expected explicit folder create failure, got %v", err) } - if !foundMode { - t.Fatalf("expected upstream folder compatibility diagnostic, got %+v", result.Diagnostics) + if len(result.Diagnostics) != 0 { + t.Fatalf("unexpected diagnostics on hard failure: %+v", result.Diagnostics) } } -func TestPush_FolderCapabilityFallbackDistinguishesUnsupportedTenantCapability(t *testing.T) { +func TestPush_FolderCapabilityFallbackRequiresExplicitDowngrade(t *testing.T) { spaceDir := t.TempDir() nestedDir := filepath.Join(spaceDir, "Parent") if err := fs.WriteMarkdownDocument(filepath.Join(nestedDir, "Child.md"), fs.MarkdownDocument{ @@ -671,18 +666,23 @@ func TestPush_FolderCapabilityFallbackDistinguishesUnsupportedTenantCapability(t ConflictPolicy: PushConflictPolicyCancel, Changes: []PushFileChange{{Type: PushChangeAdd, Path: "Parent/Child.md"}}, }) - if err != nil { - t.Fatalf("Push() unexpected error: %v", err) + if err == nil { + t.Fatal("Push() error = nil, want explicit downgrade requirement") } - - foundMode := false - for _, diag := range result.Diagnostics { - if diag.Code == "FOLDER_COMPATIBILITY_MODE" && strings.Contains(diag.Message, "tenant does not support the folder API") { - foundMode = true - break - } + var fallbackErr *FolderPageFallbackRequiredError + if !errors.As(err, &fallbackErr) { + t.Fatalf("expected FolderPageFallbackRequiredError, got %v", err) } - if !foundMode { - t.Fatalf("expected unsupported folder compatibility diagnostic, got %+v", result.Diagnostics) + if fallbackErr.Path != "Parent" { + t.Fatalf("fallback path = %q, want Parent", fallbackErr.Path) 
+ } + if fallbackErr.Reason != "unsupported tenant capability" { + t.Fatalf("fallback reason = %q, want unsupported tenant capability", fallbackErr.Reason) + } + if remote.createPageCalls != 0 { + t.Fatalf("create page calls = %d, want 0 without explicit fallback acceptance", remote.createPageCalls) + } + if len(result.Diagnostics) != 0 { + t.Fatalf("unexpected diagnostics on downgrade-required error: %+v", result.Diagnostics) } } From ae76b1fe0292b330d94a048dfe24f7f3686aabe7 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 11:32:23 +0100 Subject: [PATCH 05/10] test: align canonical path expectations and cleanup --- cmd/pull_state_test.go | 4 +-- cmd/report_relink_test.go | 12 ++++---- cmd/status.go | 52 ++++++++++++++++---------------- cmd/status_test.go | 10 +++--- internal/sync/folder_fallback.go | 9 ------ internal/sync/pull_paths_test.go | 2 +- internal/sync/push_hierarchy.go | 16 ---------- 7 files changed, 40 insertions(+), 65 deletions(-) diff --git a/cmd/pull_state_test.go b/cmd/pull_state_test.go index 148e791..bc21787 100644 --- a/cmd/pull_state_test.go +++ b/cmd/pull_state_test.go @@ -96,8 +96,8 @@ func TestRunPull_HealsCorruptedStateFileWithConflictMarkers(t *testing.T) { if err != nil { t.Fatalf("load healed state: %v", err) } - if got := strings.TrimSpace(state.PagePathIndex["root.md"]); got != "1" { - t.Fatalf("healed page_path_index[root.md] = %q, want 1", got) + if got := strings.TrimSpace(state.PagePathIndex["Root.md"]); got != "1" { + t.Fatalf("healed page_path_index[Root.md] = %q, want 1", got) } rawState, err := os.ReadFile(filepath.Join(spaceDir, fs.StateFileName)) //nolint:gosec // test data diff --git a/cmd/report_relink_test.go b/cmd/report_relink_test.go index bfd490b..51b0f9e 100644 --- a/cmd/report_relink_test.go +++ b/cmd/report_relink_test.go @@ -111,8 +111,8 @@ func TestRunPull_ReportJSONWithRelinkKeepsStdoutJSONAndCapturesRelinkedFiles(t * report := decodeCommandReportJSON(t, stdout.Bytes()) assertReportMetadata(t, 
report, "pull", true) - if !containsString(report.MutatedFiles, "target.md") { - t.Fatalf("mutated files = %v, want target.md", report.MutatedFiles) + if !containsString(report.MutatedFiles, "Target.md") { + t.Fatalf("mutated files = %v, want Target.md", report.MutatedFiles) } if !containsString(report.MutatedFiles, "../Source (SRC)/doc.md") { t.Fatalf("mutated files = %v, want relinked source doc", report.MutatedFiles) @@ -122,7 +122,7 @@ func TestRunPull_ReportJSONWithRelinkKeepsStdoutJSONAndCapturesRelinkedFiles(t * if err != nil { t.Fatalf("read source doc: %v", err) } - if !strings.Contains(string(raw), "../Target%20%28TGT%29/target.md") { + if !strings.Contains(string(raw), "../Target%20%28TGT%29/Target.md") { t.Fatalf("expected source doc to be relinked, got:\n%s", string(raw)) } } @@ -238,8 +238,8 @@ func TestRunPull_ReportJSONWithRelinkPreservesAppliedFilesOnLaterError(t *testin if !strings.Contains(report.Error, "auto-relink") { t.Fatalf("error = %q, want auto-relink failure", report.Error) } - if !containsString(report.MutatedFiles, "target.md") { - t.Fatalf("mutated files = %v, want target.md", report.MutatedFiles) + if !containsString(report.MutatedFiles, "Target.md") { + t.Fatalf("mutated files = %v, want Target.md", report.MutatedFiles) } if !containsString(report.MutatedFiles, "../Source (SRC)/a.md") { t.Fatalf("mutated files = %v, want applied relink file", report.MutatedFiles) @@ -249,7 +249,7 @@ func TestRunPull_ReportJSONWithRelinkPreservesAppliedFilesOnLaterError(t *testin if err != nil { t.Fatalf("read applied relink file: %v", err) } - if !strings.Contains(string(appliedRaw), "../Target%20%28TGT%29/target.md") { + if !strings.Contains(string(appliedRaw), "../Target%20%28TGT%29/Target.md") { t.Fatalf("expected applied relink file to be rewritten, got:\n%s", string(appliedRaw)) } } diff --git a/cmd/status.go b/cmd/status.go index a4d8a45..8b7a4b8 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -29,20 +29,20 @@ type StatusRemote interface { // 
StatusReport contains the results of a sync drift inspection. type StatusReport struct { - LocalAdded []string - LocalModified []string - LocalDeleted []string - RemoteAdded []string - RemoteModified []string - RemoteDeleted []string - PlannedPathMoves []syncflow.PlannedPagePathMove - ConflictAhead []string // pages that are both locally modified AND ahead on remote - MaxVersionDrift int - LocalAttachmentAdded []string - LocalAttachmentDeleted []string - RemoteAttachmentAdded []string + LocalAdded []string + LocalModified []string + LocalDeleted []string + RemoteAdded []string + RemoteModified []string + RemoteDeleted []string + PlannedPathMoves []syncflow.PlannedPagePathMove + ConflictAhead []string // pages that are both locally modified AND ahead on remote + MaxVersionDrift int + LocalAttachmentAdded []string + LocalAttachmentDeleted []string + RemoteAttachmentAdded []string RemoteAttachmentDeleted []string - OrphanedLocalAssets []string + OrphanedLocalAssets []string } const statusScopeNote = "Scope: markdown/page drift by default. Use `conf status --attachments` to inspect local and remote attachment drift from the same command." 
@@ -321,20 +321,20 @@ func buildStatusReport( } return StatusReport{ - LocalAdded: localAdded, - LocalModified: localModified, - LocalDeleted: localDeleted, - RemoteAdded: remoteAdded, - RemoteModified: remoteModified, - RemoteDeleted: remoteDeleted, - PlannedPathMoves: plannedPathMoves, - ConflictAhead: conflictAhead, - MaxVersionDrift: maxVersionDrift, - LocalAttachmentAdded: localAttachmentAdded, - LocalAttachmentDeleted: localAttachmentDeleted, - RemoteAttachmentAdded: remoteAttachmentAdded, + LocalAdded: localAdded, + LocalModified: localModified, + LocalDeleted: localDeleted, + RemoteAdded: remoteAdded, + RemoteModified: remoteModified, + RemoteDeleted: remoteDeleted, + PlannedPathMoves: plannedPathMoves, + ConflictAhead: conflictAhead, + MaxVersionDrift: maxVersionDrift, + LocalAttachmentAdded: localAttachmentAdded, + LocalAttachmentDeleted: localAttachmentDeleted, + RemoteAttachmentAdded: remoteAttachmentAdded, RemoteAttachmentDeleted: remoteAttachmentDeleted, - OrphanedLocalAssets: orphanedLocalAssets, + OrphanedLocalAssets: orphanedLocalAssets, }, nil } diff --git a/cmd/status_test.go b/cmd/status_test.go index 2c45b17..d106e02 100644 --- a/cmd/status_test.go +++ b/cmd/status_test.go @@ -49,12 +49,12 @@ func TestStatusCmd(t *testing.T) { // mockStatusRemote implements the StatusRemote interface for testing type mockStatusRemote struct { - space confluence.Space - pages confluence.PageListResult - page confluence.Page - folders map[string]confluence.Folder + space confluence.Space + pages confluence.PageListResult + page confluence.Page + folders map[string]confluence.Folder attachments map[string][]confluence.Attachment - err error + err error } func (m *mockStatusRemote) GetSpace(_ context.Context, _ string) (confluence.Space, error) { diff --git a/internal/sync/folder_fallback.go b/internal/sync/folder_fallback.go index 3a9d023..788b4f9 100644 --- a/internal/sync/folder_fallback.go +++ b/internal/sync/folder_fallback.go @@ -146,15 +146,6 @@ func 
folderLookupUnavailableMessage(err error) string { } } -func folderCompatibilityModeMessage(err error) string { - switch folderFallbackCause(err) { - case folderFallbackCauseUnsupportedCapability: - return "compatibility mode active: tenant does not support the folder API; using page-based hierarchy mode for this push" - default: - return "compatibility mode active: folder API endpoint failed upstream; using page-based hierarchy mode for this push" - } -} - type folderFallbackCauseKind string const ( diff --git a/internal/sync/pull_paths_test.go b/internal/sync/pull_paths_test.go index abd01bb..45bb80a 100644 --- a/internal/sync/pull_paths_test.go +++ b/internal/sync/pull_paths_test.go @@ -87,7 +87,7 @@ func TestPlanPagePaths_ReconcilesShortSlugToCanonicalPath(t *testing.T) { {ID: "10", Title: "Software Development"}, } previousPageIndex := map[string]string{ - "Software-Development/XT-20260311-0712.md": "1", + "Software-Development/XT-20260311-0712.md": "1", "Software-Development/Software-Development.md": "10", } diff --git a/internal/sync/push_hierarchy.go b/internal/sync/push_hierarchy.go index 6a9d54f..17ff0d7 100644 --- a/internal/sync/push_hierarchy.go +++ b/internal/sync/push_hierarchy.go @@ -145,22 +145,6 @@ func ensureFolderHierarchy( return folderIDByPath, nil } -func appendPushFolderCompatibilityDiagnosticOnce(diagnostics *[]PushDiagnostic, err error) { - if diagnostics == nil { - return - } - for _, diag := range *diagnostics { - if strings.TrimSpace(diag.Code) == "FOLDER_COMPATIBILITY_MODE" { - return - } - } - *diagnostics = append(*diagnostics, PushDiagnostic{ - Path: "", - Code: "FOLDER_COMPATIBILITY_MODE", - Message: folderCompatibilityModeMessage(err), - }) -} - func collapseFolderParentIfIndexPage( ctx context.Context, remote PushRemote, From dcd09379bfcc1ef536d995568d5c5d657a468fcd Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 12:43:48 +0100 Subject: [PATCH 06/10] e2e: harden live sandbox workflow coverage --- Makefile | 2 
+- cmd/e2e_helpers_test.go | 29 +++++++ cmd/e2e_test.go | 48 ++++++++---- cmd/pull_context.go | 12 ++- docs/automation.md | 12 ++- internal/sync/pull_incremental_test.go | 100 +++++++++++++++++++++++++ internal/sync/pull_pages.go | 3 + 7 files changed, 185 insertions(+), 21 deletions(-) diff --git a/Makefile b/Makefile index 64a5c0f..08db6fa 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ coverage-check: ## test-e2e: run all end-to-end tests (requires CONF_E2E_DOMAIN, CONF_E2E_EMAIL, CONF_E2E_API_TOKEN, CONF_E2E_PRIMARY_SPACE_KEY, CONF_E2E_SECONDARY_SPACE_KEY) test-e2e: build - $(GO) test -v -tags=e2e ./cmd -run '^TestWorkflow_' + $(GO) test -timeout 20m -v -tags=e2e ./cmd -run '^TestWorkflow_' ## release-check: run the release gate, including live sandbox E2E coverage release-check: fmt-check lint test-unit test-e2e diff --git a/cmd/e2e_helpers_test.go b/cmd/e2e_helpers_test.go index ba0b578..2663899 100644 --- a/cmd/e2e_helpers_test.go +++ b/cmd/e2e_helpers_test.go @@ -274,6 +274,7 @@ func findMarkdownByPageID(t *testing.T, spaceDir, pageID string) string { t.Helper() var matched string + var backupMatch string err := filepath.WalkDir(spaceDir, func(path string, d os.DirEntry, walkErr error) error { if walkErr != nil { return walkErr @@ -293,6 +294,12 @@ func findMarkdownByPageID(t *testing.T, spaceDir, pageID string) string { return err } if strings.TrimSpace(doc.Frontmatter.ID) == pageID { + if strings.Contains(filepath.Base(path), "(My Local Changes") { + if strings.TrimSpace(backupMatch) == "" { + backupMatch = path + } + return nil + } matched = path return filepath.SkipAll } @@ -301,6 +308,9 @@ func findMarkdownByPageID(t *testing.T, spaceDir, pageID string) string { if err != nil && err != filepath.SkipAll { t.Fatalf("find markdown by page id: %v", err) } + if strings.TrimSpace(matched) == "" { + matched = backupMatch + } if strings.TrimSpace(matched) == "" { t.Fatalf("could not find markdown file for page ID %s in %s", pageID, spaceDir) } @@ -537,6 
+547,25 @@ func waitForPageADF(t *testing.T, ctx context.Context, client *confluence.Client } } +func waitForPageVersion(t *testing.T, ctx context.Context, client *confluence.Client, pageID string, expectedVersion int) confluence.Page { + t.Helper() + + deadline := time.Now().Add(45 * time.Second) + for { + page, err := client.GetPage(ctx, pageID) + if err == nil && page.Version >= expectedVersion { + return page + } + if time.Now().After(deadline) { + if err != nil { + t.Fatalf("GetPage(%s) did not reach version %d before timeout: %v", pageID, expectedVersion, err) + } + t.Fatalf("page %s version did not reach %d before timeout: got %d", pageID, expectedVersion, page.Version) + } + time.Sleep(2 * time.Second) + } +} + func waitForPageAttachments(t *testing.T, ctx context.Context, client *confluence.Client, pageID string, predicate func([]confluence.Attachment) bool) []confluence.Attachment { t.Helper() diff --git a/cmd/e2e_test.go b/cmd/e2e_test.go index f5256b7..d62ee0c 100644 --- a/cmd/e2e_test.go +++ b/cmd/e2e_test.go @@ -37,27 +37,47 @@ var sandboxBaselineDiagnosticAllowlist = map[string][]e2eExpectedDiagnostic{ Path: "17727489", Code: "UNKNOWN_MEDIA_ID_UNRESOLVED", }, + { + Path: "Technical-Documentation/Live-Verification-2026-03-11-0712/Live-Verification-2026-03-11-0712.md", + Code: "CROSS_SPACE_LINK_PRESERVED", + MessageContains: "pageId=26673153", + }, { Path: "Technical-Documentation/Live-Workflow-Test-2026-03-05/Endpoint-Notes.md", - Code: "unresolved_reference", + Code: "CROSS_SPACE_LINK_PRESERVED", MessageContains: "pageId=17727489#Task-list", }, { Path: "Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md", - Code: "unresolved_reference", + Code: "CROSS_SPACE_LINK_PRESERVED", MessageContains: "pageId=17727489", }, { Path: "Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md", - Code: "unresolved_reference", + Code: "CROSS_SPACE_LINK_PRESERVED", MessageContains: 
"pageId=17530900#Task-list", }, + { + Path: "Technical-Documentation/Live-Workflow-Test-2026-03-10-1655/Live-Workflow-Test-2026-03-10-1655.md", + Code: "CROSS_SPACE_LINK_PRESERVED", + MessageContains: "pageId=25460737", + }, { Path: "Technical-Documentation/Live-Workflow-Test-2026-03-05/Checklist-and-Diagrams.md", MessageContains: "UNKNOWN_MEDIA_ID", }, }, "SD2": { + { + Path: "Software-Development/Cross-Space-Target-2026-03-10-1655.md", + Code: "CROSS_SPACE_LINK_PRESERVED", + MessageContains: "pageId=25591809", + }, + { + Path: "Software-Development/Cross-Space-Target-2026-03-11-0712.md", + Code: "CROSS_SPACE_LINK_PRESERVED", + MessageContains: "pageId=26771457", + }, { Path: "Software-Development/Release-Sandbox-2026-03-05.md", Code: "CROSS_SPACE_LINK_PRESERVED", @@ -147,9 +167,7 @@ func TestWorkflow_ConflictResolution(t *testing.T) { t.Fatalf("Remote update (conflict simulation) failed: %v", err) } fmt.Printf("Remote version after simulation: %d\n", updatedRemote.Version) - - // Wait for eventual consistency - time.Sleep(5 * time.Second) + waitForPageVersion(t, ctx, client, pageID, updatedRemote.Version) // 4. Try push --on-conflict=cancel -> should fail cmd := exec.Command(confBin, "push", simplePath, "--on-conflict=cancel", "--yes", "--non-interactive") @@ -242,14 +260,17 @@ func TestWorkflow_PushAutoPullMerge(t *testing.T) { // 3. 
Simulate remote update (bump version) remotePage, _ := client.GetPage(ctx, pageID) - client.UpdatePage(ctx, pageID, confluence.PageUpsertInput{ + updatedRemote, err := client.UpdatePage(ctx, pageID, confluence.PageUpsertInput{ SpaceID: remotePage.SpaceID, ParentPageID: remotePage.ParentPageID, Title: remotePage.Title, Version: remotePage.Version + 1, BodyADF: []byte(`{"version":1,"type":"doc","content":[{"type":"paragraph","content":[{"type":"text","text":"Remote update for auto pull-merge"}]}]}`), }) - time.Sleep(5 * time.Second) + if err != nil { + t.Fatalf("Remote update (auto pull-merge simulation) failed: %v", err) + } + waitForPageVersion(t, ctx, client, pageID, updatedRemote.Version) // 4. Run push with --on-conflict=pull-merge // This should trigger the automatic pull @@ -270,8 +291,8 @@ func TestWorkflow_PushAutoPullMerge(t *testing.T) { simplePath = findMarkdownByPageID(t, spaceDir, pageID) doc, _ = fs.ReadMarkdownDocument(simplePath) // After pull (even with conflict), the frontmatter version should be updated - if doc.Frontmatter.Version != remotePage.Version+1 { - t.Fatalf("Local version should be updated after auto pull-merge: got %d, want %d", doc.Frontmatter.Version, remotePage.Version+1) + if doc.Frontmatter.Version != updatedRemote.Version { + t.Fatalf("Local version should be updated after auto pull-merge: got %d, want %d", doc.Frontmatter.Version, updatedRemote.Version) } raw, readErr := os.ReadFile(simplePath) if readErr != nil { @@ -336,16 +357,17 @@ func TestWorkflow_PushAutoPullMergeFailDoesNotMutateWorkspace(t *testing.T) { if err != nil { t.Fatalf("GetPage: %v", err) } - if _, err := client.UpdatePage(ctx, pageID, confluence.PageUpsertInput{ + updatedRemote, err := client.UpdatePage(ctx, pageID, confluence.PageUpsertInput{ SpaceID: remotePage.SpaceID, ParentPageID: remotePage.ParentPageID, Title: remotePage.Title, Version: remotePage.Version + 1, BodyADF: 
[]byte(`{"version":1,"type":"doc","content":[{"type":"paragraph","content":[{"type":"text","text":"Remote update for auto pull-merge fail"}]}]}`), - }); err != nil { + }) + if err != nil { t.Fatalf("Remote update (conflict simulation) failed: %v", err) } - time.Sleep(5 * time.Second) + waitForPageVersion(t, ctx, client, pageID, updatedRemote.Version) cmd := exec.Command(confBin, "push", simplePath, "--on-conflict=pull-merge", "--merge-resolution=fail", "--yes", "--non-interactive") cmd.Dir = tmpDir diff --git a/cmd/pull_context.go b/cmd/pull_context.go index 2cedcf6..08226dd 100644 --- a/cmd/pull_context.go +++ b/cmd/pull_context.go @@ -7,6 +7,7 @@ import ( "os" "os/exec" "path/filepath" + "runtime" "strings" "time" @@ -537,7 +538,12 @@ func listAllPullChangesForEstimate( } func runGit(workdir string, args ...string) (string, error) { - cmd := exec.Command("git", args...) //nolint:gosec // Intentionally running git + gitArgs := args + if runtime.GOOS == "windows" { + gitArgs = append([]string{"-c", "core.longpaths=true"}, args...) + } + + cmd := exec.Command("git", gitArgs...) 
//nolint:gosec // Intentionally running git if strings.TrimSpace(workdir) != "" { cmd.Dir = workdir } @@ -545,9 +551,9 @@ func runGit(workdir string, args ...string) (string, error) { if err != nil { msg := strings.TrimSpace(string(out)) if msg == "" { - return "", fmt.Errorf("git %s failed: %w", strings.Join(args, " "), err) + return "", fmt.Errorf("git %s failed: %w", strings.Join(gitArgs, " "), err) } - return "", fmt.Errorf("git %s failed: %s", strings.Join(args, " "), msg) + return "", fmt.Errorf("git %s failed: %s", strings.Join(gitArgs, " "), msg) } return string(out), nil } diff --git a/docs/automation.md b/docs/automation.md index 0b05724..ac66505 100644 --- a/docs/automation.md +++ b/docs/automation.md @@ -208,11 +208,15 @@ Current documented baseline allowlist for the maintained release sandbox: | Space | Expected diagnostic match | Reason | |------|----------------------------|--------| | `TD2` | `path=17727489`, `code=UNKNOWN_MEDIA_ID_UNRESOLVED` | Existing seed page still contains unresolved media identities; pull skips stale-attachment pruning for safety. | -| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Endpoint-Notes.md`, `code=unresolved_reference`, message contains `pageId=17727489#Task-list` | Seed content now includes another unresolved same-space task-list anchor reference. | -| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md`, `code=unresolved_reference`, message contains `pageId=17727489` | Seed content now includes another unresolved same-space page reference. | -| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md`, `code=unresolved_reference`, message contains `pageId=17530900#Task-list` | Seed content still links to an unresolved remote target. 
| +| `TD2` | `path=Technical-Documentation/Live-Verification-2026-03-11-0712/Live-Verification-2026-03-11-0712.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=26673153` | Seed content now intentionally preserves a cross-space page reference during pull. | +| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Endpoint-Notes.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=17727489#Task-list` | Seed content preserves a cross-space task-list anchor reference during pull. | +| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=17727489` | Seed content preserves a cross-space page reference during pull. | +| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Live-Workflow-Test-2026-03-05.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=17530900#Task-list` | Seed content preserves a cross-space task-list anchor reference during pull. | +| `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-10-1655/Live-Workflow-Test-2026-03-10-1655.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=25460737` | Seed content preserves another cross-space page reference during pull. | | `TD2` | `path=Technical-Documentation/Live-Workflow-Test-2026-03-05/Checklist-and-Diagrams.md`, message contains `UNKNOWN_MEDIA_ID` | Seed content still contains unresolved media fallback output. | -| `SD2` | `path=Software-Development/Release-Sandbox-2026-03-05.md`, `code=unresolved_reference`, message contains `pageId=17334539` | Seed content still links to an unresolved remote target. | +| `SD2` | `path=Software-Development/Cross-Space-Target-2026-03-10-1655.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=25591809` | Seed content preserves a cross-space page reference during pull. 
| +| `SD2` | `path=Software-Development/Cross-Space-Target-2026-03-11-0712.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=26771457` | Seed content preserves a cross-space page reference during pull. | +| `SD2` | `path=Software-Development/Release-Sandbox-2026-03-05.md`, `code=CROSS_SPACE_LINK_PRESERVED`, message contains `pageId=17334539` | Seed content preserves a cross-space page reference during pull. | If these spaces are cleaned up later, remove the allowlist entries in the same change that removes the warnings. diff --git a/internal/sync/pull_incremental_test.go b/internal/sync/pull_incremental_test.go index 447b655..516068a 100644 --- a/internal/sync/pull_incremental_test.go +++ b/internal/sync/pull_incremental_test.go @@ -411,6 +411,106 @@ func TestPull_IncrementalUpdateRetriesUntilExpectedVersionIsReadable(t *testing. } } +func TestPull_TargetPageUsesFreshGetPageVersionWhenListingLags(t *testing.T) { + tmpDir := t.TempDir() + spaceDir := filepath.Join(tmpDir, "ENG") + if err := os.MkdirAll(spaceDir, 0o750); err != nil { + t.Fatalf("mkdir space: %v", err) + } + + pagePath := filepath.Join(spaceDir, "Conflict-E2E.md") + if err := fs.WriteMarkdownDocument(pagePath, fs.MarkdownDocument{ + Frontmatter: fs.Frontmatter{ + Title: "Conflict E2E", + ID: "20", + Version: 2, + }, + Body: "old body\n", + }); err != nil { + t.Fatalf("write Conflict-E2E.md: %v", err) + } + + stalePage := confluence.Page{ + ID: "20", + SpaceID: "space-1", + Title: "Conflict E2E", + Version: 2, + LastModified: time.Date(2026, time.March, 11, 9, 0, 0, 0, time.UTC), + BodyADF: rawJSON(t, map[string]any{ + "version": 1, + "type": "doc", + "content": []any{ + map[string]any{"type": "paragraph", "content": []any{map[string]any{"type": "text", "text": "stale body"}}}, + }, + }), + } + freshPage := confluence.Page{ + ID: "20", + SpaceID: "space-1", + Title: "Conflict E2E", + Version: 3, + LastModified: time.Date(2026, time.March, 11, 9, 5, 0, 0, time.UTC), + BodyADF: rawJSON(t, 
map[string]any{ + "version": 1, + "type": "doc", + "content": []any{ + map[string]any{"type": "paragraph", "content": []any{map[string]any{"type": "text", "text": "fresh body"}}}, + }, + }), + } + + fake := &fakePullRemote{ + space: confluence.Space{ID: "space-1", Key: "ENG", Name: "Engineering"}, + pages: []confluence.Page{ + {ID: "20", SpaceID: "space-1", Title: "Conflict E2E", Version: stalePage.Version, LastModified: stalePage.LastModified}, + }, + pagesByID: map[string]confluence.Page{ + "20": freshPage, + }, + } + getPageCalls := 0 + fake.getPageFunc = func(pageID string) (confluence.Page, error) { + if pageID != "20" { + return confluence.Page{}, confluence.ErrNotFound + } + getPageCalls++ + if getPageCalls == 1 { + // The target-page probe should see the fresh version before the later fetch loop runs. + return freshPage, nil + } + return freshPage, nil + } + + result, err := Pull(context.Background(), fake, PullOptions{ + SpaceKey: "ENG", + SpaceDir: spaceDir, + TargetPageID: "20", + State: fs.SpaceState{ + PagePathIndex: map[string]string{ + "Conflict-E2E.md": "20", + }, + }, + PullStartedAt: time.Date(2026, time.March, 11, 9, 10, 0, 0, time.UTC), + }) + if err != nil { + t.Fatalf("Pull() error: %v", err) + } + + doc, err := fs.ReadMarkdownDocument(pagePath) + if err != nil { + t.Fatalf("read Conflict-E2E.md: %v", err) + } + if doc.Frontmatter.Version != 3 { + t.Fatalf("version = %d, want 3", doc.Frontmatter.Version) + } + if !strings.Contains(doc.Body, "fresh body") { + t.Fatalf("expected fresh body after target-page pull, got:\n%s", doc.Body) + } + if len(result.UpdatedMarkdown) != 1 || result.UpdatedMarkdown[0] != "Conflict-E2E.md" { + t.Fatalf("unexpected updated markdown list: %+v", result.UpdatedMarkdown) + } +} + func TestPull_PreservesAbsoluteCrossSpaceLinksWithoutUnresolvedWarnings(t *testing.T) { repo := t.TempDir() engDir := filepath.Join(repo, "Engineering (ENG)") diff --git a/internal/sync/pull_pages.go b/internal/sync/pull_pages.go index 
0ae447c..8fdd3da 100644 --- a/internal/sync/pull_pages.go +++ b/internal/sync/pull_pages.go @@ -28,6 +28,9 @@ func selectChangedPages( return nil, changeByPageID, nil } changeByPageID[targetID] = changeFromPage(pageByID[targetID], opts.SpaceKey) + if latestPage, err := remote.GetPage(ctx, targetID); err == nil { + changeByPageID[targetID] = mergeChangedPage(changeByPageID[targetID], changeFromPage(latestPage, opts.SpaceKey)) + } return []string{targetID}, changeByPageID, nil } From 9a8df3e4173a625930cb718771d3fd423f500290 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 14:04:46 +0100 Subject: [PATCH 07/10] test: remove pull retry counter races --- internal/sync/pull_incremental_test.go | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/internal/sync/pull_incremental_test.go b/internal/sync/pull_incremental_test.go index 516068a..58cfcd7 100644 --- a/internal/sync/pull_incremental_test.go +++ b/internal/sync/pull_incremental_test.go @@ -5,6 +5,7 @@ import ( "os" "path/filepath" "strings" + "sync/atomic" "testing" "time" @@ -245,11 +246,10 @@ func TestPull_IncrementalCreateRetriesUntilRemotePageMaterializes(t *testing.T) "20": {ID: "20", SpaceID: "space-1", Title: "Remote Child", ParentPageID: "10", Version: 1, LastModified: modifiedAt, BodyADF: rawJSON(t, emptyADF)}, }, } - childFetches := 0 + var childFetches atomic.Int32 fake.getPageFunc = func(pageID string) (confluence.Page, error) { if pageID == "20" { - childFetches++ - if childFetches == 1 { + if childFetches.Add(1) == 1 { return confluence.Page{}, confluence.ErrNotFound } } @@ -276,8 +276,8 @@ func TestPull_IncrementalCreateRetriesUntilRemotePageMaterializes(t *testing.T) t.Fatalf("Pull() error: %v", err) } - if childFetches < 2 { - t.Fatalf("expected child page fetch to retry, got %d attempt(s)", childFetches) + if childFetches.Load() < 2 { + t.Fatalf("expected child page fetch to retry, got %d attempt(s)", childFetches.Load()) } if _, err := 
os.Stat(filepath.Join(spaceDir, "Parent", "Parent.md")); err != nil { t.Fatalf("expected moved parent markdown: %v", err) @@ -364,13 +364,12 @@ func TestPull_IncrementalUpdateRetriesUntilExpectedVersionIsReadable(t *testing. "20": freshPage, }, } - updateFetches := 0 + var updateFetches atomic.Int32 fake.getPageFunc = func(pageID string) (confluence.Page, error) { if pageID != "20" { return confluence.Page{}, confluence.ErrNotFound } - updateFetches++ - if updateFetches == 1 { + if updateFetches.Add(1) == 1 { return stalePage, nil } return freshPage, nil @@ -392,8 +391,8 @@ func TestPull_IncrementalUpdateRetriesUntilExpectedVersionIsReadable(t *testing. t.Fatalf("Pull() error: %v", err) } - if updateFetches < 2 { - t.Fatalf("expected updated page fetch to retry, got %d attempt(s)", updateFetches) + if updateFetches.Load() < 2 { + t.Fatalf("expected updated page fetch to retry, got %d attempt(s)", updateFetches.Load()) } doc, err := fs.ReadMarkdownDocument(pagePath) @@ -468,13 +467,12 @@ func TestPull_TargetPageUsesFreshGetPageVersionWhenListingLags(t *testing.T) { "20": freshPage, }, } - getPageCalls := 0 + var getPageCalls atomic.Int32 fake.getPageFunc = func(pageID string) (confluence.Page, error) { if pageID != "20" { return confluence.Page{}, confluence.ErrNotFound } - getPageCalls++ - if getPageCalls == 1 { + if getPageCalls.Add(1) == 1 { // The target-page probe should see the fresh version before the later fetch loop runs. 
return freshPage, nil } From fe1e0aa018304d748a9e3b95459e0c85a84a6790 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 14:22:26 +0100 Subject: [PATCH 08/10] tooling: add docker ubuntu ci replica --- .dockerignore | 8 ++++++++ Makefile | 7 ++++++- docker/ubuntu-ci.Dockerfile | 22 ++++++++++++++++++++++ docs/automation.md | 9 +++++++++ scripts/ci-ubuntu.ps1 | 12 ++++++++++++ scripts/ci-ubuntu.sh | 20 ++++++++++++++++++++ 6 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 .dockerignore create mode 100644 docker/ubuntu-ci.Dockerfile create mode 100644 scripts/ci-ubuntu.ps1 create mode 100644 scripts/ci-ubuntu.sh diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..f6fa8a6 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +.git +.github +.confluence-search-index +conf +dist +workspace +test-output +docs/test-logs diff --git a/Makefile b/Makefile index 08db6fa..4f65099 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ MAIN := ./cmd/conf GO := go GOFLAGS := -.PHONY: build install test test-unit test-e2e release-check coverage-check fmt fmt-check lint clean +.PHONY: build install test test-unit test-e2e ci-ubuntu release-check coverage-check fmt fmt-check lint clean ## build: compile the conf binary build: @@ -28,6 +28,11 @@ coverage-check: test-e2e: build $(GO) test -timeout 20m -v -tags=e2e ./cmd -run '^TestWorkflow_' +## ci-ubuntu: run the GitHub Actions ubuntu test job inside Docker +ci-ubuntu: + docker build -f docker/ubuntu-ci.Dockerfile -t conf-ci-ubuntu . 
+ docker run --rm conf-ci-ubuntu + ## release-check: run the release gate, including live sandbox E2E coverage release-check: fmt-check lint test-unit test-e2e diff --git a/docker/ubuntu-ci.Dockerfile b/docker/ubuntu-ci.Dockerfile new file mode 100644 index 0000000..8524267 --- /dev/null +++ b/docker/ubuntu-ci.Dockerfile @@ -0,0 +1,22 @@ +FROM golang:1.25.5-bookworm + +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + gcc \ + git \ + g++ \ + make \ + && rm -rf /var/lib/apt/lists/* + +ENV PATH="/root/go/bin:${PATH}" \ + CGO_ENABLED=1 + +WORKDIR /src + +COPY go.mod go.sum ./ +RUN go mod download +RUN go install github.com/golangci/golangci-lint/cmd/golangci-lint@v2.8.0 + +COPY . . + +CMD ["bash", "./scripts/ci-ubuntu.sh"] diff --git a/docs/automation.md b/docs/automation.md index ac66505..a66b1dd 100644 --- a/docs/automation.md +++ b/docs/automation.md @@ -220,6 +220,15 @@ Current documented baseline allowlist for the maintained release sandbox: If these spaces are cleaned up later, remove the allowlist entries in the same change that removes the warnings. +## Reproducing Ubuntu CI Locally + +The GitHub Actions Ubuntu job can be reproduced locally in Docker with the same command sequence used by `.github/workflows/ci.yml`. + +- `make ci-ubuntu` +- or on Windows PowerShell: `./scripts/ci-ubuntu.ps1` + +The Docker image installs the Linux toolchain needed for `go test -race`, runs `go vet`, `go build -trimpath ./cmd/conf`, `go test -race ./...`, `go run ./tools/coveragecheck`, `go run ./tools/gofmtcheck`, and `golangci-lint run`. + ## Live Sandbox Release Checklist Use this checklist for a release candidate. It turns the 2026-03-09 one-off live verification into a repeatable gate. 
diff --git a/scripts/ci-ubuntu.ps1 b/scripts/ci-ubuntu.ps1 new file mode 100644 index 0000000..ea095d5 --- /dev/null +++ b/scripts/ci-ubuntu.ps1 @@ -0,0 +1,12 @@ +$ErrorActionPreference = "Stop" + +$image = "conf-ci-ubuntu" + +if (-not (Get-Command docker -ErrorAction SilentlyContinue)) { + throw "docker is not installed or not on PATH." +} + +docker version | Out-Null + +docker build -f docker/ubuntu-ci.Dockerfile -t $image . +docker run --rm $image diff --git a/scripts/ci-ubuntu.sh b/scripts/ci-ubuntu.sh new file mode 100644 index 0000000..90b92c6 --- /dev/null +++ b/scripts/ci-ubuntu.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +set -euo pipefail + +echo "[ci-ubuntu] go vet ./..." +go vet ./... + +echo "[ci-ubuntu] go build -trimpath ./cmd/conf" +go build -trimpath ./cmd/conf + +echo "[ci-ubuntu] go test -race ./..." +go test -race ./... + +echo "[ci-ubuntu] go run ./tools/coveragecheck" +go run ./tools/coveragecheck + +echo "[ci-ubuntu] go run ./tools/gofmtcheck" +go run ./tools/gofmtcheck + +echo "[ci-ubuntu] golangci-lint run" +golangci-lint run From a2f83049c2077c427ca3ae1e79353d06e9426f37 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 15:16:46 +0100 Subject: [PATCH 09/10] tooling: fix docker golangci-lint install path --- docker/ubuntu-ci.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ubuntu-ci.Dockerfile b/docker/ubuntu-ci.Dockerfile index 8524267..b5a9733 100644 --- a/docker/ubuntu-ci.Dockerfile +++ b/docker/ubuntu-ci.Dockerfile @@ -15,7 +15,7 @@ WORKDIR /src COPY go.mod go.sum ./ RUN go mod download -RUN go install github.com/golangci/golangci-lint/cmd/golangci-lint@v2.8.0 +RUN go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.8.0 COPY . . 
From ff0870182bc9f63e249e54c89899f733d0c9c607 Mon Sep 17 00:00:00 2001 From: Robert Gonek Date: Wed, 11 Mar 2026 15:55:43 +0100 Subject: [PATCH 10/10] test: align linux fixtures and docker runner --- cmd/diff_metadata_test.go | 6 +++--- cmd/pull_context_test.go | 10 +++++----- docker/ubuntu-ci.Dockerfile | 11 +++++++++-- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/cmd/diff_metadata_test.go b/cmd/diff_metadata_test.go index a16d82d..773ab3f 100644 --- a/cmd/diff_metadata_test.go +++ b/cmd/diff_metadata_test.go @@ -270,7 +270,7 @@ func TestRunDiff_SpaceModeShowsMetadataSummaryForRemoteMetadataOnlyChanges(t *te t.Fatalf("mkdir space: %v", err) } - writeMarkdown(t, filepath.Join(spaceDir, "root.md"), fs.MarkdownDocument{ + writeMarkdown(t, filepath.Join(spaceDir, "Root.md"), fs.MarkdownDocument{ Frontmatter: fs.Frontmatter{ Title: "Root", ID: "1", @@ -281,7 +281,7 @@ func TestRunDiff_SpaceModeShowsMetadataSummaryForRemoteMetadataOnlyChanges(t *te if err := fs.SaveState(spaceDir, fs.SpaceState{ PagePathIndex: map[string]string{ - "root.md": "1", + "Root.md": "1", }, AttachmentIndex: map[string]string{}, }); err != nil { @@ -328,7 +328,7 @@ func TestRunDiff_SpaceModeShowsMetadataSummaryForRemoteMetadataOnlyChanges(t *te if !strings.Contains(got, "metadata drift summary") { t.Fatalf("expected metadata summary, got:\n%s", got) } - if !strings.Contains(got, "root.md") { + if !strings.Contains(got, "Root.md") { t.Fatalf("expected metadata summary to include path, got:\n%s", got) } if !strings.Contains(got, "state: current -> draft") { diff --git a/cmd/pull_context_test.go b/cmd/pull_context_test.go index f1595ac..ad37ff7 100644 --- a/cmd/pull_context_test.go +++ b/cmd/pull_context_test.go @@ -208,7 +208,7 @@ func TestRunPull_ForcePullRefreshesEntireSpace(t *testing.T) { if err := os.MkdirAll(spaceDir, 0o750); err != nil { t.Fatalf("mkdir space: %v", err) } - writeMarkdown(t, filepath.Join(spaceDir, "root.md"), fs.MarkdownDocument{ + writeMarkdown(t, 
filepath.Join(spaceDir, "Root.md"), fs.MarkdownDocument{ Frontmatter: fs.Frontmatter{ Title: "Root", ID: "1", @@ -221,7 +221,7 @@ func TestRunPull_ForcePullRefreshesEntireSpace(t *testing.T) { if err := fs.SaveState(spaceDir, fs.SpaceState{ LastPullHighWatermark: "2026-02-02T00:00:00Z", PagePathIndex: map[string]string{ - "root.md": "1", + "Root.md": "1", }, AttachmentIndex: map[string]string{}, }); err != nil { @@ -275,12 +275,12 @@ func TestRunPull_ForcePullRefreshesEntireSpace(t *testing.T) { t.Fatalf("runPull() error: %v", err) } - rootDoc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "root.md")) + rootDoc, err := fs.ReadMarkdownDocument(filepath.Join(spaceDir, "Root.md")) if err != nil { - t.Fatalf("read root.md: %v", err) + t.Fatalf("read Root.md: %v", err) } if !strings.Contains(rootDoc.Body, "new body") { - t.Fatalf("expected root.md body to be refreshed on force pull, got:\n%s", rootDoc.Body) + t.Fatalf("expected Root.md body to be refreshed on force pull, got:\n%s", rootDoc.Body) } } diff --git a/docker/ubuntu-ci.Dockerfile b/docker/ubuntu-ci.Dockerfile index b5a9733..693c12a 100644 --- a/docker/ubuntu-ci.Dockerfile +++ b/docker/ubuntu-ci.Dockerfile @@ -8,8 +8,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ make \ && rm -rf /var/lib/apt/lists/* -ENV PATH="/root/go/bin:${PATH}" \ - CGO_ENABLED=1 +RUN useradd --create-home --uid 1000 runner + +ENV CGO_ENABLED=1 \ + GOBIN=/usr/local/bin WORKDIR /src @@ -18,5 +20,10 @@ RUN go mod download RUN go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.8.0 COPY . . +RUN mkdir -p /home/runner/.cache/go-build && chown -R runner:runner /home/runner /src + +USER runner +ENV HOME=/home/runner \ + GOCACHE=/home/runner/.cache/go-build CMD ["bash", "./scripts/ci-ubuntu.sh"]