From 8d999eb7099ee9e907dedb0e403c000449e02312 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 16:46:25 +0000 Subject: [PATCH 1/6] refactor: Consolidate cobra command construction --- backup/cmd/job.go | 56 ++++++++++++++++++++++++++++++++ backup/cmd/list.go | 27 ++++----------- backup/cmd/run.go | 34 +++++-------------- backup/cmd/simulate.go | 35 +++++--------------- backup/cmd/test/commands_test.go | 54 ++++++++++++++++++++++++++++++ 5 files changed, 133 insertions(+), 73 deletions(-) create mode 100644 backup/cmd/job.go diff --git a/backup/cmd/job.go b/backup/cmd/job.go new file mode 100644 index 0000000..710da55 --- /dev/null +++ b/backup/cmd/job.go @@ -0,0 +1,56 @@ +package cmd + +import ( + "backup-rsync/backup/internal" + "fmt" + "io" + "log" + "time" + + "github.com/spf13/afero" + "github.com/spf13/cobra" +) + +type jobCommandOptions struct { + use string + short string + needsLog bool + simulate bool + factory func(rsyncPath string, logPath string, out io.Writer) internal.JobCommand +} + +func buildJobCommand(fs afero.Fs, opts jobCommandOptions) *cobra.Command { + return &cobra.Command{ + Use: opts.use, + Short: opts.short, + RunE: func(cmd *cobra.Command, args []string) error { + configPath, _ := cmd.Flags().GetString("config") + rsyncPath, _ := cmd.Flags().GetString("rsync-path") + + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("loading config: %w", err) + } + + out := cmd.OutOrStdout() + logger := log.New(io.Discard, "", 0) + + var logPath string + + if opts.needsLog { + var cleanup func() error + + logger, logPath, cleanup, err = internal.CreateMainLogger(fs, configPath, opts.simulate, time.Now()) + if err != nil { + return fmt.Errorf("creating logger: %w", err) + } + + defer cleanup() + } + + command := opts.factory(rsyncPath, logPath, out) + + return cfg.Apply(command, logger, out) + }, + } +} diff --git a/backup/cmd/list.go b/backup/cmd/list.go 
index 341105a..d7a4f54 100644 --- a/backup/cmd/list.go +++ b/backup/cmd/list.go @@ -2,32 +2,17 @@ package cmd import ( "backup-rsync/backup/internal" - "fmt" "io" - "log" "github.com/spf13/cobra" ) func buildListCommand(shell internal.Exec) *cobra.Command { - return &cobra.Command{ - Use: "list", - Short: "List the commands that will be executed", - RunE: func(cmd *cobra.Command, args []string) error { - configPath, _ := cmd.Flags().GetString("config") - rsyncPath, _ := cmd.Flags().GetString("rsync-path") - - cfg, err := internal.LoadResolvedConfig(configPath) - if err != nil { - return fmt.Errorf("loading config: %w", err) - } - - out := cmd.OutOrStdout() - command := internal.NewListCommand(rsyncPath, shell, out) - - logger := log.New(io.Discard, "", 0) - - return cfg.Apply(command, logger, out) + return buildJobCommand(nil, jobCommandOptions{ + use: "list", + short: "List the commands that will be executed", + factory: func(rsyncPath string, _ string, out io.Writer) internal.JobCommand { + return internal.NewListCommand(rsyncPath, shell, out) }, - } + }) } diff --git a/backup/cmd/run.go b/backup/cmd/run.go index 353c19a..d9f0923 100644 --- a/backup/cmd/run.go +++ b/backup/cmd/run.go @@ -2,37 +2,19 @@ package cmd import ( "backup-rsync/backup/internal" - "fmt" - "time" + "io" "github.com/spf13/afero" "github.com/spf13/cobra" ) func buildRunCommand(fs afero.Fs, shell internal.Exec) *cobra.Command { - return &cobra.Command{ - Use: "run", - Short: "Execute the sync jobs", - RunE: func(cmd *cobra.Command, args []string) error { - configPath, _ := cmd.Flags().GetString("config") - rsyncPath, _ := cmd.Flags().GetString("rsync-path") - - cfg, err := internal.LoadResolvedConfig(configPath) - if err != nil { - return fmt.Errorf("loading config: %w", err) - } - - logger, logPath, cleanup, err := internal.CreateMainLogger(fs, configPath, false, time.Now()) - if err != nil { - return fmt.Errorf("creating logger: %w", err) - } - - defer cleanup() - - out := cmd.OutOrStdout() 
- command := internal.NewSyncCommand(rsyncPath, logPath, shell, out) - - return cfg.Apply(command, logger, out) + return buildJobCommand(fs, jobCommandOptions{ + use: "run", + short: "Execute the sync jobs", + needsLog: true, + factory: func(rsyncPath string, logPath string, out io.Writer) internal.JobCommand { + return internal.NewSyncCommand(rsyncPath, logPath, shell, out) }, - } + }) } diff --git a/backup/cmd/simulate.go b/backup/cmd/simulate.go index ebcf75e..441d7c8 100644 --- a/backup/cmd/simulate.go +++ b/backup/cmd/simulate.go @@ -2,37 +2,20 @@ package cmd import ( "backup-rsync/backup/internal" - "fmt" - "time" + "io" "github.com/spf13/afero" "github.com/spf13/cobra" ) func buildSimulateCommand(fs afero.Fs, shell internal.Exec) *cobra.Command { - return &cobra.Command{ - Use: "simulate", - Short: "Simulate the sync jobs", - RunE: func(cmd *cobra.Command, args []string) error { - configPath, _ := cmd.Flags().GetString("config") - rsyncPath, _ := cmd.Flags().GetString("rsync-path") - - cfg, err := internal.LoadResolvedConfig(configPath) - if err != nil { - return fmt.Errorf("loading config: %w", err) - } - - logger, logPath, cleanup, err := internal.CreateMainLogger(fs, configPath, true, time.Now()) - if err != nil { - return fmt.Errorf("creating logger: %w", err) - } - - defer cleanup() - - out := cmd.OutOrStdout() - command := internal.NewSimulateCommand(rsyncPath, logPath, shell, out) - - return cfg.Apply(command, logger, out) + return buildJobCommand(fs, jobCommandOptions{ + use: "simulate", + short: "Simulate the sync jobs", + needsLog: true, + simulate: true, + factory: func(rsyncPath string, logPath string, out io.Writer) internal.JobCommand { + return internal.NewSimulateCommand(rsyncPath, logPath, shell, out) }, - } + }) } diff --git a/backup/cmd/test/commands_test.go b/backup/cmd/test/commands_test.go index 376c234..3b3b00b 100644 --- a/backup/cmd/test/commands_test.go +++ b/backup/cmd/test/commands_test.go @@ -4,6 +4,7 @@ import ( "bytes" "os" 
"path/filepath" + "strings" "testing" "backup-rsync/backup/cmd" @@ -394,3 +395,56 @@ jobs: assert.Contains(t, stdout, "Job: docs") assert.Contains(t, stdout, "Status [docs]: SUCCESS") } + +// --- run: logger cleanup happens after cfg.Apply completes --- + +func TestRun_LoggerOpenDuringApply(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" + enabled: true +`) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + fs := afero.NewMemMapFs() + + stdout, err := executeCommandWithDeps(t, fs, shell, "run", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Status [docs]: SUCCESS") + + // Walk the in-memory filesystem to find the summary log written by the logger. + // cfg.Apply writes "STATUS [docs]: SUCCESS" via the logger after the if-block + // where defer cleanup() is registered. If cleanup ran too early (closing the + // log file before Apply), the log file would be empty or missing this entry. 
+ var summaryContent string + + _ = afero.Walk(fs, "logs", func(path string, info os.FileInfo, walkErr error) error { + if walkErr != nil { + return walkErr + } + + if info.IsDir() { + return nil + } + + if strings.HasSuffix(path, "summary.log") { + data, readErr := afero.ReadFile(fs, path) + require.NoError(t, readErr) + + summaryContent = string(data) + } + + return nil + }) + + require.NotEmpty(t, summaryContent, "summary.log should have been created") + assert.Contains(t, summaryContent, "STATUS [docs]: SUCCESS", + "logger must remain open during cfg.Apply — proves defer cleanup() is function-scoped") +} From e089708787a57bc7c45a42ec3350ea098aee1015 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 16:54:00 +0000 Subject: [PATCH 2/6] refactor: Shift WriteConfigFile into a testutil package --- backup/cmd/test/commands_test.go | 38 +++++++-------------- backup/cmd/test/integration_test.go | 53 ++++++++++++----------------- backup/internal/test/config_test.go | 29 +++++----------- backup/internal/testutil/config.go | 25 ++++++++++++++ 4 files changed, 67 insertions(+), 78 deletions(-) create mode 100644 backup/internal/testutil/config.go diff --git a/backup/cmd/test/commands_test.go b/backup/cmd/test/commands_test.go index 3b3b00b..437d656 100644 --- a/backup/cmd/test/commands_test.go +++ b/backup/cmd/test/commands_test.go @@ -3,12 +3,12 @@ package cmd_test import ( "bytes" "os" - "path/filepath" "strings" "testing" "backup-rsync/backup/cmd" "backup-rsync/backup/internal" + "backup-rsync/backup/internal/testutil" "github.com/spf13/afero" "github.com/stretchr/testify/assert" @@ -24,18 +24,6 @@ func (s *stubExec) Execute(_ string, _ ...string) ([]byte, error) { return s.output, s.err } -func writeConfigFile(t *testing.T, content string) string { - t.Helper() - - dir := t.TempDir() - path := filepath.Join(dir, "test.yaml") - - err := os.WriteFile(path, []byte(content), 0600) - require.NoError(t, err) - -
return path -} - func executeCommand(t *testing.T, args ...string) (string, error) { t.Helper() @@ -77,7 +65,7 @@ func executeCommandWithDeps(t *testing.T, fs afero.Fs, shell internal.Exec, args // --- config show --- func TestConfigShow_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -104,7 +92,7 @@ func TestConfigShow_MissingFile(t *testing.T) { } func TestConfigShow_InvalidYAML(t *testing.T) { - cfgPath := writeConfigFile(t, `{{{invalid yaml`) + cfgPath := testutil.WriteConfigFile(t, `{{{invalid yaml`) _, err := executeCommand(t, "config", "show", "--config", cfgPath) @@ -115,7 +103,7 @@ func TestConfigShow_InvalidYAML(t *testing.T) { // --- config validate --- func TestConfigValidate_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -140,7 +128,7 @@ func TestConfigValidate_MissingFile(t *testing.T) { } func TestConfigValidate_DuplicateJobNames(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -179,7 +167,7 @@ func TestRun_MissingConfig(t *testing.T) { } func TestRun_CreateLoggerError(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -202,7 +190,7 @@ jobs: } func TestRun_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -233,7 +221,7 @@ func TestSimulate_MissingConfig(t *testing.T) { } func TestSimulate_CreateLoggerError(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -256,7 +244,7 @@ jobs: } func TestSimulate_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ 
-303,7 +291,7 @@ func TestCheckCoverage_MissingConfig(t *testing.T) { } func TestCheckCoverage_WithUncoveredPaths(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/src" targets: @@ -326,7 +314,7 @@ jobs: } func TestCheckCoverage_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/src" targets: @@ -375,7 +363,7 @@ func TestVersion_WithMockExec(t *testing.T) { // --- list (positive path) --- func TestList_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: @@ -399,7 +387,7 @@ jobs: // --- run: logger cleanup happens after cfg.Apply completes --- func TestRun_LoggerOpenDuringApply(t *testing.T) { - cfgPath := writeConfigFile(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/home" targets: diff --git a/backup/cmd/test/integration_test.go b/backup/cmd/test/integration_test.go index 76c073f..d138215 100644 --- a/backup/cmd/test/integration_test.go +++ b/backup/cmd/test/integration_test.go @@ -55,17 +55,6 @@ func readFileContent(t *testing.T, path string) string { return string(data) } -func writeIntegrationConfig(t *testing.T, yaml string) string { - t.Helper() - - dir := t.TempDir() - path := filepath.Join(dir, "test.yaml") - - require.NoError(t, os.WriteFile(path, []byte(yaml), 0600)) - - return path -} - func executeIntegrationCommand(t *testing.T, args ...string) (string, error) { t.Helper() @@ -90,7 +79,7 @@ func TestIntegration_Run_BasicSync(t *testing.T) { writeFile(t, filepath.Join(src, "hello.txt"), "hello world") writeFile(t, filepath.Join(src, "subdir", "nested.txt"), "nested content") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -119,7 +108,7 @@ func TestIntegration_Run_IdempotentSync(t *testing.T) { writeFile(t, filepath.Join(src, "data.txt"), 
"same content") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -151,7 +140,7 @@ func TestIntegration_Run_DeleteRemovesExtraFiles(t *testing.T) { writeFile(t, filepath.Join(dst, "keep.txt"), "keep me") writeFile(t, filepath.Join(dst, "stale.txt"), "should be removed") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -180,7 +169,7 @@ func TestIntegration_Run_NoDeletePreservesExtraFiles(t *testing.T) { writeFile(t, filepath.Join(src, "a.txt"), "a") writeFile(t, filepath.Join(dst, "extra.txt"), "should remain") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -210,7 +199,7 @@ func TestIntegration_Run_Exclusions(t *testing.T) { writeFile(t, filepath.Join(src, "cache", "tmp.dat"), "temporary data") writeFile(t, filepath.Join(src, "logs", "app.log"), "log data") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -241,7 +230,7 @@ func TestIntegration_Run_DisabledJobSkipped(t *testing.T) { writeFile(t, filepath.Join(src, "file.txt"), "content") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -278,7 +267,7 @@ func TestIntegration_Run_MultipleJobs(t *testing.T) { writeFile(t, filepath.Join(srcA, "a.txt"), "alpha") writeFile(t, filepath.Join(srcB, "b.txt"), "bravo") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+base+`" targets: @@ -311,7 +300,7 @@ func TestIntegration_Run_PartialChanges(t *testing.T) { writeFile(t, filepath.Join(src, "unchanged.txt"), "same") writeFile(t, filepath.Join(src, "modified.txt"), "original") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -348,7 +337,7 @@ 
func TestIntegration_Simulate_NoChanges(t *testing.T) { writeFile(t, filepath.Join(src, "new.txt"), "should not appear in target") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -377,7 +366,7 @@ func TestIntegration_Simulate_ShowsChanges(t *testing.T) { writeFile(t, filepath.Join(src, "report.txt"), "quarterly report") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -403,7 +392,7 @@ func TestIntegration_SimulateThenRun(t *testing.T) { writeFile(t, filepath.Join(src, "data.txt"), "important data") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -435,7 +424,7 @@ func TestIntegration_List_ShowsCommands(t *testing.T) { writeFile(t, filepath.Join(src, "x.txt"), "x") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -468,7 +457,7 @@ func TestIntegration_Run_VariableSubstitution(t *testing.T) { writeFile(t, filepath.Join(src, "v.txt"), "vars work") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -507,7 +496,7 @@ func TestIntegration_Run_MixedJobsSummary(t *testing.T) { writeFile(t, filepath.Join(srcOK, "ok.txt"), "ok") writeFile(t, filepath.Join(srcSkip, "skip.txt"), "skip") - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+base+`" targets: @@ -539,7 +528,7 @@ jobs: func TestIntegration_Run_EmptySource(t *testing.T) { src, dst := setupDirs(t) - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -563,7 +552,7 @@ func TestIntegration_Run_DeepHierarchy(t *testing.T) { writeFile(t, filepath.Join(src, "a", "b", "c", "d", "deep.txt"), "deep file") - cfgPath := writeIntegrationConfig(t, ` + 
cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -591,7 +580,7 @@ func TestIntegration_CheckCoverage_FullCoverage(t *testing.T) { dst := t.TempDir() - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -625,7 +614,7 @@ func TestIntegration_CheckCoverage_IncompleteCoverage(t *testing.T) { dst := t.TempDir() - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "`+src+`" targets: @@ -646,7 +635,7 @@ jobs: // --- config show: end-to-end with variable resolution --- func TestIntegration_ConfigShow(t *testing.T) { - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/data" targets: @@ -669,7 +658,7 @@ jobs: // --- config validate: valid config passes --- func TestIntegration_ConfigValidate_Valid(t *testing.T) { - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/data" targets: @@ -689,7 +678,7 @@ jobs: // --- config validate: overlapping sources are rejected --- func TestIntegration_ConfigValidate_OverlappingSources(t *testing.T) { - cfgPath := writeIntegrationConfig(t, ` + cfgPath := testutil.WriteConfigFile(t, ` sources: - path: "/data" targets: diff --git a/backup/internal/test/config_test.go b/backup/internal/test/config_test.go index 8556a96..3b069b1 100644 --- a/backup/internal/test/config_test.go +++ b/backup/internal/test/config_test.go @@ -3,8 +3,6 @@ package internal_test import ( "bytes" "log" - "os" - "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -13,6 +11,7 @@ import ( "gopkg.in/yaml.v3" . 
"backup-rsync/backup/internal" + "backup-rsync/backup/internal/testutil" ) func TestLoadConfig1(t *testing.T) { @@ -365,18 +364,6 @@ func TestResolveConfig(t *testing.T) { assert.Equal(t, "/backup/user/Pictures", resolvedCfg.Jobs[1].Target) } -// writeTestConfig writes YAML content to a temp file and returns its path. -func writeTestConfig(t *testing.T, content string) string { - t.Helper() - - dir := t.TempDir() - path := filepath.Join(dir, "test.yaml") - err := os.WriteFile(path, []byte(content), 0600) - require.NoError(t, err) - - return path -} - func TestLoadResolvedConfig_FileNotFound(t *testing.T) { _, err := LoadResolvedConfig("/nonexistent/path/config.yaml") require.Error(t, err) @@ -384,7 +371,7 @@ func TestLoadResolvedConfig_FileNotFound(t *testing.T) { } func TestLoadResolvedConfig_InvalidYAML(t *testing.T) { - path := writeTestConfig(t, "{{invalid yaml") + path := testutil.WriteConfigFile(t, "{{invalid yaml") _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -405,7 +392,7 @@ jobs: source: "/src/b" target: "/tgt/b" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -424,7 +411,7 @@ jobs: source: "/invalid/source" target: "/backup/stuff" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -445,7 +432,7 @@ jobs: source: "/home/user/docs" target: "/backup/docs" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -468,7 +455,7 @@ jobs: source: "/home/user/docs" target: "/backup/docs" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) cfg, err := LoadResolvedConfig(path) require.NoError(t, err) @@ -489,7 +476,7 @@ jobs: source: "/home/photos" target: "/backup/all/photos" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) _, 
err := LoadResolvedConfig(path) require.Error(t, err) @@ -509,7 +496,7 @@ jobs: source: "/home/docs" target: "${base}/docs" ` - path := writeTestConfig(t, yaml) + path := testutil.WriteConfigFile(t, yaml) cfg, err := LoadResolvedConfig(path) require.NoError(t, err) diff --git a/backup/internal/testutil/config.go b/backup/internal/testutil/config.go new file mode 100644 index 0000000..9b0c3ec --- /dev/null +++ b/backup/internal/testutil/config.go @@ -0,0 +1,25 @@ +// Package testutil provides shared test helpers for use across test packages. +package testutil + +import ( + "os" + "path/filepath" + "testing" + + "backup-rsync/backup/internal" +) + +// WriteConfigFile writes YAML content to a temp file and returns its path. +func WriteConfigFile(t *testing.T, content string) string { + t.Helper() + + dir := t.TempDir() + path := filepath.Join(dir, "test.yaml") + + err := os.WriteFile(path, []byte(content), internal.LogFilePermission) + if err != nil { + t.Fatalf("failed to write config file: %v", err) + } + + return path +} From b5a4cc0fc18b0b99c60712245c112ddf124852a0 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:05:08 +0000 Subject: [PATCH 3/6] refactor: Replace YAML with declarative job builder --- backup/cmd/test/commands_test.go | 162 ++++---------- backup/cmd/test/integration_test.go | 329 ++++++++-------------------- backup/internal/test/config_test.go | 111 +++------- backup/internal/testutil/builder.go | 132 +++++++++++ 4 files changed, 297 insertions(+), 437 deletions(-) create mode 100644 backup/internal/testutil/builder.go diff --git a/backup/cmd/test/commands_test.go b/backup/cmd/test/commands_test.go index 437d656..ea20564 100644 --- a/backup/cmd/test/commands_test.go +++ b/backup/cmd/test/commands_test.go @@ -65,16 +65,10 @@ executeCommandWithDeps(t *testing.T, fs afero.Fs, shell internal.Exec, args // --- config show --- func TestConfigShow_ValidConfig(t *testing.T) { - cfgPath := 
testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/"). + Build()) stdout, err := executeCommand(t, "config", "show", "--config", cfgPath) @@ -103,16 +97,10 @@ func TestConfigShow_InvalidYAML(t *testing.T) { // --- config validate --- func TestConfigValidate_ValidConfig(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/"). + Build()) stdout, err := executeCommand(t, "config", "validate", "--config", cfgPath) @@ -128,19 +116,11 @@ func TestConfigValidate_MissingFile(t *testing.T) { } func TestConfigValidate_DuplicateJobNames(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "same" - source: "/home/a/" - target: "/backup/a/" - - name: "same" - source: "/home/b/" - target: "/backup/b/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("same", "/home/a/", "/backup/a/"). + AddJob("same", "/home/b/", "/backup/b/"). + Build()) _, err := executeCommand(t, "config", "validate", "--config", cfgPath) @@ -167,16 +147,10 @@ func TestRun_MissingConfig(t *testing.T) { } func TestRun_CreateLoggerError(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). 
+ Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/"). + Build()) // Use a read-only filesystem to block log directory creation fs := afero.NewReadOnlyFs(afero.NewMemMapFs()) @@ -190,17 +164,10 @@ jobs: } func TestRun_ValidConfig(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" - enabled: true -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/", testutil.Enabled(true), testutil.Delete(true)). + Build()) shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} @@ -221,16 +188,10 @@ func TestSimulate_MissingConfig(t *testing.T) { } func TestSimulate_CreateLoggerError(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/"). + Build()) // Use a read-only filesystem to block log directory creation fs := afero.NewReadOnlyFs(afero.NewMemMapFs()) @@ -244,17 +205,10 @@ jobs: } func TestSimulate_ValidConfig(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" - enabled: true -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/", testutil.Enabled(true)). 
+ Build()) shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} @@ -291,16 +245,10 @@ func TestCheckCoverage_MissingConfig(t *testing.T) { } func TestCheckCoverage_WithUncoveredPaths(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/src" -targets: - - path: "/dst" -jobs: - - name: "docs" - source: "/src/docs/" - target: "/dst/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/src").Target("/dst"). + AddJob("docs", "/src/docs/", "/dst/docs/"). + Build()) fs := afero.NewMemMapFs() _ = fs.MkdirAll("/src/docs", 0755) @@ -314,16 +262,10 @@ jobs: } func TestCheckCoverage_ValidConfig(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/src" -targets: - - path: "/dst" -jobs: - - name: "docs" - source: "/src/docs/" - target: "/dst/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/src").Target("/dst"). + AddJob("docs", "/src/docs/", "/dst/docs/"). + Build()) fs := afero.NewMemMapFs() _ = fs.MkdirAll("/src/docs", 0755) @@ -363,17 +305,10 @@ func TestVersion_WithMockExec(t *testing.T) { // --- list (positive path) --- func TestList_ValidConfig(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" - enabled: true -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/", testutil.Enabled(true)). 
+ Build()) shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} @@ -387,17 +322,10 @@ jobs: // --- run: logger cleanup happens after cfg.Apply completes --- func TestRun_LoggerOpenDuringApply(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "docs" - source: "/home/docs/" - target: "/backup/docs/" - enabled: true -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/", testutil.Enabled(true)). + Build()) shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} fs := afero.NewMemMapFs() diff --git a/backup/cmd/test/integration_test.go b/backup/cmd/test/integration_test.go index d138215..7a21a4a 100644 --- a/backup/cmd/test/integration_test.go +++ b/backup/cmd/test/integration_test.go @@ -9,6 +9,7 @@ import ( "testing" "backup-rsync/backup/cmd" + "backup-rsync/backup/internal/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -79,17 +80,10 @@ func TestIntegration_Run_BasicSync(t *testing.T) { writeFile(t, filepath.Join(src, "hello.txt"), "hello world") writeFile(t, filepath.Join(src, "subdir", "nested.txt"), "nested content") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "basic" - source: "`+src+`/" - target: "`+dst+`/" - delete: false -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("basic", src+"/", dst+"/", testutil.Delete(false)). 
+ Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -108,16 +102,10 @@ func TestIntegration_Run_IdempotentSync(t *testing.T) { writeFile(t, filepath.Join(src, "data.txt"), "same content") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "idem" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("idem", src+"/", dst+"/"). + Build()) // First sync _, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -140,17 +128,10 @@ func TestIntegration_Run_DeleteRemovesExtraFiles(t *testing.T) { writeFile(t, filepath.Join(dst, "keep.txt"), "keep me") writeFile(t, filepath.Join(dst, "stale.txt"), "should be removed") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "cleanup" - source: "`+src+`/" - target: "`+dst+`/" - delete: true -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("cleanup", src+"/", dst+"/", testutil.Delete(true)). + Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -169,17 +150,10 @@ func TestIntegration_Run_NoDeletePreservesExtraFiles(t *testing.T) { writeFile(t, filepath.Join(src, "a.txt"), "a") writeFile(t, filepath.Join(dst, "extra.txt"), "should remain") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "nodelete" - source: "`+src+`/" - target: "`+dst+`/" - delete: false -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("nodelete", src+"/", dst+"/", testutil.Delete(false)). 
+ Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -199,19 +173,10 @@ func TestIntegration_Run_Exclusions(t *testing.T) { writeFile(t, filepath.Join(src, "cache", "tmp.dat"), "temporary data") writeFile(t, filepath.Join(src, "logs", "app.log"), "log data") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "filtered" - source: "`+src+`/" - target: "`+dst+`/" - exclusions: - - "cache" - - "logs" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("filtered", src+"/", dst+"/", testutil.Exclusions("cache", "logs")). + Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -230,17 +195,10 @@ func TestIntegration_Run_DisabledJobSkipped(t *testing.T) { writeFile(t, filepath.Join(src, "file.txt"), "content") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "disabled-job" - source: "`+src+`/" - target: "`+dst+`/" - enabled: false -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("disabled-job", src+"/", dst+"/", testutil.Enabled(false)). + Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -267,19 +225,11 @@ func TestIntegration_Run_MultipleJobs(t *testing.T) { writeFile(t, filepath.Join(srcA, "a.txt"), "alpha") writeFile(t, filepath.Join(srcB, "b.txt"), "bravo") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+base+`" -targets: - - path: "`+base+`" -jobs: - - name: "jobA" - source: "`+srcA+`/" - target: "`+dstA+`/" - - name: "jobB" - source: "`+srcB+`/" - target: "`+dstB+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(base).Target(base). + AddJob("jobA", srcA+"/", dstA+"/"). + AddJob("jobB", srcB+"/", dstB+"/"). 
+ Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -300,16 +250,10 @@ func TestIntegration_Run_PartialChanges(t *testing.T) { writeFile(t, filepath.Join(src, "unchanged.txt"), "same") writeFile(t, filepath.Join(src, "modified.txt"), "original") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "partial" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("partial", src+"/", dst+"/"). + Build()) // Initial sync _, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -337,16 +281,10 @@ func TestIntegration_Simulate_NoChanges(t *testing.T) { writeFile(t, filepath.Join(src, "new.txt"), "should not appear in target") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "dryrun" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("dryrun", src+"/", dst+"/"). + Build()) stdout, err := executeIntegrationCommand(t, "simulate", "--config", cfgPath) @@ -366,16 +304,10 @@ func TestIntegration_Simulate_ShowsChanges(t *testing.T) { writeFile(t, filepath.Join(src, "report.txt"), "quarterly report") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "preview" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("preview", src+"/", dst+"/"). 
+ Build()) stdout, err := executeIntegrationCommand(t, "simulate", "--config", cfgPath) @@ -392,16 +324,10 @@ func TestIntegration_SimulateThenRun(t *testing.T) { writeFile(t, filepath.Join(src, "data.txt"), "important data") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "workflow" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("workflow", src+"/", dst+"/"). + Build()) // Simulate first _, err := executeIntegrationCommand(t, "simulate", "--config", cfgPath) @@ -424,18 +350,10 @@ func TestIntegration_List_ShowsCommands(t *testing.T) { writeFile(t, filepath.Join(src, "x.txt"), "x") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "listjob" - source: "`+src+`/" - target: "`+dst+`/" - exclusions: - - "temp" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("listjob", src+"/", dst+"/", testutil.Exclusions("temp")). + Build()) stdout, err := executeIntegrationCommand(t, "list", "--config", cfgPath) @@ -457,19 +375,11 @@ func TestIntegration_Run_VariableSubstitution(t *testing.T) { writeFile(t, filepath.Join(src, "v.txt"), "vars work") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -variables: - src_dir: "`+src+`" - dst_dir: "`+dst+`" -jobs: - - name: "var-job" - source: "${src_dir}/" - target: "${dst_dir}/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + Variable("src_dir", src).Variable("dst_dir", dst). + AddJob("var-job", "${src_dir}/", "${dst_dir}/"). 
+ Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -496,21 +406,11 @@ func TestIntegration_Run_MixedJobsSummary(t *testing.T) { writeFile(t, filepath.Join(srcOK, "ok.txt"), "ok") writeFile(t, filepath.Join(srcSkip, "skip.txt"), "skip") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+base+`" -targets: - - path: "`+base+`" -jobs: - - name: "active" - source: "`+srcOK+`/" - target: "`+dstOK+`/" - enabled: true - - name: "inactive" - source: "`+srcSkip+`/" - target: "`+dstSkip+`/" - enabled: false -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(base).Target(base). + AddJob("active", srcOK+"/", dstOK+"/", testutil.Enabled(true)). + AddJob("inactive", srcSkip+"/", dstSkip+"/", testutil.Enabled(false)). + Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -528,16 +428,10 @@ jobs: func TestIntegration_Run_EmptySource(t *testing.T) { src, dst := setupDirs(t) - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "empty" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("empty", src+"/", dst+"/"). + Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -552,16 +446,10 @@ func TestIntegration_Run_DeepHierarchy(t *testing.T) { writeFile(t, filepath.Join(src, "a", "b", "c", "d", "deep.txt"), "deep file") - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "deep" - source: "`+src+`/" - target: "`+dst+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("deep", src+"/", dst+"/"). 
+ Build()) stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) @@ -580,19 +468,11 @@ func TestIntegration_CheckCoverage_FullCoverage(t *testing.T) { dst := t.TempDir() - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "docs" - source: "`+filepath.Join(src, "docs")+`/" - target: "`+filepath.Join(dst, "docs")+`/" - - name: "photos" - source: "`+filepath.Join(src, "photos")+`/" - target: "`+filepath.Join(dst, "photos")+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("docs", filepath.Join(src, "docs")+"/", filepath.Join(dst, "docs")+"/"). + AddJob("photos", filepath.Join(src, "photos")+"/", filepath.Join(dst, "photos")+"/"). + Build()) stdout, err := executeIntegrationCommand(t, "check-coverage", "--config", cfgPath) @@ -614,16 +494,10 @@ func TestIntegration_CheckCoverage_IncompleteCoverage(t *testing.T) { dst := t.TempDir() - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "docs-only" - source: "`+filepath.Join(src, "docs")+`/" - target: "`+filepath.Join(dst, "docs")+`/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source(src).Target(dst). + AddJob("docs-only", filepath.Join(src, "docs")+"/", filepath.Join(dst, "docs")+"/"). + Build()) stdout, err := executeIntegrationCommand(t, "check-coverage", "--config", cfgPath) @@ -635,18 +509,11 @@ jobs: // --- config show: end-to-end with variable resolution --- func TestIntegration_ConfigShow(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/data" -targets: - - path: "/backup" -variables: - base: "/backup" -jobs: - - name: "resolved" - source: "/data/files/" - target: "${base}/files/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/data").Target("/backup"). + Variable("base", "/backup"). 
+ AddJob("resolved", "/data/files/", "${base}/files/"). + Build()) stdout, err := executeIntegrationCommand(t, "config", "show", "--config", cfgPath) @@ -658,16 +525,10 @@ jobs: // --- config validate: valid config passes --- func TestIntegration_ConfigValidate_Valid(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/data" -targets: - - path: "/backup" -jobs: - - name: "valid" - source: "/data/stuff/" - target: "/backup/stuff/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/data").Target("/backup"). + AddJob("valid", "/data/stuff/", "/backup/stuff/"). + Build()) stdout, err := executeIntegrationCommand(t, "config", "validate", "--config", cfgPath) @@ -678,19 +539,11 @@ jobs: // --- config validate: overlapping sources are rejected --- func TestIntegration_ConfigValidate_OverlappingSources(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, ` -sources: - - path: "/data" -targets: - - path: "/backup" -jobs: - - name: "parent" - source: "/data/user/" - target: "/backup/user/" - - name: "child" - source: "/data/user/docs/" - target: "/backup/docs/" -`) + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/data").Target("/backup"). + AddJob("parent", "/data/user/", "/backup/user/"). + AddJob("child", "/data/user/docs/", "/backup/docs/"). 
+ Build()) _, err := executeIntegrationCommand(t, "config", "validate", "--config", cfgPath) diff --git a/backup/internal/test/config_test.go b/backup/internal/test/config_test.go index 3b069b1..cee781b 100644 --- a/backup/internal/test/config_test.go +++ b/backup/internal/test/config_test.go @@ -379,20 +379,11 @@ func TestLoadResolvedConfig_InvalidYAML(t *testing.T) { } func TestLoadResolvedConfig_DuplicateJobNames(t *testing.T) { - yaml := ` -sources: - - path: "/src" -targets: - - path: "/tgt" -jobs: - - name: "dup" - source: "/src/a" - target: "/tgt/a" - - name: "dup" - source: "/src/b" - target: "/tgt/b" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/src").Target("/tgt"). + AddJob("dup", "/src/a", "/tgt/a"). + AddJob("dup", "/src/b", "/tgt/b"). + Build()) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -401,17 +392,10 @@ jobs: } func TestLoadResolvedConfig_InvalidSourcePath(t *testing.T) { - yaml := ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "job1" - source: "/invalid/source" - target: "/backup/stuff" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("job1", "/invalid/source", "/backup/stuff"). + Build()) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -419,20 +403,11 @@ jobs: } func TestLoadResolvedConfig_OverlappingSourcePaths(t *testing.T) { - yaml := ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "parent" - source: "/home/user" - target: "/backup/user" - - name: "child" - source: "/home/user/docs" - target: "/backup/docs" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("parent", "/home/user", "/backup/user"). + AddJob("child", "/home/user/docs", "/backup/docs"). 
+ Build()) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -440,22 +415,11 @@ jobs: } func TestLoadResolvedConfig_OverlappingSourcePathsAllowedByExclusion(t *testing.T) { - yaml := ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "parent" - source: "/home/user" - target: "/backup/user" - exclusions: - - "docs" - - name: "child" - source: "/home/user/docs" - target: "/backup/docs" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("parent", "/home/user", "/backup/user", testutil.Exclusions("docs")). + AddJob("child", "/home/user/docs", "/backup/docs"). + Build()) cfg, err := LoadResolvedConfig(path) require.NoError(t, err) @@ -463,20 +427,11 @@ jobs: } func TestLoadResolvedConfig_OverlappingTargetPaths(t *testing.T) { - yaml := ` -sources: - - path: "/home" -targets: - - path: "/backup" -jobs: - - name: "job1" - source: "/home/docs" - target: "/backup/all" - - name: "job2" - source: "/home/photos" - target: "/backup/all/photos" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("job1", "/home/docs", "/backup/all"). + AddJob("job2", "/home/photos", "/backup/all/photos"). + Build()) _, err := LoadResolvedConfig(path) require.Error(t, err) @@ -484,19 +439,11 @@ jobs: } func TestLoadResolvedConfig_ValidConfig(t *testing.T) { - yaml := ` -sources: - - path: "/home" -targets: - - path: "/backup" -variables: - base: "/backup" -jobs: - - name: "docs" - source: "/home/docs" - target: "${base}/docs" -` - path := testutil.WriteConfigFile(t, yaml) + path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + Variable("base", "/backup"). + AddJob("docs", "/home/docs", "${base}/docs"). 
+ Build()) cfg, err := LoadResolvedConfig(path) require.NoError(t, err) diff --git a/backup/internal/testutil/builder.go b/backup/internal/testutil/builder.go new file mode 100644 index 0000000..a2cf083 --- /dev/null +++ b/backup/internal/testutil/builder.go @@ -0,0 +1,132 @@ +package testutil + +import ( + "fmt" + "strings" +) + +// JobOpt configures optional fields on a job definition. +type JobOpt func(*jobDef) + +type jobDef struct { + name string + source string + target string + delete *bool + enabled *bool + exclusions []string +} + +// ConfigBuilder constructs YAML config strings declaratively. +type ConfigBuilder struct { + sources []string + targets []string + variables map[string]string + jobs []jobDef +} + +// NewConfigBuilder creates an empty ConfigBuilder. +func NewConfigBuilder() *ConfigBuilder { + return &ConfigBuilder{ + variables: make(map[string]string), + } +} + +// Source adds a source path. +func (b *ConfigBuilder) Source(path string) *ConfigBuilder { + b.sources = append(b.sources, path) + + return b +} + +// Target adds a target path. +func (b *ConfigBuilder) Target(path string) *ConfigBuilder { + b.targets = append(b.targets, path) + + return b +} + +// Variable adds a variable for substitution. +func (b *ConfigBuilder) Variable(key, value string) *ConfigBuilder { + b.variables[key] = value + + return b +} + +// AddJob adds a job with the given name, source, and target paths. +func (b *ConfigBuilder) AddJob(name, source, target string, opts ...JobOpt) *ConfigBuilder { + j := jobDef{name: name, source: source, target: target} + for _, opt := range opts { + opt(&j) + } + + b.jobs = append(b.jobs, j) + + return b +} + +// Build produces the YAML config string. 
+func (b *ConfigBuilder) Build() string { + var result strings.Builder + + result.WriteString("sources:\n") + + for _, s := range b.sources { + fmt.Fprintf(&result, " - path: %q\n", s) + } + + result.WriteString("targets:\n") + + for _, t := range b.targets { + fmt.Fprintf(&result, " - path: %q\n", t) + } + + if len(b.variables) > 0 { + result.WriteString("variables:\n") + + for k, v := range b.variables { + fmt.Fprintf(&result, " %s: %q\n", k, v) + } + } + + result.WriteString("jobs:\n") + + for _, job := range b.jobs { + fmt.Fprintf(&result, " - name: %q\n", job.name) + fmt.Fprintf(&result, " source: %q\n", job.source) + fmt.Fprintf(&result, " target: %q\n", job.target) + + if job.delete != nil { + fmt.Fprintf(&result, " delete: %v\n", *job.delete) + } + + if job.enabled != nil { + fmt.Fprintf(&result, " enabled: %v\n", *job.enabled) + } + + if len(job.exclusions) > 0 { + result.WriteString(" exclusions:\n") + + for _, e := range job.exclusions { + fmt.Fprintf(&result, " - %q\n", e) + } + } + } + + return result.String() +} + +// Enabled sets the enabled flag on a job. +func Enabled(v bool) JobOpt { + return func(j *jobDef) { j.enabled = &v } +} + +// Delete sets the delete flag on a job. +func Delete(v bool) JobOpt { + return func(j *jobDef) { j.delete = &v } +} + +// Exclusions sets exclusion patterns on a job. 
+func Exclusions(v ...string) JobOpt { + return func(j *jobDef) { j.exclusions = v } +} From 8bfc46b56856a0928f5e971a3c64f6bc9ce21487 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:12:53 +0000 Subject: [PATCH 4/6] refactor: Add testutil NewTestJob --- backup/internal/test/job_test.go | 19 ++++--------------- backup/internal/test/rsync_test.go | 26 ++++++++------------------ backup/internal/testutil/job.go | 30 ++++++++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 33 deletions(-) create mode 100644 backup/internal/testutil/job.go diff --git a/backup/internal/test/job_test.go b/backup/internal/test/job_test.go index 3186f81..5cb851d 100644 --- a/backup/internal/test/job_test.go +++ b/backup/internal/test/job_test.go @@ -2,6 +2,7 @@ package internal_test import ( . "backup-rsync/backup/internal" + "backup-rsync/backup/internal/testutil" "testing" "github.com/stretchr/testify/assert" @@ -9,22 +10,10 @@ import ( "gopkg.in/yaml.v3" ) -func newJob() Job { - return Job{ - Name: "job", - Source: "", - Target: "", - Delete: true, - Enabled: true, - Exclusions: []string{}, - } -} - func TestApply_DisabledJob_ReturnsSkippedAndRunIsNotCalled(t *testing.T) { mockJobCommand := NewMockJobCommand(t) - disabledJob := newJob() - disabledJob.Enabled = false + disabledJob := testutil.NewTestJob(testutil.WithEnabled(false)) // No expectations set - Run should NOT be called for disabled jobs @@ -36,7 +25,7 @@ func TestApply_DisabledJob_ReturnsSkippedAndRunIsNotCalled(t *testing.T) { func TestApply_JobFailing_RunIsCalledAndReturnsFailure(t *testing.T) { mockJobCommand := NewMockJobCommand(t) - job := newJob() + job := testutil.NewTestJob() // Set expectation that Run will be called and return Failure mockJobCommand.EXPECT().Run(job).Return(Failure).Once() @@ -49,7 +38,7 @@ func TestApply_JobFailing_RunIsCalledAndReturnsFailure(t *testing.T) { func TestApply_JobSucceeds_RunIsCalledAndReturnsSuccess(t *testing.T) { 
mockJobCommand := NewMockJobCommand(t) - job := newJob() + job := testutil.NewTestJob() // Set expectation that Run will be called and return Success mockJobCommand.EXPECT().Run(job).Return(Success).Once() diff --git a/backup/internal/test/rsync_test.go b/backup/internal/test/rsync_test.go index 90598b9..d2b7f67 100644 --- a/backup/internal/test/rsync_test.go +++ b/backup/internal/test/rsync_test.go @@ -2,6 +2,7 @@ package internal_test import ( . "backup-rsync/backup/internal" + "backup-rsync/backup/internal/testutil" "bytes" "errors" "io" @@ -150,17 +151,6 @@ func TestGetVersionInfo_IncompletePath(t *testing.T) { assert.Empty(t, fullpath) } -func newTestJob() Job { - return Job{ - Name: "test-job", - Source: "/home/user/docs/", - Target: "/backup/user/docs/", - Delete: true, - Enabled: true, - Exclusions: []string{"*.tmp"}, - } -} - func TestNewSharedCommand(t *testing.T) { mockExec := NewMockExec(t) cmd := NewSharedCommand(rsyncPath, "/logs/base", mockExec, io.Discard) @@ -173,7 +163,7 @@ func TestNewSharedCommand(t *testing.T) { func TestJobLogPath(t *testing.T) { cmd := NewSharedCommand(rsyncPath, "/logs/sync-2025", nil, io.Discard) - job := newTestJob() + job := testutil.NewTestJob() logPath := cmd.JobLogPath(job) @@ -195,7 +185,7 @@ func TestListCommand_Run_ReturnsSuccess(t *testing.T) { var buf bytes.Buffer cmd := NewListCommand(rsyncPath, mockExec, &buf) - job := newTestJob() + job := testutil.NewTestJob() status := cmd.Run(job) @@ -219,7 +209,7 @@ func TestSyncCommand_Run_Success(t *testing.T) { var buf bytes.Buffer cmd := NewSyncCommand(rsyncPath, "/logs/base", mockExec, &buf) - job := newTestJob() + job := testutil.NewTestJob() mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). 
Return([]byte("sync output"), nil).Once() @@ -234,7 +224,7 @@ func TestSyncCommand_Run_Success(t *testing.T) { func TestSyncCommand_Run_Failure(t *testing.T) { mockExec := NewMockExec(t) cmd := NewSyncCommand(rsyncPath, "/logs/base", mockExec, io.Discard) - job := newTestJob() + job := testutil.NewTestJob() mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). Return(nil, errCommandNotFound).Once() @@ -260,7 +250,7 @@ func TestSimulateCommand_Run_Success(t *testing.T) { var buf bytes.Buffer cmd := NewSimulateCommand(rsyncPath, logDir, mockExec, &buf) - job := newTestJob() + job := testutil.NewTestJob() mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). Return([]byte("simulated output"), nil).Once() @@ -275,7 +265,7 @@ func TestSimulateCommand_Run_Failure(t *testing.T) { mockExec := NewMockExec(t) logDir := t.TempDir() cmd := NewSimulateCommand(rsyncPath, logDir, mockExec, io.Discard) - job := newTestJob() + job := testutil.NewTestJob() mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). Return(nil, errCommandNotFound).Once() @@ -292,7 +282,7 @@ func TestSimulateCommand_Run_LogWriteError(t *testing.T) { // Use a non-existent directory so WriteFile fails cmd := NewSimulateCommand(rsyncPath, "/nonexistent/path", mockExec, &buf) - job := newTestJob() + job := testutil.NewTestJob() mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). Return([]byte("output"), nil).Once() diff --git a/backup/internal/testutil/job.go b/backup/internal/testutil/job.go new file mode 100644 index 0000000..7f357a6 --- /dev/null +++ b/backup/internal/testutil/job.go @@ -0,0 +1,30 @@ +package testutil + +import "backup-rsync/backup/internal" + +// TestJobOpt configures a test Job struct. +type TestJobOpt func(*internal.Job) + +// NewTestJob creates a Job with sensible defaults for testing. +// Override individual fields with option functions. 
+func NewTestJob(opts ...TestJobOpt) internal.Job { + job := internal.Job{ + Name: "test-job", + Source: "/home/user/docs/", + Target: "/backup/user/docs/", + Delete: true, + Enabled: true, + Exclusions: []string{"*.tmp"}, + } + + for _, opt := range opts { + opt(&job) + } + + return job +} + +// WithEnabled sets the job enabled flag. +func WithEnabled(enabled bool) TestJobOpt { + return func(job *internal.Job) { job.Enabled = enabled } +} From c0323859f7096589c05d570c33194741dd626aa0 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:22:13 +0000 Subject: [PATCH 5/6] refactor: Table-driven tests (canonical Go pattern) --- .golangci.yml | 2 - backup/cmd/test/commands_test.go | 128 ++++---- backup/internal/test/check_test.go | 197 +++++------- backup/internal/test/config_test.go | 451 +++++++++++----------------- backup/internal/test/job_test.go | 71 +++-- backup/internal/test/rsync_test.go | 205 ++++++------- 6 files changed, 435 insertions(+), 619 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 3445966..9e7e0f4 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -9,8 +9,6 @@ linters: - errcheck - forbidigo settings: - dupl: - threshold: 200 depguard: rules: main: diff --git a/backup/cmd/test/commands_test.go b/backup/cmd/test/commands_test.go index ea20564..df2262d 100644 --- a/backup/cmd/test/commands_test.go +++ b/backup/cmd/test/commands_test.go @@ -85,6 +85,53 @@ func TestConfigShow_MissingFile(t *testing.T) { assert.Contains(t, err.Error(), "loading config") } +// --- missing config (shared pattern) --- + +func TestMissingConfig(t *testing.T) { + tests := []struct { + name string + args []string + wantErr string + }{ + {"list", []string{"list", "--config", "/nonexistent/config.yaml"}, "loading config"}, + {"run", []string{"run", "--config", "/nonexistent/config.yaml"}, "loading config"}, + {"simulate", []string{"simulate", "--config", "/nonexistent/config.yaml"}, "loading config"}, + } + 
+ for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + _, err := executeCommand(t, test.args...) + + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + }) + } +} + +// --- create logger error (shared pattern) --- + +func TestCreateLoggerError(t *testing.T) { + commands := []string{"run", "simulate"} + + for _, command := range commands { + t.Run(command, func(t *testing.T) { + cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). + Source("/home").Target("/backup"). + AddJob("docs", "/home/docs/", "/backup/docs/"). + Build()) + + fs := afero.NewReadOnlyFs(afero.NewMemMapFs()) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + _, err := executeCommandWithDeps(t, fs, shell, command, "--config", cfgPath) + + require.Error(t, err) + assert.Contains(t, err.Error(), "creating logger") + }) + } +} + func TestConfigShow_InvalidYAML(t *testing.T) { cfgPath := testutil.WriteConfigFile(t, `{{{invalid yaml`) @@ -128,41 +175,8 @@ func TestConfigValidate_DuplicateJobNames(t *testing.T) { assert.Contains(t, err.Error(), "validating config") } -// --- list --- - -func TestList_MissingConfig(t *testing.T) { - _, err := executeCommand(t, "list", "--config", "/nonexistent/config.yaml") - - require.Error(t, err) - assert.Contains(t, err.Error(), "loading config") -} - // --- run --- -func TestRun_MissingConfig(t *testing.T) { - _, err := executeCommand(t, "run", "--config", "/nonexistent/config.yaml") - - require.Error(t, err) - assert.Contains(t, err.Error(), "loading config") -} - -func TestRun_CreateLoggerError(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - AddJob("docs", "/home/docs/", "/backup/docs/"). 
- Build()) - - // Use a read-only filesystem to block log directory creation - fs := afero.NewReadOnlyFs(afero.NewMemMapFs()) - - shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} - - _, err := executeCommandWithDeps(t, fs, shell, "run", "--config", cfgPath) - - require.Error(t, err) - assert.Contains(t, err.Error(), "creating logger") -} - func TestRun_ValidConfig(t *testing.T) { cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). Source("/home").Target("/backup"). @@ -180,30 +194,6 @@ func TestRun_ValidConfig(t *testing.T) { // --- simulate --- -func TestSimulate_MissingConfig(t *testing.T) { - _, err := executeCommand(t, "simulate", "--config", "/nonexistent/config.yaml") - - require.Error(t, err) - assert.Contains(t, err.Error(), "loading config") -} - -func TestSimulate_CreateLoggerError(t *testing.T) { - cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - AddJob("docs", "/home/docs/", "/backup/docs/"). - Build()) - - // Use a read-only filesystem to block log directory creation - fs := afero.NewReadOnlyFs(afero.NewMemMapFs()) - - shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} - - _, err := executeCommandWithDeps(t, fs, shell, "simulate", "--config", cfgPath) - - require.Error(t, err) - assert.Contains(t, err.Error(), "creating logger") -} - func TestSimulate_ValidConfig(t *testing.T) { cfgPath := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). Source("/home").Target("/backup"). 
@@ -221,18 +211,22 @@ func TestSimulate_ValidConfig(t *testing.T) { // --- version --- -func TestVersion_InvalidRsyncPath(t *testing.T) { - _, err := executeCommand(t, "version", "--rsync-path", "not-absolute") - - require.Error(t, err) - assert.Contains(t, err.Error(), "getting version info") -} +func TestVersion_ErrorPaths(t *testing.T) { + tests := []struct { + name, rsyncPath string + }{ + {"InvalidRsyncPath", "not-absolute"}, + {"NonExistentRsyncPath", "/nonexistent/rsync"}, + } -func TestVersion_NonExistentRsyncPath(t *testing.T) { - _, err := executeCommand(t, "version", "--rsync-path", "/nonexistent/rsync") + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + _, err := executeCommand(t, "version", "--rsync-path", test.rsyncPath) - require.Error(t, err) - assert.Contains(t, err.Error(), "getting version info") + require.Error(t, err) + assert.Contains(t, err.Error(), "getting version info") + }) + } } // --- check-coverage --- diff --git a/backup/internal/test/check_test.go b/backup/internal/test/check_test.go index 236cee6..d549276 100644 --- a/backup/internal/test/check_test.go +++ b/backup/internal/test/check_test.go @@ -28,72 +28,38 @@ func newSilentChecker(fs afero.Fs) *CoverageChecker { } } -func TestIsExcludedGlobally_PathGloballyExcluded(t *testing.T) { +func TestIsExcludedGlobally(t *testing.T) { sources := []Path{ - { - Path: "/home/data/", - Exclusions: []string{"/projects/P1/", "/media/"}, - }, - { - Path: "/home/user/", - Exclusions: []string{"/cache/", "/npm/"}, - }, + {Path: "/home/data/", Exclusions: []string{"/projects/P1/", "/media/"}}, + {Path: "/home/user/", Exclusions: []string{"/cache/", "/npm/"}}, } - var logBuffer bytes.Buffer - - checker := newTestChecker(nil, &logBuffer) - - path := "/home/data/projects/P1" - expectedLog := "Path '/home/data/projects/P1' is globally excluded by '/projects/P1/' in source '/home/data/'" - - result := checker.IsExcludedGlobally(path, sources) - assert.True(t, result) - 
assert.Contains(t, logBuffer.String(), expectedLog) -} - -func TestIsExcludedGlobally_PathNotExcluded(t *testing.T) { - sources := []Path{ - { - Path: "/home/data/", - Exclusions: []string{"/projects/P1/", "/media/"}, - }, - { - Path: "/home/user/", - Exclusions: []string{"/cache/", "/npm/"}, - }, + tests := []struct { + name, path, wantLog string + want bool + }{ + {"PathGloballyExcluded", "/home/data/projects/P1", + "globally excluded by '/projects/P1/' in source '/home/data/'", true}, + {"PathNotExcluded", "/home/data/projects/Other", "", false}, + {"PathExcludedInAnotherSource", "/home/user/cache", + "globally excluded by '/cache/' in source '/home/user/'", true}, } - checker := newSilentChecker(nil) - - path := "/home/data/projects/Other" - - result := checker.IsExcludedGlobally(path, sources) - assert.False(t, result) -} - -func TestIsExcludedGlobally_PathExcludedInAnotherSource(t *testing.T) { - sources := []Path{ - { - Path: "/home/data/", - Exclusions: []string{"/projects/P1/", "/media/"}, - }, - { - Path: "/home/user/", - Exclusions: []string{"/cache/", "/npm/"}, - }, - } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var logBuf bytes.Buffer - var logBuffer bytes.Buffer + checker := newTestChecker(nil, &logBuf) - checker := newTestChecker(nil, &logBuffer) + result := checker.IsExcludedGlobally(test.path, sources) - path := "/home/user/cache" - expectedLog := "Path '/home/user/cache' is globally excluded by '/cache/' in source '/home/user/'" + assert.Equal(t, test.want, result) - result := checker.IsExcludedGlobally(path, sources) - assert.True(t, result) - assert.Contains(t, logBuffer.String(), expectedLog) + if test.wantLog != "" { + assert.Contains(t, logBuf.String(), test.wantLog) + } + }) + } } func runListUncoveredPathsTest( @@ -128,87 +94,62 @@ func runListUncoveredPathsTest( assert.ElementsMatch(t, expectedUncoveredPaths, uncoveredPaths) } -// Variation: all paths used. 
-func TestListUncoveredPathsVariationsAllCovered(t *testing.T) { - runListUncoveredPathsTest(t, - map[string][]string{ - "/var/log": {"app1", "app2"}, - "/tmp": {"cache", "temp"}, - }, - Config{ - Sources: []Path{ - {Path: "/var/log"}, - {Path: "/tmp"}, - }, - Jobs: []Job{ - {Name: "Job1", Source: "/var/log"}, - {Name: "Job2", Source: "/tmp"}, +func TestListUncoveredPathsVariations(t *testing.T) { + tests := []struct { + name string + fakeFS map[string][]string + cfg Config + wantPaths []string + }{ + { + name: "AllCovered", + fakeFS: map[string][]string{"/var/log": {"app1", "app2"}, "/tmp": {"cache", "temp"}}, + cfg: Config{ + Sources: []Path{{Path: "/var/log"}, {Path: "/tmp"}}, + Jobs: []Job{{Name: "Job1", Source: "/var/log"}, {Name: "Job2", Source: "/tmp"}}, }, }, - []string{}, - ) -} - -// Variation: one source covered, one uncovered. -func TestListUncoveredPathsVariationsOneCoveredOneUncovered(t *testing.T) { - runListUncoveredPathsTest(t, - map[string][]string{ - "/home/data": {"projects", "media"}, - "/home/user": {"cache", "npm"}, - "/home/user/cache": {}, - "/home/user/npm": {}, - }, - Config{ - Sources: []Path{ - {Path: "/home/data"}, - {Path: "/home/user"}, + { + name: "OneCoveredOneUncovered", + fakeFS: map[string][]string{ + "/home/data": {"projects", "media"}, "/home/user": {"cache", "npm"}, + "/home/user/cache": {}, "/home/user/npm": {}, }, - Jobs: []Job{ - {Name: "Job1", Source: "/home/data"}, + cfg: Config{ + Sources: []Path{{Path: "/home/data"}, {Path: "/home/user"}}, + Jobs: []Job{{Name: "Job1", Source: "/home/data"}}, }, + wantPaths: []string{"/home/user"}, }, - []string{"/home/user"}, - ) -} - -// Variation: one source covered, one uncovered but excluded. 
-func TestListUncoveredPathsVariationsUncoveredExcluded(t *testing.T) { - runListUncoveredPathsTest(t, - map[string][]string{ - "/home/data": {"projects", "media"}, - }, - Config{ - Sources: []Path{ - {Path: "/home/data", Exclusions: []string{"media"}}, - }, - Jobs: []Job{ - {Name: "Job1", Source: "/home/data/projects"}, + { + name: "UncoveredExcluded", + fakeFS: map[string][]string{"/home/data": {"projects", "media"}}, + cfg: Config{ + Sources: []Path{{Path: "/home/data", Exclusions: []string{"media"}}}, + Jobs: []Job{{Name: "Job1", Source: "/home/data/projects"}}, }, }, - []string{}, - ) -} - -// Variation: one source covered, subfolders covered. -func TestListUncoveredPathsVariationsSubfoldersCovered(t *testing.T) { - runListUncoveredPathsTest(t, - map[string][]string{ - "/home/data": {"family"}, - "/home/data/family": {"me", "you"}, - "/home/data/family/me": {"a"}, - "/home/data/family/you": {"a"}, - }, - Config{ - Sources: []Path{ - {Path: "/home/data"}, + { + name: "SubfoldersCovered", + fakeFS: map[string][]string{ + "/home/data": {"family"}, "/home/data/family": {"me", "you"}, + "/home/data/family/me": {"a"}, "/home/data/family/you": {"a"}, }, - Jobs: []Job{ - {Name: "JobMe", Source: "/home/data/family/me"}, - {Name: "JobYou", Source: "/home/data/family/you"}, + cfg: Config{ + Sources: []Path{{Path: "/home/data"}}, + Jobs: []Job{ + {Name: "JobMe", Source: "/home/data/family/me"}, + {Name: "JobYou", Source: "/home/data/family/you"}, + }, }, }, - []string{}, - ) + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + runListUncoveredPathsTest(t, test.fakeFS, test.cfg, test.wantPaths) + }) + } } func TestListUncoveredPathsVariationsSubfoldersPartiallyCovered(t *testing.T) { diff --git a/backup/internal/test/config_test.go b/backup/internal/test/config_test.go index cee781b..670c58e 100644 --- a/backup/internal/test/config_test.go +++ b/backup/internal/test/config_test.go @@ -79,70 +79,62 @@ jobs: } } -func 
TestYAMLUnmarshalingDefaults_FieldsOmitted(t *testing.T) { - yamlData := ` +func TestYAMLUnmarshalingDefaults(t *testing.T) { + tests := []struct { + name string + yaml string + expected Job + }{ + { + name: "FieldsOmitted", + yaml: ` name: "test_job" source: "/source" target: "/target" -` - expected := Job{ - Name: "test_job", - Source: "/source", - Target: "/target", - Delete: true, - Enabled: true, - } - - var job Job - - err := yaml.Unmarshal([]byte(yamlData), &job) - require.NoError(t, err) - assert.Equal(t, expected, job) -} - -func TestYAMLUnmarshalingDefaults_ExplicitFalseValues(t *testing.T) { - yamlData := ` +`, + expected: Job{ + Name: "test_job", Source: "/source", Target: "/target", + Delete: true, Enabled: true, + }, + }, + { + name: "ExplicitFalseValues", + yaml: ` name: "test_job" source: "/source" target: "/target" delete: false enabled: false -` - expected := Job{ - Name: "test_job", - Source: "/source", - Target: "/target", - Delete: false, - Enabled: false, - } - - var job Job - - err := yaml.Unmarshal([]byte(yamlData), &job) - require.NoError(t, err) - assert.Equal(t, expected, job) -} - -func TestYAMLUnmarshalingDefaults_MixedValues(t *testing.T) { - yamlData := ` +`, + expected: Job{ + Name: "test_job", Source: "/source", Target: "/target", + Delete: false, Enabled: false, + }, + }, + { + name: "MixedValues", + yaml: ` name: "test_job" source: "/source" target: "/target" delete: false -` - expected := Job{ - Name: "test_job", - Source: "/source", - Target: "/target", - Delete: false, - Enabled: true, // default +`, + expected: Job{ + Name: "test_job", Source: "/source", Target: "/target", + Delete: false, Enabled: true, + }, + }, } - var job Job + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var job Job - err := yaml.Unmarshal([]byte(yamlData), &job) - require.NoError(t, err) - assert.Equal(t, expected, job) + err := yaml.Unmarshal([]byte(test.yaml), &job) + require.NoError(t, err) + assert.Equal(t, test.expected, 
job) + }) + } } func TestSubstituteVariables(t *testing.T) { @@ -156,170 +148,119 @@ func TestSubstituteVariables(t *testing.T) { assert.Equal(t, expected, result, "SubstituteVariables result mismatch") } -func TestValidateJobNames_ValidJobNames(t *testing.T) { - jobs := []Job{ - {Name: "job1"}, - {Name: "job2"}, - } - - err := ValidateJobNames(jobs) - assert.NoError(t, err) -} - -func TestValidateJobNames_DuplicateJobNames(t *testing.T) { - jobs := []Job{ - {Name: "job1"}, - {Name: "job1"}, - } - - err := ValidateJobNames(jobs) - require.Error(t, err) - assert.Contains(t, err.Error(), "duplicate job name: job1") -} - -func TestValidateJobNames_InvalidCharactersInJobName(t *testing.T) { - jobs := []Job{ - {Name: "job 1"}, - } - - err := ValidateJobNames(jobs) - require.Error(t, err) - assert.Contains(t, err.Error(), "invalid characters in job name: job 1") -} - -func TestValidateJobNames_MixedErrors(t *testing.T) { - jobs := []Job{ - {Name: "job1"}, - {Name: "job 1"}, - {Name: "job1"}, - } - - err := ValidateJobNames(jobs) - require.Error(t, err) - assert.Contains(t, err.Error(), "duplicate job name: job1") -} - -func TestValidatePath_ValidSourcePath(t *testing.T) { - test := struct { - jobPath string - paths []Path - pathType string +func TestValidateJobNames(t *testing.T) { + tests := []struct { + name string + jobs []Job + wantErr string }{ - jobPath: "/home/user/documents", - paths: []Path{{Path: "/home/user"}}, - pathType: "source", + {"ValidJobNames", []Job{{Name: "job1"}, {Name: "job2"}}, ""}, + {"DuplicateJobNames", []Job{{Name: "job1"}, {Name: "job1"}}, "duplicate job name: job1"}, + {"InvalidCharacters", []Job{{Name: "job 1"}}, "invalid characters in job name: job 1"}, + {"MixedErrors", []Job{{Name: "job1"}, {Name: "job 1"}, {Name: "job1"}}, "duplicate job name: job1"}, } - err := ValidatePath(test.jobPath, test.paths, test.pathType, "job1") - - assert.NoError(t, err) -} - -func TestValidatePath_InvalidSourcePath(t *testing.T) { - test := struct { - 
jobPath string - paths []Path - pathType string - }{ - jobPath: "/invalid/source", - paths: []Path{{Path: "/home/user"}}, - pathType: "source", + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidateJobNames(test.jobs) + + if test.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + } else { + assert.NoError(t, err) + } + }) } - - err := ValidatePath(test.jobPath, test.paths, test.pathType, "job1") - - require.Error(t, err) - assert.EqualError(t, err, "invalid path for job 'job1': source /invalid/source") } -func TestValidatePath_ValidTargetPath(t *testing.T) { - test := struct { +func TestValidatePath(t *testing.T) { + tests := []struct { + name string jobPath string paths []Path pathType string + wantErr string }{ - jobPath: "/mnt/backup/documents", - paths: []Path{{Path: "/mnt/backup"}}, - pathType: "target", + { + name: "ValidSourcePath", + jobPath: "/home/user/documents", + paths: []Path{{Path: "/home/user"}}, + pathType: "source", + }, + { + name: "InvalidSourcePath", + jobPath: "/invalid/source", + paths: []Path{{Path: "/home/user"}}, + pathType: "source", + wantErr: "invalid path for job 'job1': source /invalid/source", + }, + { + name: "ValidTargetPath", + jobPath: "/mnt/backup/documents", + paths: []Path{{Path: "/mnt/backup"}}, + pathType: "target", + }, + { + name: "InvalidTargetPath", + jobPath: "/invalid/target", + paths: []Path{{Path: "/mnt/backup"}}, + pathType: "target", + wantErr: "invalid path for job 'job1': target /invalid/target", + }, } - err := ValidatePath(test.jobPath, test.paths, test.pathType, "job1") - - assert.NoError(t, err) -} - -func TestValidatePath_InvalidTargetPath(t *testing.T) { - test := struct { - jobPath string - paths []Path - pathType string - }{ - jobPath: "/invalid/target", - paths: []Path{{Path: "/mnt/backup"}}, - pathType: "target", + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidatePath(test.jobPath, test.paths, 
test.pathType, "job1") + + if test.wantErr != "" { + require.Error(t, err) + assert.EqualError(t, err, test.wantErr) + } else { + assert.NoError(t, err) + } + }) } - - err := ValidatePath(test.jobPath, test.paths, test.pathType, "job1") - - require.Error(t, err) - assert.EqualError(t, err, "invalid path for job 'job1': target /invalid/target") } -func TestValidatePaths_ValidPaths(t *testing.T) { - test := struct { - name string - cfg Config - expectsError bool +func TestValidatePaths(t *testing.T) { + tests := []struct { + name string + cfg Config + wantErr string }{ - name: "Valid paths", - cfg: Config{ - Sources: []Path{ - {Path: "/home/user"}, - }, - Targets: []Path{ - {Path: "/mnt/backup"}, - }, - Jobs: []Job{ - {Name: "job1", Source: "/home/user/documents", Target: "/mnt/backup/documents"}, + { + name: "ValidPaths", + cfg: Config{ + Sources: []Path{{Path: "/home/user"}}, + Targets: []Path{{Path: "/mnt/backup"}}, + Jobs: []Job{{Name: "job1", Source: "/home/user/documents", Target: "/mnt/backup/documents"}}, }, }, - } - - t.Run(test.name, func(t *testing.T) { - err := ValidatePaths(test.cfg) - assert.NoError(t, err) - }) -} - -func TestValidatePaths_InvalidPaths(t *testing.T) { - test := struct { - name string - cfg Config - expectsError bool - errorMessage string - }{ - name: "Invalid paths", - cfg: Config{ - Sources: []Path{ - {Path: "/home/user"}, - }, - Targets: []Path{ - {Path: "/mnt/backup"}, - }, - Jobs: []Job{ - {Name: "job1", Source: "/invalid/source", Target: "/invalid/target"}, + { + name: "InvalidPaths", + cfg: Config{ + Sources: []Path{{Path: "/home/user"}}, + Targets: []Path{{Path: "/mnt/backup"}}, + Jobs: []Job{{Name: "job1", Source: "/invalid/source", Target: "/invalid/target"}}, }, + wantErr: "path validation failed", }, - errorMessage: "path validation failed: [" + - "invalid path for job 'job1': source /invalid/source " + - "invalid path for job 'job1': target /invalid/target]", } - t.Run(test.name, func(t *testing.T) { - err := 
ValidatePaths(test.cfg) - require.Error(t, err) - assert.EqualError(t, err, test.errorMessage) - }) + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidatePaths(test.cfg) + + if test.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + } else { + assert.NoError(t, err) + } + }) + } } func TestConfigString_ValidConfig(t *testing.T) { @@ -364,91 +305,63 @@ func TestResolveConfig(t *testing.T) { assert.Equal(t, "/backup/user/Pictures", resolvedCfg.Jobs[1].Target) } -func TestLoadResolvedConfig_FileNotFound(t *testing.T) { - _, err := LoadResolvedConfig("/nonexistent/path/config.yaml") - require.Error(t, err) - assert.Contains(t, err.Error(), "failed to open config") -} - -func TestLoadResolvedConfig_InvalidYAML(t *testing.T) { - path := testutil.WriteConfigFile(t, "{{invalid yaml") - - _, err := LoadResolvedConfig(path) - require.Error(t, err) - assert.Contains(t, err.Error(), "failed to parse YAML") -} - -func TestLoadResolvedConfig_DuplicateJobNames(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/src").Target("/tgt"). - AddJob("dup", "/src/a", "/tgt/a"). - AddJob("dup", "/src/b", "/tgt/b"). - Build()) - - _, err := LoadResolvedConfig(path) - require.Error(t, err) - assert.Contains(t, err.Error(), "job validation failed") - assert.Contains(t, err.Error(), "duplicate job name: dup") -} - -func TestLoadResolvedConfig_InvalidSourcePath(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - AddJob("job1", "/invalid/source", "/backup/stuff"). - Build()) - - _, err := LoadResolvedConfig(path) - require.Error(t, err) - assert.Contains(t, err.Error(), "path validation failed") -} - -func TestLoadResolvedConfig_OverlappingSourcePaths(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). 
- AddJob("parent", "/home/user", "/backup/user"). - AddJob("child", "/home/user/docs", "/backup/docs"). - Build()) +func TestLoadResolvedConfig(t *testing.T) { + tests := []struct { + name, config, wantErr, wantTarget string + wantJobs int + }{ + {name: "FileNotFound", wantErr: "failed to open config"}, + {name: "InvalidYAML", config: "{{invalid yaml", wantErr: "failed to parse YAML"}, + {name: "DuplicateJobNames", wantErr: "duplicate job name: dup", + config: testutil.NewConfigBuilder().Source("/src").Target("/tgt"). + AddJob("dup", "/src/a", "/tgt/a").AddJob("dup", "/src/b", "/tgt/b").Build()}, + {name: "InvalidSourcePath", wantErr: "path validation failed", + config: testutil.NewConfigBuilder().Source("/home").Target("/backup"). + AddJob("job1", "/invalid/source", "/backup/stuff").Build()}, + {name: "OverlappingSourcePaths", wantErr: "job source path validation failed", + config: testutil.NewConfigBuilder().Source("/home").Target("/backup"). + AddJob("parent", "/home/user", "/backup/user"). + AddJob("child", "/home/user/docs", "/backup/docs").Build()}, + {name: "OverlappingAllowedByExclusion", wantJobs: 2, + config: testutil.NewConfigBuilder().Source("/home").Target("/backup"). + AddJob("parent", "/home/user", "/backup/user", testutil.Exclusions("docs")). + AddJob("child", "/home/user/docs", "/backup/docs").Build()}, + {name: "OverlappingTargetPaths", wantErr: "job target path validation failed", + config: testutil.NewConfigBuilder().Source("/home").Target("/backup"). + AddJob("job1", "/home/docs", "/backup/all"). + AddJob("job2", "/home/photos", "/backup/all/photos").Build()}, + {name: "ValidConfig", wantJobs: 1, wantTarget: "/backup/docs", + config: testutil.NewConfigBuilder().Source("/home").Target("/backup"). 
+ Variable("base", "/backup").AddJob("docs", "/home/docs", "${base}/docs").Build()}, + } - _, err := LoadResolvedConfig(path) - require.Error(t, err) - assert.Contains(t, err.Error(), "job source path validation failed") -} + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + path := "/nonexistent/path/config.yaml" + if test.config != "" { + path = testutil.WriteConfigFile(t, test.config) + } -func TestLoadResolvedConfig_OverlappingSourcePathsAllowedByExclusion(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - AddJob("parent", "/home/user", "/backup/user", testutil.Exclusions("docs")). - AddJob("child", "/home/user/docs", "/backup/docs"). - Build()) + cfg, err := LoadResolvedConfig(path) - cfg, err := LoadResolvedConfig(path) - require.NoError(t, err) - assert.Len(t, cfg.Jobs, 2) -} + if test.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) -func TestLoadResolvedConfig_OverlappingTargetPaths(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - AddJob("job1", "/home/docs", "/backup/all"). - AddJob("job2", "/home/photos", "/backup/all/photos"). - Build()) + return + } - _, err := LoadResolvedConfig(path) - require.Error(t, err) - assert.Contains(t, err.Error(), "job target path validation failed") -} + require.NoError(t, err) -func TestLoadResolvedConfig_ValidConfig(t *testing.T) { - path := testutil.WriteConfigFile(t, testutil.NewConfigBuilder(). - Source("/home").Target("/backup"). - Variable("base", "/backup"). - AddJob("docs", "/home/docs", "${base}/docs"). 
- Build()) + if test.wantJobs > 0 { + assert.Len(t, cfg.Jobs, test.wantJobs) + } - cfg, err := LoadResolvedConfig(path) - require.NoError(t, err) - assert.Len(t, cfg.Jobs, 1) - assert.Equal(t, "/backup/docs", cfg.Jobs[0].Target) + if test.wantTarget != "" { + assert.Equal(t, test.wantTarget, cfg.Jobs[0].Target) + } + }) + } } func TestConfigApply_VersionInfoSuccess(t *testing.T) { diff --git a/backup/internal/test/job_test.go b/backup/internal/test/job_test.go index 5cb851d..2c1a403 100644 --- a/backup/internal/test/job_test.go +++ b/backup/internal/test/job_test.go @@ -10,42 +10,49 @@ import ( "gopkg.in/yaml.v3" ) -func TestApply_DisabledJob_ReturnsSkippedAndRunIsNotCalled(t *testing.T) { - mockJobCommand := NewMockJobCommand(t) - - disabledJob := testutil.NewTestJob(testutil.WithEnabled(false)) - - // No expectations set - Run should NOT be called for disabled jobs - - status := disabledJob.Apply(mockJobCommand) - - assert.Equal(t, Skipped, status) -} - -func TestApply_JobFailing_RunIsCalledAndReturnsFailure(t *testing.T) { - mockJobCommand := NewMockJobCommand(t) - - job := testutil.NewTestJob() - - // Set expectation that Run will be called and return Failure - mockJobCommand.EXPECT().Run(job).Return(Failure).Once() - - status := job.Apply(mockJobCommand) - - assert.Equal(t, Failure, status) -} - -func TestApply_JobSucceeds_RunIsCalledAndReturnsSuccess(t *testing.T) { - mockJobCommand := NewMockJobCommand(t) +func TestApply(t *testing.T) { + tests := []struct { + name string + enabled bool + mockReturn JobStatus + wantStatus JobStatus + expectRun bool + }{ + { + name: "DisabledJob_ReturnsSkipped", + enabled: false, + wantStatus: Skipped, + }, + { + name: "JobFailing_ReturnsFailure", + enabled: true, + mockReturn: Failure, + wantStatus: Failure, + expectRun: true, + }, + { + name: "JobSucceeds_ReturnsSuccess", + enabled: true, + mockReturn: Success, + wantStatus: Success, + expectRun: true, + }, + } - job := testutil.NewTestJob() + for _, test := range tests { + 
t.Run(test.name, func(t *testing.T) { + mockJobCommand := NewMockJobCommand(t) + job := testutil.NewTestJob(testutil.WithEnabled(test.enabled)) - // Set expectation that Run will be called and return Success - mockJobCommand.EXPECT().Run(job).Return(Success).Once() + if test.expectRun { + mockJobCommand.EXPECT().Run(job).Return(test.mockReturn).Once() + } - status := job.Apply(mockJobCommand) + status := job.Apply(mockJobCommand) - assert.Equal(t, Success, status) + assert.Equal(t, test.wantStatus, status) + }) + } } func TestUnmarshalYAML_InvalidNode(t *testing.T) { diff --git a/backup/internal/test/rsync_test.go b/backup/internal/test/rsync_test.go index d2b7f67..19f7a60 100644 --- a/backup/internal/test/rsync_test.go +++ b/backup/internal/test/rsync_test.go @@ -19,136 +19,99 @@ var errCommandNotFound = errors.New("command not found") const rsyncPath = "/usr/bin/rsync" func TestArgumentsForJob(t *testing.T) { - job := Job{ - Delete: true, - Source: "/home/user/Music/", - Target: "/target/user/music/home", - Exclusions: []string{"*.tmp", "node_modules/"}, + tests := []struct { + name string + job Job + logPath string + simulate bool + wantArgs []string + }{ + { + name: "SimulateWithDelete", + job: Job{ + Delete: true, Source: "/home/user/Music/", Target: "/target/user/music/home", + Exclusions: []string{"*.tmp", "node_modules/"}, + }, + simulate: true, + wantArgs: []string{ + "--dry-run", "-aiv", "--stats", "--delete", + "--exclude=*.tmp", "--exclude=node_modules/", + "/home/user/Music/", "/target/user/music/home", + }, + }, + { + name: "RealWithLogPath", + job: Job{ + Delete: false, Source: "/home/user/Documents/", Target: "/backup/user/documents", + Exclusions: []string{"*.log", "temp/"}, + }, + logPath: "/var/log/rsync.log", + wantArgs: []string{ + "-aiv", "--stats", + "--log-file=/var/log/rsync.log", + "--exclude=*.log", "--exclude=temp/", + "/home/user/Documents/", "/backup/user/documents", + }, + }, } - args := ArgumentsForJob(job, "", true) - expectedArgs 
:= []string{ - "--dry-run", "-aiv", "--stats", "--delete", - "--exclude=*.tmp", "--exclude=node_modules/", - "/home/user/Music/", "/target/user/music/home", - } - - assert.Equal(t, strings.Join(expectedArgs, " "), strings.Join(args, " ")) -} - -func TestArgumentsForJob_WithLogPath_(t *testing.T) { - job := Job{ - Delete: false, - Source: "/home/user/Documents/", - Target: "/backup/user/documents", - Exclusions: []string{"*.log", "temp/"}, - } - args := ArgumentsForJob(job, "/var/log/rsync.log", false) - - expectedArgs := []string{ - "-aiv", "--stats", - "--log-file=/var/log/rsync.log", - "--exclude=*.log", "--exclude=temp/", - "/home/user/Documents/", "/backup/user/documents", - } - - assert.Equal(t, strings.Join(expectedArgs, " "), strings.Join(args, " ")) -} - -func TestGetVersionInfo_Success(t *testing.T) { - mockExec := NewMockExec(t) - rsync := SharedCommand{ - BinPath: rsyncPath, - Shell: mockExec, - Output: io.Discard, - } - - // Set expectation for Execute call - mockExec.EXPECT().Execute(rsyncPath, mock.MatchedBy(func(args []string) bool { - return len(args) == 1 && args[0] == RsyncVersionFlag - })).Return([]byte("rsync version 3.2.3 protocol version 31\n"), nil).Once() - - versionInfo, fullpath, err := rsync.GetVersionInfo() - - require.NoError(t, err) - assert.Equal(t, rsyncPath, fullpath) - assert.Equal(t, "rsync version 3.2.3 protocol version 31\n", versionInfo) -} - -func TestGetVersionInfo_CommandError(t *testing.T) { - mockExec := NewMockExec(t) - rsync := SharedCommand{ - BinPath: rsyncPath, - Shell: mockExec, - Output: io.Discard, - } - - // Set expectation for Execute call to return error - mockExec.EXPECT().Execute(rsyncPath, mock.MatchedBy(func(args []string) bool { - return len(args) == 1 && args[0] == RsyncVersionFlag - })).Return(nil, errCommandNotFound).Once() - - versionInfo, fullpath, err := rsync.GetVersionInfo() - - require.Error(t, err) - assert.Empty(t, fullpath) - assert.Empty(t, versionInfo) -} + for _, test := range tests { + 
t.Run(test.name, func(t *testing.T) { + args := ArgumentsForJob(test.job, test.logPath, test.simulate) -func TestGetVersionInfo_InvalidOutput(t *testing.T) { - mockExec := NewMockExec(t) - rsync := SharedCommand{ - BinPath: rsyncPath, - Shell: mockExec, - Output: io.Discard, + assert.Equal(t, strings.Join(test.wantArgs, " "), strings.Join(args, " ")) + }) } - - // Set expectation for Execute call to return invalid output - mockExec.EXPECT().Execute(rsyncPath, mock.MatchedBy(func(args []string) bool { - return len(args) == 1 && args[0] == RsyncVersionFlag - })).Return([]byte("invalid output"), nil).Once() - - versionInfo, fullpath, err := rsync.GetVersionInfo() - - require.Error(t, err) - assert.Empty(t, fullpath) - assert.Empty(t, versionInfo) } -func TestGetVersionInfo_EmptyPath(t *testing.T) { - mockExec := NewMockExec(t) - rsync := SharedCommand{ - BinPath: "", - Shell: mockExec, - Output: io.Discard, +func TestGetVersionInfo(t *testing.T) { + tests := []struct { + name, binPath, wantVersion, wantPath, wantErr string + mockOutput []byte + mockErr error + }{ + {name: "Success", binPath: rsyncPath, + mockOutput: []byte("rsync version 3.2.3 protocol version 31\n"), + wantVersion: "rsync version 3.2.3 protocol version 31\n", wantPath: rsyncPath}, + {name: "CommandError", binPath: rsyncPath, + mockErr: errCommandNotFound, wantErr: "error fetching rsync version"}, + {name: "InvalidOutput", binPath: rsyncPath, + mockOutput: []byte("invalid output"), wantErr: "invalid rsync version output"}, + {name: "EmptyPath", + wantErr: `rsync path must be an absolute path: ""`}, + {name: "IncompletePath", binPath: "bin/rsync", + wantErr: `rsync path must be an absolute path: "bin/rsync"`}, } - // No expectations set - should fail before calling Execute due to path validation - - versionInfo, fullpath, err := rsync.GetVersionInfo() - - require.Error(t, err) - require.EqualError(t, err, "rsync path must be an absolute path: \"\"") - assert.Empty(t, versionInfo) - assert.Empty(t, 
fullpath) -} - -func TestGetVersionInfo_IncompletePath(t *testing.T) { - mockExec := NewMockExec(t) - rsync := SharedCommand{ - BinPath: "bin/rsync", - Shell: mockExec, - Output: io.Discard, + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + mockExec := NewMockExec(t) + rsync := SharedCommand{ + BinPath: test.binPath, + Shell: mockExec, + Output: io.Discard, + } + + if strings.HasPrefix(test.binPath, "/") { + mockExec.EXPECT().Execute(rsyncPath, mock.MatchedBy(func(args []string) bool { + return len(args) == 1 && args[0] == RsyncVersionFlag + })).Return(test.mockOutput, test.mockErr).Once() + } + + versionInfo, fullpath, err := rsync.GetVersionInfo() + + if test.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + assert.Empty(t, versionInfo) + assert.Empty(t, fullpath) + } else { + require.NoError(t, err) + assert.Equal(t, test.wantPath, fullpath) + assert.Equal(t, test.wantVersion, versionInfo) + } + }) } - - // No expectations set - should fail before calling Execute due to path validation - - versionInfo, fullpath, err := rsync.GetVersionInfo() - - require.Error(t, err) - require.EqualError(t, err, "rsync path must be an absolute path: \"bin/rsync\"") - assert.Empty(t, versionInfo) - assert.Empty(t, fullpath) } func TestNewSharedCommand(t *testing.T) { From 03c9a98112a4a4c48e7ce852c58e3e3f4d4a56d4 Mon Sep 17 00:00:00 2001 From: Jaap de Haan <261428+jdehaan@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:40:27 +0000 Subject: [PATCH 6/6] docs: A more abstract testing guide --- TESTING_GUIDE.md | 244 +++++++++++------------------------------------ 1 file changed, 54 insertions(+), 190 deletions(-) diff --git a/TESTING_GUIDE.md b/TESTING_GUIDE.md index 74b549a..17762f3 100644 --- a/TESTING_GUIDE.md +++ b/TESTING_GUIDE.md @@ -1,227 +1,91 @@ # Testing Guide -## Overview - -All tests use dependency injection — no global state mutation. 
Key patterns: - -- **`testify`** for assertions (`require` for fatal checks, `assert` for non-fatal) -- **`mockery`** for generated mocks (`MockExec`, `MockJobCommand`) -- **`afero`** for in-memory filesystem abstraction -- **`bytes.Buffer`** for capturing output -- **Table-driven tests** for multiple input scenarios +## Approach + +- **Dependency injection** over global state — every external dependency is injected via an interface or value +- **`testify`** for assertions — `require` (fatal) and `assert` (non-fatal) +- **`mockery`** for generated interface mocks +- **`afero`** for in-memory filesystem abstraction in unit tests +- **Table-driven tests** — canonical Go pattern; use whenever 2+ cases share the same structure +- **Declarative test data builders** in a shared `testutil` package — avoid inline YAML and repeated struct literals - Test files live in `/test/` subdirectories -## Test Architecture - -``` -backup/ - cmd/test/ - commands_test.go # CLI command tests (all commands, stubbed exec) - integration_test.go # Integration tests with real rsync (build tag: integration) - root_test.go # Root command help output - internal/test/ - check_test.go # CoverageChecker tests (afero-based) - config_test.go # Config loading, validation, Apply - helper_test.go # NormalizePath, CreateMainLogger - job_test.go # Job.Apply with MockJobCommand - rsync_test.go # Command constructors, Run methods, GetVersionInfo - mock_exec_test.go # Generated mock for Exec interface - mock_jobcommand_test.go # Generated mock for JobCommand interface -``` - -## Dependency Injection Points - -| Dependency | Interface/Type | Real | Test | -| ----------------- | --------------------- | ----------------------------------------------- | ------------------------ | -| Command execution | `internal.Exec` | `OsExec` | `MockExec` or `stubExec` | -| Job runner | `internal.JobCommand` | `ListCommand`, `SyncCommand`, `SimulateCommand` | `MockJobCommand` | -| Filesystem | `afero.Fs` | `afero.NewOsFs()` 
| `afero.NewMemMapFs()` | -| Output | `io.Writer` | `os.Stdout` / `cmd.OutOrStdout()` | `bytes.Buffer` | -| Logging | `*log.Logger` | File-backed logger | `log.New(&buf, "", 0)` | -| Time | `time.Time` | `time.Now()` | Fixed `time.Date(...)` | - -## Command-Level Tests (cmd/test/) - -Commands are tested through cobra's `Execute()` with captured stdout: - -```go -// Stub for Exec interface — lightweight alternative to MockExec for cmd tests -type stubExec struct { - output []byte - err error -} - -func (s *stubExec) Execute(_ string, _ ...string) ([]byte, error) { - return s.output, s.err -} - -// Helper: run a command with full dependency injection -func executeCommandWithDeps(t *testing.T, fs afero.Fs, shell internal.Exec, args ...string) (string, error) { - t.Helper() - rootCmd := cmd.BuildRootCommandWithDeps(fs, shell) - var stdout bytes.Buffer - rootCmd.SetOut(&stdout) - rootCmd.SetErr(&bytes.Buffer{}) - rootCmd.SetArgs(args) - err := rootCmd.Execute() - return stdout.String(), err -} -``` +## Dependency Injection -Usage: +All external dependencies are abstracted behind interfaces or injected types. 
In tests, swap real implementations for mocks, stubs, or in-memory alternatives: -```go -func TestRun_ValidConfig(t *testing.T) { - cfgPath := writeConfigFile(t, `...yaml...`) - shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} +| What | Abstraction | In Tests | +| ----------------- | ---------------- | ---------------------------------------------- | +| Command execution | Interface | Generated mock or lightweight stub | +| Job runner | Interface | Generated mock | +| Filesystem | `afero.Fs` | `afero.NewMemMapFs()` | +| Output | `io.Writer` | `bytes.Buffer` or `io.Discard` | +| Logging | `*log.Logger` | Logger writing to a `bytes.Buffer` | +| Time | `time.Time` | Fixed value via `time.Date(...)` | - stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "run", "--config", cfgPath) +See `internal/exec.go` and `internal/job_command.go` for the interface definitions. See `cmd/root.go` for the builder functions that wire dependencies. - require.NoError(t, err) - assert.Contains(t, stdout, "Job: docs") - assert.Contains(t, stdout, "Status [docs]: SUCCESS") -} -``` +## Test Data Builders -Three builder levels available: +A shared `internal/testutil/` package provides declarative helpers to reduce boilerplate: -- `BuildRootCommand()` — production defaults (real OS filesystem, real exec) -- `BuildRootCommandWithFs(fs)` — custom filesystem, real exec -- `BuildRootCommandWithDeps(fs, shell)` — full control for testing +- **Config builder** — fluent API to generate YAML config strings without raw string literals +- **Config file writer** — writes content to a temp file and returns the path +- **Job builder** — creates a `Job` struct with sensible defaults; override individual fields via functional options -## Internal Tests — Mockery Mocks +See `internal/testutil/*.go` for the full API and available options. 
-Generated mocks use the expectation pattern: +## Table-Driven Tests -```go -func TestConfigApply_VersionInfoSuccess(t *testing.T) { - mockCmd := NewMockJobCommand(t) - var output bytes.Buffer - logger := log.New(&bytes.Buffer{}, "", 0) +Use table-driven tests whenever multiple cases share the same test structure and differ only in inputs and expectations. Define a slice of test structs, iterate with `t.Run()`. - cfg := Config{ - Jobs: []Job{ - {Name: "job1", Source: "/src/", Target: "/dst/", Enabled: true}, - }, - } +**When NOT to use**: when cases need fundamentally different mock wiring, different assertion logic, or complex per-case setup. If you'd need a `func(...)` closure field in the table struct, keep tests separate. - mockCmd.EXPECT().GetVersionInfo().Return("rsync version 3.2.3", "/usr/bin/rsync", nil).Once() - mockCmd.EXPECT().Run(mock.AnythingOfType("internal.Job")).Return(Success).Once() +Browse the test files for examples — most validation, path-checking, and argument-building tests follow this pattern. - err := cfg.Apply(mockCmd, logger, &output) - require.NoError(t, err) -} -``` +## Command-Level Tests -## CoverageChecker Tests (afero) +CLI commands are tested through cobra's `Execute()` with captured stdout/stderr. Helper functions in the test files wrap the root command builder at different injection levels (default deps, custom filesystem, or full control). -The `CoverageChecker` uses `afero.Fs` so tests never hit the real filesystem: +A lightweight exec stub (implementing the `Exec` interface inline) is used instead of full mocks for command-level tests where only the output matters. -```go -func newTestChecker() (*CoverageChecker, *bytes.Buffer) { - var buf bytes.Buffer - checker := &CoverageChecker{ - Logger: log.New(&buf, "", 0), - Fs: afero.NewMemMapFs(), - } - return checker, &buf -} -``` +## Generated Mocks -## Deterministic Time +Generated mocks (via `mockery`) use the `.EXPECT()` pattern for setting expectations. 
Each test creates its own mock instance — no shared state between tests. -`CreateMainLogger` accepts `time.Time` for predictable log paths: - -```go -func fixedTime() time.Time { - return time.Date(2025, 6, 15, 14, 30, 45, 0, time.UTC) -} - -func TestCreateMainLogger_DeterministicLogPath(t *testing.T) { - _, logPath, cleanup, err := CreateMainLogger("backup.yaml", true, fixedTime()) - require.NoError(t, err) - defer cleanup() - assert.Equal(t, "logs/sync-2025-06-15T14-30-45-backup-sim", logPath) -} -``` +Mock configuration: `.mockery.yml`. See `MOCKERY_INTEGRATION.md` for regeneration instructions. ## Integration Tests -Integration tests live in `cmd/test/integration_test.go` behind the `//go:build integration` tag. They exercise the full CLI with **real rsync** against temp directories — no mocks or stubs. - -### Build Tag - -```go -//go:build integration -``` - -Tests are excluded from `make test` and `make check-coverage`. Run them separately: +Integration tests are gated behind a build tag (`//go:build integration`). They exercise the full CLI with real rsync against temp directories — no mocks or stubs. ```sh -make test-integration # go test -race -tags=integration ./... 
-v +make test-integration ``` -### Design Principles - -- **Real rsync** — uses `/usr/bin/rsync` via `BuildRootCommand()` (production defaults) -- **Real filesystem** — creates temp directories via `t.TempDir()`, cleaned up automatically -- **Reproducible** — each test sets up its own isolated source/target directory pair -- **No mocks** — validates actual rsync behavior (file transfer, deletion, exclusions) - -### Scenarios Covered - -| Category | Tests | What's Verified | -| -------------------- | -------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | -| **run — basic** | `BasicSync`, `IdempotentSync`, `PartialChanges`, `EmptySource`, `DeepHierarchy` | Files are synced correctly; re-sync is idempotent; only modified files transfer | -| **run — delete** | `DeleteRemovesExtraFiles`, `NoDeletePreservesExtraFiles` | `--delete` flag removes stale files; omitting it preserves them | -| **run — exclusions** | `Exclusions` | `--exclude` patterns prevent syncing of matching paths | -| **run — jobs** | `DisabledJobSkipped`, `MultipleJobs`, `MixedJobsSummary`, `VariableSubstitution` | Multi-job orchestration, enabled/disabled, `${var}` resolution | -| **simulate** | `NoChanges`, `ShowsChanges`, `SimulateThenRun` | Dry-run produces no side effects; subsequent run works normally | -| **list** | `ShowsCommands` | Prints rsync commands without executing them | -| **check-coverage** | `FullCoverage`, `IncompleteCoverage` | Coverage checker on real directory trees | -| **config** | `ConfigShow`, `ConfigValidate_Valid`, `ConfigValidate_OverlappingSources` | End-to-end config parsing and validation | -| **version** | `Version` | Real rsync version output | - -### Example - -```go -func TestIntegration_Run_BasicSync(t *testing.T) { - src, dst := setupDirs(t) - writeFile(t, filepath.Join(src, "hello.txt"), "hello world") - - cfgPath := writeIntegrationConfig(t, ` -sources: - 
- path: "`+src+`" -targets: - - path: "`+dst+`" -jobs: - - name: "basic" - source: "`+src+`/" - target: "`+dst+`/" - delete: false -`) - - stdout, err := executeIntegrationCommand(t, "run", "--config", cfgPath) - require.NoError(t, err) - assert.Contains(t, stdout, "Status [basic]: SUCCESS") - assert.Equal(t, "hello world", readFileContent(t, filepath.Join(dst, "hello.txt"))) -} -``` +Design principles: +- Real filesystem via `t.TempDir()`, real rsync via production command builder +- Each test sets up its own isolated directory pair +- Config built using the shared `testutil` builder + +Scenarios covered: sync (basic, idempotent, partial, empty, deep), delete/preserve, exclusions, disabled/multiple jobs, variable substitution, simulate, list, check-coverage, config show/validate, version. ## Running Tests ```sh -make test # go test -race ./... -v (unit tests only) -make test-integration # go test -race -tags=integration ./... -v (includes integration) -make check-coverage # Fail if coverage < threshold (unit tests only) -make report-coverage # Generate HTML coverage report +make test # Unit tests +make test-integration # Integration tests (requires rsync) +make check-coverage # Fail if below threshold +make report-coverage # HTML coverage report ``` ## Key Principles 1. **Inject, don't hardcode** — all external dependencies go through interfaces -2. **Never hit the real filesystem** in unit tests — use `afero.NewMemMapFs()` -3. **Use `require` for errors, `assert` for values** — `require` stops the test on failure -4. **Table-driven tests** for multiple input/output scenarios -5. **Scope mocks to individual tests** — each test creates its own mock instance -6. **Defer cleanup** — `CreateMainLogger` returns a cleanup function; always `defer` it +2. **Never hit the real filesystem** in unit tests — use in-memory filesystem +3. **`require` for errors, `assert` for values** — `require` stops the test on failure +4. 
**Table-driven tests** for 2+ cases with the same structure
+5. **Use shared builders** — avoid inline YAML and repeated struct literals
+6. **Scope mocks per test** — no shared mock state
+7. **Defer cleanup** for resources that return a cleanup function
+8. **Keep functions short** — use compact table entries and data-driven fields over closures