diff --git a/cmd/compliance/README.md b/cmd/compliance/README.md new file mode 100644 index 00000000..25708948 --- /dev/null +++ b/cmd/compliance/README.md @@ -0,0 +1,61 @@ +# Compliance + +The `compliance` command provides evidence collection and control evaluation workflows for Identity Security Cloud tenants. + +## Commands + +### Collect evidence + +```shell +sail compliance collect --output evidence.json --period 90 --pretty +``` + +This command gathers governance and security-relevant data into a single evidence bundle. + +### Evaluate controls + +```shell +sail compliance evaluate --input evidence.json --controls nist-800-53 --output findings.json +``` + +This command evaluates the evidence bundle against a control pack and emits findings. + +You can also write a markdown report: + +```shell +sail compliance evaluate --input evidence.json --output findings.json --output-md findings.md +``` + +## Control packs + +The default embedded control pack is `nist-800-53`. + +You can provide a custom control pack path: + +```shell +sail compliance evaluate --input evidence.json --controls ./controls/custom.yaml +``` + +## Output schema + +### Evidence bundle + +The evidence bundle includes: + +- `metadata`: schema and generation metadata +- `data`: raw API payloads by collector +- `summary`: collector success/failure summary + +### Evaluation result + +The evaluation result includes: + +- `metadata`: copied from evidence bundle metadata +- `controls`: per-control and per-check status +- `findings`: failed check findings +- `summary`: roll-up counts including critical/high findings + +## CI behavior + +- `sail compliance collect` writes output even when some collectors fail, and returns non-zero if any collector fails. +- `sail compliance evaluate` writes outputs and returns non-zero when any checks fail. 
diff --git a/cmd/compliance/collect.go b/cmd/compliance/collect.go new file mode 100644 index 00000000..7af5f156 --- /dev/null +++ b/cmd/compliance/collect.go @@ -0,0 +1,452 @@ +package compliance + +import ( + "context" + _ "embed" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/charmbracelet/log" + sailpoint "github.com/sailpoint-oss/golang-sdk/v2" + beta "github.com/sailpoint-oss/golang-sdk/v2/api_beta" + api_v2024 "github.com/sailpoint-oss/golang-sdk/v2/api_v2024" + v3 "github.com/sailpoint-oss/golang-sdk/v2/api_v3" + "github.com/sailpoint-oss/sailpoint-cli/internal/client" + "github.com/sailpoint-oss/sailpoint-cli/internal/config" + "github.com/sailpoint-oss/sailpoint-cli/internal/util" + "github.com/spf13/cobra" +) + +//go:embed collect.md +var collectHelp string + +type collectorContext struct { + apiClient *sailpoint.APIClient + rawClient client.Client + period int +} + +func newCollectCommand() *cobra.Command { + help := util.ParseHelp(collectHelp) + + var outputFile string + var periodDays int + var pretty bool + + cmd := &cobra.Command{ + Use: "collect", + Short: "Collect tenant evidence into a compliance bundle", + Long: help.Long, + Example: help.Example, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + if periodDays < 1 { + return fmt.Errorf("period must be greater than 0") + } + + if err := config.InitConfig(); err != nil { + return err + } + + apiClient, err := config.InitAPIClient(false) + if err != nil { + return err + } + + cfg, err := config.GetConfig() + if err != nil { + return err + } + + metadataTenant := config.GetTenantUrl() + if strings.TrimSpace(metadataTenant) == "" { + metadataTenant = config.GetActiveEnvironment() + } + + bundle := EvidenceBundle{ + Metadata: Metadata{ + SchemaVersion: "1.0.0", + GeneratedAt: time.Now().UTC(), + SailCLIVersion: sailVersion(cmd), + PeriodDays: periodDays, + Tenant: metadataTenant, + }, + Data: EvidenceData{ + Events: 
EventSummary{PeriodDays: periodDays}, + }, + Summary: CollectionSummary{ + Errors: []string{}, + }, + } + + collectors := []struct { + name string + run func(context.Context, *collectorContext, *EvidenceBundle) error + }{ + {name: "auth_org_config", run: collectAuthOrgConfig}, + {name: "password_policies", run: collectPasswordPolicies}, + {name: "sod_policies", run: collectSODPolicies}, + {name: "certifications", run: collectCertifications}, + {name: "identities", run: collectIdentities}, + {name: "roles", run: collectRoles}, + {name: "access_profiles", run: collectAccessProfiles}, + {name: "sources", run: collectSources}, + {name: "lifecycle_states", run: collectLifecycleStates}, + {name: "workflows", run: collectWorkflows}, + {name: "governance_groups", run: collectGovernanceGroups}, + {name: "provisioning_events", run: collectProvisioningEvents}, + {name: "password_events", run: collectPasswordEvents}, + } + + bundle.Summary.TotalCollectors = len(collectors) + + ctx := context.Background() + state := &collectorContext{ + apiClient: apiClient, + rawClient: client.NewSpClient(cfg), + period: periodDays, + } + + for _, collector := range collectors { + log.Info("Running compliance collector", "collector", collector.name) + err := collector.run(ctx, state, &bundle) + if err != nil { + bundle.Summary.Failed++ + message := fmt.Sprintf("%s: %v", collector.name, err) + bundle.Summary.Errors = append(bundle.Summary.Errors, message) + log.Error("Collector failed", "collector", collector.name, "error", err) + continue + } + bundle.Summary.Succeeded++ + } + + if len(bundle.Summary.Errors) == 0 { + bundle.Summary.Errors = nil + } + + if err := writeJSONOutput(outputFile, bundle, pretty); err != nil { + return err + } + + log.Info("Compliance evidence bundle written", "output", outputFile) + + if bundle.Summary.Failed > 0 { + return fmt.Errorf("collection completed with %d failed collectors", bundle.Summary.Failed) + } + + return nil + }, + } + + 
cmd.Flags().StringVarP(&outputFile, "output", "o", "evidence.json", "Output file path for evidence bundle") + cmd.Flags().IntVarP(&periodDays, "period", "p", 90, "Lookback period in days for event collectors") + cmd.Flags().BoolVar(&pretty, "pretty", false, "Pretty-print output JSON") + + return cmd +} + +func collectAuthOrgConfig(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + resp, err := state.rawClient.Get(ctx, "/v3/auth-org", map[string]string{"Accept": "application/json"}) + if err == nil { + defer resp.Body.Close() + body, readErr := io.ReadAll(resp.Body) + if readErr == nil && resp.StatusCode >= 200 && resp.StatusCode < 300 { + bundle.Data.AuthOrgConfig = json.RawMessage(body) + return nil + } + } + + objects, exportErr := exportSPConfigObjects(ctx, state.apiClient, "AUTH_ORG") + if exportErr != nil { + if err != nil { + return fmt.Errorf("raw auth-org request failed (%v); fallback export failed (%w)", err, exportErr) + } + return fmt.Errorf("fallback export failed: %w", exportErr) + } + if len(objects) == 0 { + return fmt.Errorf("auth org export returned no objects") + } + if len(objects) > 1 { + log.Warn("AUTH_ORG export returned multiple objects, selecting first", "count", len(objects)) + } + + raw, err := marshalRawMessage(objects[0]) + if err != nil { + return err + } + bundle.Data.AuthOrgConfig = raw + return nil +} + +func collectPasswordPolicies(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := sailpoint.PaginateWithDefaults[beta.PasswordPolicyV3Dto](state.apiClient.Beta.PasswordPoliciesAPI.ListPasswordPolicies(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.PasswordPolicies = raw + return nil +} + +func collectSODPolicies(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := 
sailpoint.PaginateWithDefaults[beta.SodPolicy](state.apiClient.Beta.SODPoliciesAPI.ListSodPolicies(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.SODPolicies = raw + return nil +} + +func collectCertifications(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := sailpoint.PaginateWithDefaults[beta.CertificationDto](state.apiClient.Beta.CertificationsAPI.ListCertifications(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.Certifications = raw + return nil +} + +func collectIdentities(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, err := searchAll(ctx, state.apiClient, "*", v3.INDEX_IDENTITIES) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.Identities = raw + return nil +} + +func collectRoles(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, err := searchAll(ctx, state.apiClient, "*", v3.INDEX_ROLES) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.Roles = raw + return nil +} + +func collectAccessProfiles(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, err := searchAll(ctx, state.apiClient, "*", v3.INDEX_ACCESSPROFILES) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.AccessProfiles = raw + return nil +} + +func collectSources(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := sailpoint.PaginateWithDefaults[api_v2024.Source](state.apiClient.V2024.SourcesAPI.ListSources(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { 
+ return err + } + bundle.Data.Sources = raw + return nil +} + +func collectLifecycleStates(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + objects, err := exportSPConfigObjects(ctx, state.apiClient, "LIFECYCLE_STATE") + if err != nil { + return err + } + raw, err := marshalRawMessage(objects) + if err != nil { + return err + } + bundle.Data.LifecycleStates = raw + return nil +} + +func collectWorkflows(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := sailpoint.PaginateWithDefaults[beta.Workflow](state.apiClient.Beta.WorkflowsAPI.ListWorkflows(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.Workflows = raw + return nil +} + +func collectGovernanceGroups(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + results, _, err := sailpoint.PaginateWithDefaults[api_v2024.WorkgroupDto](state.apiClient.V2024.GovernanceGroupsAPI.ListWorkgroups(ctx)) + if err != nil { + return err + } + raw, err := marshalRawMessage(results) + if err != nil { + return err + } + bundle.Data.GovernanceGroups = raw + return nil +} + +func collectProvisioningEvents(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + query := fmt.Sprintf("(type:provisioning AND created:[now-%dd TO now])", state.period) + results, err := searchAll(ctx, state.apiClient, query, v3.INDEX_EVENTS) + if err != nil { + return err + } + bundle.Data.Events.ProvisioningCount = len(results) + return nil +} + +func collectPasswordEvents(ctx context.Context, state *collectorContext, bundle *EvidenceBundle) error { + query := fmt.Sprintf("(type:PASSWORD_ACTION AND created:[now-%dd TO now])", state.period) + results, err := searchAll(ctx, state.apiClient, query, v3.INDEX_EVENTS) + if err != nil { + return err + } + bundle.Data.Events.PasswordCount = len(results) + return nil +} + +func exportSPConfigObjects(ctx 
context.Context, apiClient *sailpoint.APIClient, includeType string) ([]map[string]interface{}, error) { + description := fmt.Sprintf("compliance collect %s", includeType) + job, _, err := apiClient.Beta.SPConfigAPI.ExportSpConfig(ctx).ExportPayload(beta.ExportPayload{ + Description: &description, + IncludeTypes: []string{includeType}, + }).Execute() + if err != nil { + return nil, err + } + + for attempt := 0; attempt < 90; attempt++ { + status, _, err := apiClient.Beta.SPConfigAPI.GetSpConfigExportStatus(ctx, job.JobId).Execute() + if err != nil { + return nil, err + } + + switch status.Status { + case "NOT_STARTED", "IN_PROGRESS": + time.Sleep(2 * time.Second) + continue + case "COMPLETE": + exported, _, err := apiClient.Beta.SPConfigAPI.GetSpConfigExport(ctx, job.JobId).Execute() + if err != nil { + return nil, err + } + objects := make([]map[string]interface{}, 0, len(exported.Objects)) + for _, obj := range exported.Objects { + if obj.Self != nil && obj.Self.Type != nil && *obj.Self.Type != includeType { + continue + } + if obj.Object != nil { + objects = append(objects, obj.Object) + } + } + return objects, nil + case "FAILED": + return nil, fmt.Errorf("spconfig export failed for %s", includeType) + case "CANCELLED": + return nil, fmt.Errorf("spconfig export cancelled for %s", includeType) + default: + return nil, fmt.Errorf("unexpected spconfig export status for %s: %s", includeType, status.Status) + } + } + + return nil, fmt.Errorf("timed out waiting for spconfig export for %s", includeType) +} + +func searchAll(ctx context.Context, apiClient *sailpoint.APIClient, query string, index v3.Index) ([]map[string]interface{}, error) { + search := v3.NewSearch() + search.SetIndices([]v3.Index{index}) + queryObj := v3.NewQuery() + queryObj.SetQuery(query) + search.SetQuery(*queryObj) + + const limit int32 = 250 + var offset int32 + results := make([]map[string]interface{}, 0) + + for { + page, resp, err := 
apiClient.V3.SearchAPI.SearchPost(ctx).Search(*search).Limit(limit).Offset(offset).Execute() + if err != nil { + if resp != nil { + return nil, fmt.Errorf("search failed for index %s query %q: %s: %w", index, query, resp.Status, err) + } + return nil, fmt.Errorf("search failed for index %s query %q: %w", index, query, err) + } + + results = append(results, page...) + if len(page) < int(limit) { + break + } + offset += limit + } + + return results, nil +} + +func marshalRawMessage(value interface{}) (json.RawMessage, error) { + payload, err := json.Marshal(value) + if err != nil { + return nil, err + } + return json.RawMessage(payload), nil +} + +func writeJSONOutput(outputPath string, value interface{}, pretty bool) error { + dir := filepath.Dir(outputPath) + if dir != "." && dir != "" { + if err := os.MkdirAll(dir, 0o755); err != nil { + return err + } + } + + var payload []byte + var err error + if pretty { + payload, err = json.MarshalIndent(value, "", " ") + } else { + payload, err = json.Marshal(value) + } + if err != nil { + return err + } + + return os.WriteFile(outputPath, payload, 0o644) +} + +func sailVersion(cmd *cobra.Command) string { + root := cmd.Root() + if root == nil { + return "unknown" + } + if strings.TrimSpace(root.Version) == "" { + return "unknown" + } + return root.Version +} diff --git a/cmd/compliance/collect.md b/cmd/compliance/collect.md new file mode 100644 index 00000000..1eeda106 --- /dev/null +++ b/cmd/compliance/collect.md @@ -0,0 +1,15 @@ +==Long== +# Collect + +Collect compliance-relevant SailPoint tenant evidence and write it to a single JSON evidence bundle. + +The command attempts all collectors, records failures in the summary, writes output, and returns a non-zero exit code when any collector fails. 

====

==Example==
```bash
sail compliance collect --output evidence.json --period 90
sail compliance collect -o artifacts/evidence.json -p 30 --pretty
```
====
diff --git a/cmd/compliance/compliance.go b/cmd/compliance/compliance.go new file mode 100644 index 00000000..5d577564 --- /dev/null +++ b/cmd/compliance/compliance.go @@ -0,0 +1,32 @@
package compliance

import (
	_ "embed"

	"github.com/sailpoint-oss/sailpoint-cli/internal/util"
	"github.com/spf13/cobra"
)

//go:embed compliance.md
var complianceHelp string

// NewComplianceCommand builds the root `compliance` command and wires in the
// `collect` and `evaluate` subcommands. Running it without a subcommand just
// prints help.
func NewComplianceCommand() *cobra.Command {
	help := util.ParseHelp(complianceHelp)

	cmd := &cobra.Command{
		Use:     "compliance",
		Short:   "Collect compliance evidence and evaluate security controls",
		Long:    help.Long,
		Example: help.Example,
		Run: func(cmd *cobra.Command, args []string) {
			// NOTE(review): Help()'s error return is discarded here — confirm
			// that is intended for the bare-command case.
			cmd.Help()
		},
	}

	cmd.AddCommand(
		newCollectCommand(),
		newEvaluateCommand(),
	)

	return cmd
}
diff --git a/cmd/compliance/compliance.md b/cmd/compliance/compliance.md new file mode 100644 index 00000000..af457af7 --- /dev/null +++ b/cmd/compliance/compliance.md @@ -0,0 +1,13 @@
==Long==
# Compliance

Collect compliance evidence and evaluate against security control frameworks.

====

==Example==
```bash
sail compliance collect --output evidence.json --period 90
sail compliance evaluate --input evidence.json --controls nist-800-53
```
====
diff --git a/cmd/compliance/compliance_test.go b/cmd/compliance/compliance_test.go new file mode 100644 index 00000000..7e868173 --- /dev/null +++ b/cmd/compliance/compliance_test.go @@ -0,0 +1,141 @@
package compliance

import (
	"bytes"
	"encoding/json"
	"os"
	"path/filepath"
	"strings"
	"testing"
	"time"
)

// TestNewComplianceCommand verifies the subcommand wiring and that running the
// bare command (help path) succeeds.
func TestNewComplianceCommand(t *testing.T) {
	cmd := NewComplianceCommand()

	subcommands := map[string]bool{}
	for _, subcommand := range cmd.Commands() {
		subcommands[subcommand.Name()] = true
	}

	if !subcommands["collect"] {
		t.Fatalf("expected collect subcommand")
	}
	if !subcommands["evaluate"] {
		t.Fatalf("expected evaluate subcommand")
	}

	buf := new(bytes.Buffer)
	cmd.SetOut(buf)
	cmd.SetErr(buf)
	cmd.SetArgs([]string{})
	if err := cmd.Execute(); err != nil {
		t.Fatalf("expected command to execute without args: %v", err)
	}
}

// TestEvaluateWithEmbeddedControls evaluates a fully compliant fixture bundle
// against the embedded default pack and expects zero failures and findings.
func TestEvaluateWithEmbeddedControls(t *testing.T) {
	tmp := t.TempDir()
	inputPath := filepath.Join(tmp, "evidence.json")
	outputPath := filepath.Join(tmp, "findings.json")

	// Fixture values are chosen to satisfy every check in nist_800_53.yaml
	// (e.g. minLength 14 >= 12, mfaEnabled true, terminated lifecycle state).
	bundle := EvidenceBundle{
		Metadata: Metadata{
			SchemaVersion:  "1.0.0",
			GeneratedAt:    time.Now().UTC(),
			SailCLIVersion: "test",
			PeriodDays:     90,
			Tenant:         "https://example.identitynow.com",
		},
		Data: EvidenceData{
			AuthOrgConfig:    json.RawMessage(`{"lockoutThreshold": 5, "mfaEnabled": true}`),
			PasswordPolicies: json.RawMessage(`[{"minLength": 14}]`),
			SODPolicies:      json.RawMessage(`[{"id":"sod-1"}]`),
			Certifications:   json.RawMessage(`[{"id":"cert-1"}]`),
			Sources:          json.RawMessage(`[{"owner":{"id":"owner-1"},"authoritative":true}]`),
			LifecycleStates:  json.RawMessage(`[{"technicalName":"terminated"}]`),
			Events: EventSummary{
				ProvisioningCount: 2,
				PasswordCount:     1,
				PeriodDays:        90,
			},
		},
		Summary: CollectionSummary{
			TotalCollectors: 13,
			Succeeded:       13,
			Failed:          0,
		},
	}

	data, err := json.Marshal(bundle)
	if err != nil {
		t.Fatalf("failed to marshal evidence bundle: %v", err)
	}
	if err := os.WriteFile(inputPath, data, 0o644); err != nil {
		t.Fatalf("failed to write evidence fixture: %v", err)
	}

	cmd := newEvaluateCommand()
	buf := new(bytes.Buffer)
	cmd.SetOut(buf)
	cmd.SetErr(buf)
	cmd.SetArgs([]string{"--input", inputPath, "--output", outputPath})

	if err := cmd.Execute(); err != nil {
		t.Fatalf("expected evaluation to pass, got error: %v\noutput: %s", err, buf.String())
	}

	resultData, err := os.ReadFile(outputPath)
	if err != nil {
		t.Fatalf("failed to read output file: %v", err)
	}

	var result EvaluationResult
	if err := json.Unmarshal(resultData, &result); err != nil {
		t.Fatalf("failed to parse evaluation output: %v", err)
	}

	if len(result.Controls) == 0 {
		t.Fatalf("expected at least one control result")
	}
	if result.Summary.Failed != 0 {
		t.Fatalf("expected no failed controls, got %d", result.Summary.Failed)
	}
	if len(result.Findings) != 0 {
		t.Fatalf("expected no findings, got %d", len(result.Findings))
	}
}

// TestEvaluateMalformedInput pins the parse-error message for invalid JSON.
func TestEvaluateMalformedInput(t *testing.T) {
	tmp := t.TempDir()
	inputPath := filepath.Join(tmp, "bad-evidence.json")
	outputPath := filepath.Join(tmp, "findings.json")

	if err := os.WriteFile(inputPath, []byte("{not-valid-json"), 0o644); err != nil {
		t.Fatalf("failed to write malformed fixture: %v", err)
	}

	cmd := newEvaluateCommand()
	cmd.SetArgs([]string{"--input", inputPath, "--output", outputPath})

	err := cmd.Execute()
	if err == nil {
		t.Fatalf("expected malformed input to return error")
	}
	if !strings.Contains(err.Error(), "failed to parse evidence bundle") {
		t.Fatalf("unexpected error message: %v", err)
	}
}

// TestEvaluateMissingInputFlag verifies --input is a required flag.
func TestEvaluateMissingInputFlag(t *testing.T) {
	cmd := newEvaluateCommand()
	cmd.SetArgs([]string{"--output", "findings.json"})

	err := cmd.Execute()
	if err == nil {
		t.Fatalf("expected missing input flag to return error")
	}
	if !strings.Contains(err.Error(), "required flag(s) \"input\" not set") {
		t.Fatalf("unexpected error for missing input flag: %v", err)
	}
}
diff --git a/cmd/compliance/controls.go b/cmd/compliance/controls.go new file mode 100644 index 00000000..0cfae4d5 --- /dev/null +++ b/cmd/compliance/controls.go @@ -0,0 +1,494 @@
package compliance

import (
	_ "embed"
	"encoding/json"
	"fmt"
	"os"
	"regexp"
	"strconv"
	"strings"

	"github.com/sailpoint-oss/sailpoint-cli/internal/jsonpath"
	"gopkg.in/yaml.v2"
)

//go:embed controls/nist_800_53.yaml
var defaultControlPackYAML []byte

// loadControlPack resolves the --controls argument to either the embedded
// default pack (any recognized alias, or empty) or a YAML file on disk,
// parses and validates it, and returns the pack plus the resolved name/path.
func loadControlPack(controlsArg string) (ControlPack, string, error) {
	var payload []byte
	resolved := strings.TrimSpace(controlsArg)
	if resolved == "" {
		resolved = "nist-800-53"
	}

	if isDefaultControlPack(resolved) {
		payload = defaultControlPackYAML
		// Canonicalize all aliases to the single default name.
		resolved = "nist-800-53"
	} else {
		custom, err := osReadFile(resolved)
		if err != nil {
			return ControlPack{}, "", fmt.Errorf("failed to read controls file %q: %w", resolved, err)
		}
		payload = custom
	}

	var pack ControlPack
	if err := yaml.Unmarshal(payload, &pack); err != nil {
		return ControlPack{}, "", fmt.Errorf("failed to parse control pack: %w", err)
	}

	if err := validateControlPack(pack); err != nil {
		return ControlPack{}, "", err
	}

	return pack, resolved, nil
}

// evaluateControlPack runs every check of every control against the raw
// evidence JSON, emitting one Finding per failed check and a roll-up summary.
func evaluateControlPack(evidenceRaw []byte, evidence EvidenceBundle, pack ControlPack) EvaluationResult {
	result := EvaluationResult{
		Metadata: evidence.Metadata,
		Controls: make([]ControlResult, 0, len(pack.Controls)),
		Findings: []Finding{},
	}

	for _, control := range pack.Controls {
		controlResult := ControlResult{
			ControlID:    control.ControlID,
			ControlTitle: control.ControlTitle,
			Checks:       make([]CheckResult, 0, len(control.Checks)),
		}

		for _, check := range control.Checks {
			checkResult := evaluateCheck(evidenceRaw, check)
			controlResult.Checks = append(controlResult.Checks, checkResult)

			if checkResult.Status == "FAIL" {
				result.Findings = append(result.Findings, Finding{
					ControlID:   control.ControlID,
					CheckID:     check.CheckID,
					Severity:    normalizeSeverity(check.Severity),
					Title:       fmt.Sprintf("%s %s failed", control.ControlID, check.CheckID),
					Description: check.Description,
				})
			}
		}

		controlResult.Status = deriveControlStatus(controlResult.Checks)
		result.Controls = append(result.Controls, controlResult)
	}

	result.Summary = summarizeEvaluation(result)
	return result
}

// evaluateCheck resolves the check's JSONPath against the evidence and applies
// its rule. Any path-resolution, decoding, or rule-configuration error yields
// status NOT_ASSESSED (with the error text as Actual) rather than FAIL.
func evaluateCheck(evidenceRaw []byte, check CheckDefinition) CheckResult {
	severity := normalizeSeverity(check.Severity)
	result := CheckResult{
		CheckID:     check.CheckID,
		Description: check.Description,
		Severity:    severity,
		Expected:    check.Expected,
		Remediation: check.Remediation,
	}

	targetRaw, err := jsonpath.EvaluateJSONPath(evidenceRaw, check.JSONPath)
	if err != nil {
		result.Status = "NOT_ASSESSED"
		result.Actual = err.Error()
		return result
	}

	var target interface{}
	if err := json.Unmarshal(targetRaw, &target); err != nil {
		result.Status = "NOT_ASSESSED"
		result.Actual = fmt.Sprintf("invalid JSONPath result: %v", err)
		return result
	}

	pass, actual, err := runRule(target, check)
	if actual != nil {
		result.Actual = actual
	}
	if err != nil {
		result.Status = "NOT_ASSESSED"
		if result.Actual == nil {
			result.Actual = err.Error()
		}
		return result
	}

	if pass {
		result.Status = "PASS"
	} else {
		result.Status = "FAIL"
	}

	return result
}

// runRule dispatches on the check's rule name (case-insensitive) and returns
// (pass, actual-value-for-reporting, error). An error means the rule could not
// be applied (wrong target shape or bad configuration), not that it failed.
func runRule(target interface{}, check CheckDefinition) (bool, interface{}, error) {
	switch strings.ToLower(strings.TrimSpace(check.Rule)) {
	case "all_have_field":
		// Pass when the target is a non-empty array and every element has the field.
		items, ok := asSlice(target)
		if !ok {
			return false, nil, fmt.Errorf("rule all_have_field requires array target")
		}
		matching := 0
		for _, item := range items {
			if _, ok := getFieldValue(item, check.Field); ok {
				matching++
			}
		}
		actual := map[string]interface{}{"total": len(items), "matching": matching}
		return len(items) > 0 && matching == len(items), actual, nil

	case "any_match":
		// Pass when at least one element's field equals the expected value.
		items, ok := asSlice(target)
		if !ok {
			return false, nil, fmt.Errorf("rule any_match requires array target")
		}
		matching := 0
		for _, item := range items {
			value, exists := getFieldValue(item, check.Field)
			if exists && valuesEqual(value, check.Expected) {
				matching++
			}
		}
		actual := map[string]interface{}{"total": len(items), "matching": matching}
		return matching > 0, actual, nil

	case "count_gte":
		// Pass when the element/key count meets the expected minimum.
		expected, ok := toFloat64(check.Expected)
		if !ok {
			return false, nil, fmt.Errorf("rule count_gte requires numeric expected")
		}
		count, ok := collectionCount(target)
		if !ok {
			return false, nil, fmt.Errorf("rule count_gte requires array/object target")
		}
		return float64(count) >= expected, count, nil

	case "field_exists":
		// Pass when the field resolves to a non-nil, non-empty value.
		value, ok := getFieldValue(target, check.Field)
		if ok {
			return true, value, nil
		}
		return false, nil, nil

	case "field_equals":
		// Pass when the field exists and equals the expected value.
		value, ok := getFieldValue(target, check.Field)
		if !ok {
			return false, nil, nil
		}
		return valuesEqual(value, check.Expected), value, nil

	case "all_field_gte":
		// Pass when the array is non-empty and every element's numeric field
		// meets the minimum; elements missing the field are skipped (NOTE:
		// a missing field therefore fails the all-elements requirement).
		expected, ok := toFloat64(check.Expected)
		if !ok {
			return false, nil, fmt.Errorf("rule all_field_gte requires numeric expected")
		}
		items, ok := asSlice(target)
		if !ok {
			return false, nil, fmt.Errorf("rule all_field_gte requires array target")
		}
		matching := 0
		for _, item := range items {
			value, exists := getFieldValue(item, check.Field)
			if !exists {
				continue
			}
			numeric, isNumeric := toFloat64(value)
			if !isNumeric {
				return false, nil, fmt.Errorf("field %q must be numeric for all_field_gte", check.Field)
			}
			if numeric >= expected {
				matching++
			}
		}
		actual := map[string]interface{}{"total": len(items), "matching": matching}
		return len(items) > 0 && matching == len(items), actual, nil

	case "any_field_matches":
		// Pass when at least one element's field matches the regex pattern.
		if check.Pattern == "" {
			return false, nil, fmt.Errorf("rule any_field_matches requires pattern")
		}
		re, err := regexp.Compile(check.Pattern)
		if err != nil {
			return false, nil, fmt.Errorf("invalid regex pattern: %w", err)
		}
		items, ok := asSlice(target)
		if !ok {
			return false, nil, fmt.Errorf("rule any_field_matches requires array target")
		}
		matching := 0
		for _, item := range items {
			value, exists := getFieldValue(item, check.Field)
			if !exists {
				continue
			}
			if re.MatchString(fmt.Sprintf("%v", value)) {
				matching++
			}
		}
		actual := map[string]interface{}{"total": len(items), "matching": matching}
		return matching > 0, actual, nil

	case "value_gte":
		// Pass when the scalar target itself meets the numeric minimum.
		expected, ok := toFloat64(check.Expected)
		if !ok {
			return false, nil, fmt.Errorf("rule value_gte requires numeric expected")
		}
		actual, ok := toFloat64(target)
		if !ok {
			return false, nil, fmt.Errorf("rule value_gte requires numeric target")
		}
		return actual >= expected, actual, nil

	default:
		return false, nil, fmt.Errorf("unsupported rule %q", check.Rule)
	}
}

// validateControlPack rejects packs with no controls, or controls missing
// required metadata or checks.
func validateControlPack(pack ControlPack) error {
	if len(pack.Controls) == 0 {
		return fmt.Errorf("control pack has no controls")
	}

	for _, control := range pack.Controls {
		if strings.TrimSpace(control.ControlID) == "" {
			return fmt.Errorf("control is missing control_id")
		}
		if strings.TrimSpace(control.ControlTitle) == "" {
			return fmt.Errorf("control %s is missing control_title", control.ControlID)
		}
		if len(control.Checks) == 0 {
			return fmt.Errorf("control %s has no checks", control.ControlID)
		}
		for _, check := range control.Checks {
			if err := validateCheck(check, control.ControlID); err != nil {
				return err
			}
		}
	}

	return nil
}

// validateCheck enforces the per-rule configuration requirements (which rules
// need field/expected/pattern) in addition to the universally required fields.
func validateCheck(check CheckDefinition, controlID string) error {
	if strings.TrimSpace(check.CheckID) == "" {
		return fmt.Errorf("control %s has check with missing check_id", controlID)
	}
	if strings.TrimSpace(check.Description) == "" {
		return fmt.Errorf("control %s check %s is missing description", controlID, check.CheckID)
	}
	if strings.TrimSpace(check.Severity) == "" {
		return fmt.Errorf("control %s check %s is missing severity", controlID, check.CheckID)
	}
	if strings.TrimSpace(check.JSONPath) == "" {
		return fmt.Errorf("control %s check %s is missing json_path", controlID, check.CheckID)
	}
	if strings.TrimSpace(check.Rule) == "" {
		return fmt.Errorf("control %s check %s is missing rule", controlID, check.CheckID)
	}

	rule := strings.ToLower(strings.TrimSpace(check.Rule))
	requiresField := map[string]bool{
		"all_have_field":    true,
		"any_match":         true,
		"field_exists":      true,
		"field_equals":      true,
		"all_field_gte":     true,
		"any_field_matches": true,
	}
	if requiresField[rule] && strings.TrimSpace(check.Field) == "" {
		return fmt.Errorf("control %s check %s rule %s requires field", controlID, check.CheckID, rule)
	}

	requiresExpected := map[string]bool{
		"any_match":     true,
		"count_gte":     true,
		"field_equals":  true,
		"all_field_gte": true,
		"value_gte":     true,
	}
	if requiresExpected[rule] && check.Expected == nil {
		return fmt.Errorf("control %s check %s rule %s requires expected", controlID, check.CheckID, rule)
	}

	if rule == "any_field_matches" && strings.TrimSpace(check.Pattern) == "" {
		return fmt.Errorf("control %s check %s rule any_field_matches requires pattern", controlID, check.CheckID)
	}

	return nil
}

// deriveControlStatus rolls check statuses up to a control status:
// all PASS -> PASS; all NOT_ASSESSED -> NOT_ASSESSED; no PASS but some FAIL
// -> FAIL; any other mix (including PASS + NOT_ASSESSED) -> PARTIAL.
func deriveControlStatus(checks []CheckResult) string {
	if len(checks) == 0 {
		return "NOT_ASSESSED"
	}

	pass := 0
	fail := 0
	notAssessed := 0

	for _, check := range checks {
		switch check.Status {
		case "PASS":
			pass++
		case "FAIL":
			fail++
		default:
			notAssessed++
		}
	}

	if pass == len(checks) {
		return "PASS"
	}
	if notAssessed == len(checks) {
		return "NOT_ASSESSED"
	}
	if pass == 0 && fail > 0 {
		return "FAIL"
	}
	return "PARTIAL"
}

// summarizeEvaluation computes roll-up counts. NOT_ASSESSED controls count
// toward neither Passed nor Failed.
// NOTE(review): PARTIAL is counted as Failed, and PARTIAL can arise from a
// PASS + NOT_ASSESSED mix with zero actually-failing checks — confirm that a
// control with no failed checks should drive the summary's Failed count.
func summarizeEvaluation(result EvaluationResult) EvalSummary {
	summary := EvalSummary{TotalControls: len(result.Controls)}

	for _, control := range result.Controls {
		switch control.Status {
		case "PASS":
			summary.Passed++
		case "FAIL", "PARTIAL":
			summary.Failed++
		}
	}

	for _, finding := range result.Findings {
		switch normalizeSeverity(finding.Severity) {
		case "critical":
			summary.CriticalFindings++
		case "high":
			summary.HighFindings++
		}
	}

	return summary
}

// asSlice reports whether value is a JSON array ([]interface{}).
func asSlice(value interface{}) ([]interface{}, bool) {
	items, ok := value.([]interface{})
	return items, ok
}

// collectionCount returns the element count of a JSON array or the key count
// of a JSON object; ok is false for any other type.
func collectionCount(value interface{}) (int, bool) {
	switch typed := value.(type) {
	case []interface{}:
		return len(typed), true
	case map[string]interface{}:
		return len(typed), true
	default:
		return 0, false
	}
}

// getFieldValue walks a dot-separated path through nested JSON objects.
// A nil value or a whitespace-only string at the end of the path is treated
// as absent (ok == false).
func getFieldValue(value interface{}, fieldPath string) (interface{}, bool) {
	if strings.TrimSpace(fieldPath) == "" {
		return nil, false
	}

	parts := strings.Split(fieldPath, ".")
	current := value
	for _, part := range parts {
		obj, ok := current.(map[string]interface{})
		if !ok {
			return nil, false
		}
		next, exists := obj[part]
		if !exists || next == nil {
			return nil, false
		}
		current = next
	}

	// Empty strings are considered "not set" for compliance purposes.
	if str, ok := current.(string); ok && strings.TrimSpace(str) == "" {
		return nil, false
	}

	return current, true
}

// toFloat64 coerces any Go numeric type, json.Number, or numeric string to a
// float64 so YAML-declared expectations compare cleanly with JSON values.
func toFloat64(value interface{}) (float64, bool) {
	switch typed := value.(type) {
	case float64:
		return typed, true
	case float32:
		return float64(typed), true
	case int:
		return float64(typed), true
	case int8:
		return float64(typed), true
	case int16:
		return float64(typed), true
	case int32:
		return float64(typed), true
	case int64:
		return float64(typed), true
	case uint:
		return float64(typed), true
	case uint8:
		return float64(typed), true
	case uint16:
		return float64(typed), true
	case uint32:
		return float64(typed), true
	case uint64:
		return float64(typed), true
	case json.Number:
		parsed, err := typed.Float64()
		return parsed, err == nil
	case string:
		parsed, err := strconv.ParseFloat(strings.TrimSpace(typed), 64)
		return parsed, err == nil
	default:
		return 0, false
	}
}

// valuesEqual compares numerically when both sides coerce to float64 and
// otherwise falls back to comparing their fmt.Sprintf("%v") renderings.
func valuesEqual(actual interface{}, expected interface{}) bool {
	if actualNum, ok := toFloat64(actual); ok {
		if expectedNum, ok := toFloat64(expected); ok {
			return actualNum == expectedNum
		}
	}

	return fmt.Sprintf("%v", actual) == fmt.Sprintf("%v", expected)
}

// normalizeSeverity lowercases a severity and maps anything unrecognized to
// "medium".
func normalizeSeverity(severity string) string {
	normalized := strings.ToLower(strings.TrimSpace(severity))
	switch normalized {
	case "critical", "high", "medium", "low", "info":
		return normalized
	default:
		return "medium"
	}
}

// isDefaultControlPack reports whether input is any accepted alias for the
// embedded nist-800-53 pack.
func isDefaultControlPack(input string) bool {
	switch strings.ToLower(strings.TrimSpace(input)) {
	case "nist-800-53", "nist_800_53", "nist80053", "nist-800-53-r5", "default":
		return true
	default:
		return false
	}
}

// osReadFile is wrapped for easy stubbing in tests.
+var osReadFile = func(path string) ([]byte, error) { + return os.ReadFile(path) +} diff --git a/cmd/compliance/controls/nist_800_53.yaml b/cmd/compliance/controls/nist_800_53.yaml new file mode 100644 index 00000000..0772ca08 --- /dev/null +++ b/cmd/compliance/controls/nist_800_53.yaml @@ -0,0 +1,83 @@ +controls: + - control_id: AC-2 + control_title: Account Management + checks: + - check_id: ac2-01 + description: "All sources have a designated owner" + severity: high + json_path: "$.data.sources" + rule: "all_have_field" + field: "owner.id" + - check_id: ac2-02 + description: "Authoritative source exists" + severity: high + json_path: "$.data.sources" + rule: "any_match" + field: "authoritative" + expected: true + - control_id: AC-5 + control_title: Separation of Duties + checks: + - check_id: ac5-01 + description: "SOD policies exist and are enforced" + severity: critical + json_path: "$.data.sod_policies" + rule: "count_gte" + expected: 1 + - control_id: AC-7 + control_title: Unsuccessful Logon Attempts + checks: + - check_id: ac7-01 + description: "Account lockout is configured" + severity: high + json_path: "$.data.auth_org_config" + rule: "field_exists" + field: "lockoutThreshold" + - control_id: IA-2(1) + control_title: MFA for Privileged Accounts + checks: + - check_id: ia2_1-01 + description: "MFA is enabled" + severity: critical + json_path: "$.data.auth_org_config" + rule: "field_equals" + field: "mfaEnabled" + expected: true + - control_id: IA-5(1) + control_title: Password-Based Authentication + checks: + - check_id: ia5_1-01 + description: "Password min length >= 12" + severity: high + json_path: "$.data.password_policies" + rule: "all_field_gte" + field: "minLength" + expected: 12 + - control_id: PS-4 + control_title: Personnel Termination + checks: + - check_id: ps4-01 + description: "Leaver/terminated lifecycle state exists" + severity: critical + json_path: "$.data.lifecycle_states" + rule: "any_field_matches" + field: "technicalName" + pattern: 
"inactive|terminated|leaver" + - control_id: AU-2 + control_title: Event Logging + checks: + - check_id: au2-01 + description: "Provisioning events are being generated" + severity: high + json_path: "$.data.events.provisioning_count" + rule: "value_gte" + expected: 1 + - control_id: AC-6(7) + control_title: Review of User Privileges + checks: + - check_id: ac6_7-01 + description: "Access certifications exist" + severity: critical + json_path: "$.data.certifications" + rule: "count_gte" + expected: 1 diff --git a/cmd/compliance/evaluate.go b/cmd/compliance/evaluate.go new file mode 100644 index 00000000..561944ef --- /dev/null +++ b/cmd/compliance/evaluate.go @@ -0,0 +1,140 @@ +package compliance + +import ( + _ "embed" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "time" + + "github.com/sailpoint-oss/sailpoint-cli/internal/util" + "github.com/spf13/cobra" +) + +//go:embed evaluate.md +var evaluateHelp string + +func newEvaluateCommand() *cobra.Command { + help := util.ParseHelp(evaluateHelp) + + var inputFile string + var controls string + var outputFile string + var outputMarkdown string + + cmd := &cobra.Command{ + Use: "evaluate", + Short: "Evaluate an evidence bundle against a control pack", + Long: help.Long, + Example: help.Example, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + evidenceRaw, err := os.ReadFile(inputFile) + if err != nil { + return fmt.Errorf("failed to read evidence file %q: %w", inputFile, err) + } + + var evidence EvidenceBundle + if err := json.Unmarshal(evidenceRaw, &evidence); err != nil { + return fmt.Errorf("failed to parse evidence bundle: %w", err) + } + + pack, resolvedControls, err := loadControlPack(controls) + if err != nil { + return err + } + + result := evaluateControlPack(evidenceRaw, evidence, pack) + if result.Metadata.GeneratedAt.IsZero() { + result.Metadata.GeneratedAt = time.Now().UTC() + } + + if err := writeJSONOutput(outputFile, result, true); err != nil { + return 
err + } + + if strings.TrimSpace(outputMarkdown) != "" { + report := buildMarkdownReport(result, resolvedControls) + if err := writeTextOutput(outputMarkdown, report); err != nil { + return err + } + } + + if result.Summary.Failed > 0 || len(result.Findings) > 0 { + return fmt.Errorf("evaluation failed: %d controls failed/partial, %d findings", result.Summary.Failed, len(result.Findings)) + } + + return nil + }, + } + + cmd.Flags().StringVarP(&inputFile, "input", "i", "", "Path to input evidence JSON") + cmd.Flags().StringVarP(&controls, "controls", "c", "nist-800-53", "Control pack name or path to control YAML") + cmd.Flags().StringVarP(&outputFile, "output", "o", "findings.json", "Path to output findings JSON") + cmd.Flags().StringVar(&outputMarkdown, "output-md", "", "Optional path to output markdown report") + cmd.MarkFlagRequired("input") + + return cmd +} + +func buildMarkdownReport(result EvaluationResult, controlPack string) string { + var b strings.Builder + + b.WriteString("# Compliance Evaluation Report\n\n") + b.WriteString(fmt.Sprintf("- Generated At: `%s`\n", time.Now().UTC().Format(time.RFC3339))) + b.WriteString(fmt.Sprintf("- Tenant: `%s`\n", result.Metadata.Tenant)) + b.WriteString(fmt.Sprintf("- Controls: `%s`\n", controlPack)) + b.WriteString(fmt.Sprintf("- Evidence Generated At: `%s`\n\n", result.Metadata.GeneratedAt.Format(time.RFC3339))) + + b.WriteString("## Summary\n\n") + b.WriteString(fmt.Sprintf("- Total controls: %d\n", result.Summary.TotalControls)) + b.WriteString(fmt.Sprintf("- Passed controls: %d\n", result.Summary.Passed)) + b.WriteString(fmt.Sprintf("- Failed/PARTIAL controls: %d\n", result.Summary.Failed)) + b.WriteString(fmt.Sprintf("- Critical findings: %d\n", result.Summary.CriticalFindings)) + b.WriteString(fmt.Sprintf("- High findings: %d\n\n", result.Summary.HighFindings)) + + b.WriteString("## Controls\n\n") + b.WriteString("| Control ID | Title | Status |\n") + b.WriteString("| --- | --- | --- |\n") + for _, control := 
range result.Controls { + b.WriteString(fmt.Sprintf("| %s | %s | %s |\n", control.ControlID, control.ControlTitle, control.Status)) + } + b.WriteString("\n") + + b.WriteString("## Findings\n\n") + if len(result.Findings) == 0 { + b.WriteString("No findings.\n") + return b.String() + } + + severityOrder := []string{"critical", "high", "medium", "low", "info"} + for _, severity := range severityOrder { + b.WriteString(fmt.Sprintf("### %s\n\n", strings.Title(severity))) + count := 0 + for _, finding := range result.Findings { + if normalizeSeverity(finding.Severity) != severity { + continue + } + count++ + b.WriteString(fmt.Sprintf("- `%s/%s` %s\n", finding.ControlID, finding.CheckID, finding.Description)) + } + if count == 0 { + b.WriteString("- None\n") + } + b.WriteString("\n") + } + + return b.String() +} + +func writeTextOutput(outputPath string, content string) error { + dir := filepath.Dir(outputPath) + if dir != "." && dir != "" { + if err := os.MkdirAll(dir, 0o755); err != nil { + return err + } + } + return os.WriteFile(outputPath, []byte(content), 0o644) +} diff --git a/cmd/compliance/evaluate.md b/cmd/compliance/evaluate.md new file mode 100644 index 00000000..e5904a47 --- /dev/null +++ b/cmd/compliance/evaluate.md @@ -0,0 +1,17 @@ +==Long== +# Evaluate + +Evaluate an evidence bundle against a control pack and emit findings. + +By default this command uses the embedded NIST 800-53 control pack. You can also pass a custom YAML control pack path. + +The command writes output files first, then returns a non-zero exit code when controls fail. 
+ +==== + +==Example== +```bash +sail compliance evaluate --input evidence.json --controls nist-800-53 +sail compliance evaluate -i evidence.json -c ./custom-controls.yaml -o findings.json --output-md findings.md +``` +==== diff --git a/cmd/compliance/schema.go b/cmd/compliance/schema.go new file mode 100644 index 00000000..ac735722 --- /dev/null +++ b/cmd/compliance/schema.go @@ -0,0 +1,110 @@ +package compliance + +import ( + "encoding/json" + "time" +) + +type EvidenceBundle struct { + Metadata Metadata `json:"metadata"` + Data EvidenceData `json:"data"` + Summary CollectionSummary `json:"summary"` +} + +type Metadata struct { + SchemaVersion string `json:"schema_version"` + GeneratedAt time.Time `json:"generated_at"` + SailCLIVersion string `json:"sail_cli_version"` + PeriodDays int `json:"period_days"` + Tenant string `json:"tenant"` +} + +type EvidenceData struct { + AuthOrgConfig json.RawMessage `json:"auth_org_config,omitempty"` + PasswordPolicies json.RawMessage `json:"password_policies,omitempty"` + SODPolicies json.RawMessage `json:"sod_policies,omitempty"` + Certifications json.RawMessage `json:"certifications,omitempty"` + Identities json.RawMessage `json:"identities,omitempty"` + Roles json.RawMessage `json:"roles,omitempty"` + AccessProfiles json.RawMessage `json:"access_profiles,omitempty"` + Sources json.RawMessage `json:"sources,omitempty"` + LifecycleStates json.RawMessage `json:"lifecycle_states,omitempty"` + Workflows json.RawMessage `json:"workflows,omitempty"` + GovernanceGroups json.RawMessage `json:"governance_groups,omitempty"` + Events EventSummary `json:"events"` +} + +type EventSummary struct { + ProvisioningCount int `json:"provisioning_count"` + PasswordCount int `json:"password_count"` + PeriodDays int `json:"period_days"` +} + +type CollectionSummary struct { + TotalCollectors int `json:"total_collectors"` + Succeeded int `json:"succeeded"` + Failed int `json:"failed"` + Errors []string `json:"errors,omitempty"` +} + +type 
EvaluationResult struct { + Metadata Metadata `json:"metadata"` + Controls []ControlResult `json:"controls"` + Findings []Finding `json:"findings"` + Summary EvalSummary `json:"summary"` +} + +type ControlResult struct { + ControlID string `json:"control_id"` + ControlTitle string `json:"control_title"` + Status string `json:"status"` + Checks []CheckResult `json:"checks"` +} + +type CheckResult struct { + CheckID string `json:"check_id"` + Description string `json:"description"` + Status string `json:"status"` + Severity string `json:"severity"` + Expected interface{} `json:"expected,omitempty"` + Actual interface{} `json:"actual,omitempty"` + Remediation string `json:"remediation,omitempty"` +} + +type Finding struct { + ControlID string `json:"control_id"` + CheckID string `json:"check_id"` + Severity string `json:"severity"` + Title string `json:"title"` + Description string `json:"description"` +} + +type EvalSummary struct { + TotalControls int `json:"total_controls"` + Passed int `json:"passed"` + Failed int `json:"failed"` + CriticalFindings int `json:"critical_findings"` + HighFindings int `json:"high_findings"` +} + +type ControlPack struct { + Controls []ControlDefinition `yaml:"controls"` +} + +type ControlDefinition struct { + ControlID string `yaml:"control_id"` + ControlTitle string `yaml:"control_title"` + Checks []CheckDefinition `yaml:"checks"` +} + +type CheckDefinition struct { + CheckID string `yaml:"check_id"` + Description string `yaml:"description"` + Severity string `yaml:"severity"` + JSONPath string `yaml:"json_path"` + Rule string `yaml:"rule"` + Field string `yaml:"field,omitempty"` + Expected interface{} `yaml:"expected,omitempty"` + Pattern string `yaml:"pattern,omitempty"` + Remediation string `yaml:"remediation,omitempty"` +} diff --git a/cmd/root/root.go b/cmd/root/root.go index 990318ab..5b2c2d71 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -5,6 +5,7 @@ import ( "github.com/sailpoint-oss/sailpoint-cli/cmd/api" 
"github.com/sailpoint-oss/sailpoint-cli/cmd/cluster" + "github.com/sailpoint-oss/sailpoint-cli/cmd/compliance" "github.com/sailpoint-oss/sailpoint-cli/cmd/connector" "github.com/sailpoint-oss/sailpoint-cli/cmd/environment" "github.com/sailpoint-oss/sailpoint-cli/cmd/jsonpath" @@ -55,6 +56,7 @@ func NewRootCommand() *cobra.Command { root.AddCommand( api.NewAPICommand(), cluster.NewClusterCommand(), + compliance.NewComplianceCommand(), connector.NewConnCmd(t), environment.NewEnvironmentCommand(), jsonpath.NewJSONPathCmd(), diff --git a/cmd/root/root_test.go b/cmd/root/root_test.go index a1f34e86..121ca492 100644 --- a/cmd/root/root_test.go +++ b/cmd/root/root_test.go @@ -13,7 +13,7 @@ import ( // Expected number of subcommands to `sail` root command const ( - numRootSubcommands = 16 + numRootSubcommands = 17 ) func TestNewRootCmd_noArgs(t *testing.T) {