Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion .github/workflows/example-slinky.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,15 @@ jobs:
- name: Checkout
uses: actions/checkout@v4

- name: Run Slinky link checker
- name: Run Slinky link checker (Example - failures won't block)
uses: ./
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
fail-on-failures: false # Disable blocking failures for example/demo purposes
comment-pr: true # Enable PR comments to showcase the feature
md-out: results.md
json-out: results.json

- name: Upload results
if: always()
Expand Down
4 changes: 3 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@ COPY . .
RUN CGO_ENABLED=0 go build -o /usr/local/bin/slinky ./

FROM alpine:3.20
RUN apk add --no-cache curl jq ca-certificates
# jq is used in entrypoint.sh for parsing GitHub event JSON
# ca-certificates is needed for HTTPS requests
RUN apk add --no-cache jq ca-certificates
COPY --from=build /usr/local/bin/slinky /usr/local/bin/slinky
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
Expand Down
7 changes: 6 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,20 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
pull-requests: write # Only needed if comment-pr is enabled
steps:
- uses: actions/checkout@v4
- name: Run Slinky
uses: LukeHagar/slinky@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Required for PR comments
with:
targets: "docs/,README.md,**/*.md"
# comment-pr: true # Optional: post results as PR comment (default: true)
```

**Note:** The `GITHUB_TOKEN` is provided automatically by GitHub Actions via `secrets.GITHUB_TOKEN` and is required only for the PR comment feature; core link checking works without it. If you disable PR comments (`comment-pr: false`), you can remove both the `pull-requests: write` permission and the `GITHUB_TOKEN` env variable.

### Inputs

- **targets**: Comma-separated paths and patterns to scan. Can be directories, files, or glob patterns (e.g. `docs/,api-specs/**/*.yaml,README.md`). Default: `**/*`
Expand Down
32 changes: 15 additions & 17 deletions action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,25 +16,25 @@ inputs:
timeout:
description: "HTTP timeout seconds"
required: false
respect_gitignore:
respect-gitignore:
description: "Respect .gitignore while scanning"
required: false
json_out:
json-out:
description: "Optional path to write JSON results"
required: false
md_out:
md-out:
description: "Optional path to write Markdown report for PR comment"
required: false
repo_blob_base:
repo-blob-base:
description: "Override GitHub blob base URL (https://github.com/<owner>/<repo>/blob/<sha>)"
required: false
fail_on_failures:
fail-on-failures:
description: "Fail the job if any links fail"
required: false
comment_pr:
description: "If running on a PR, post a comment with the report"
comment-pr:
description: "If running on a PR, post a comment with the report. Default: true (enabled when GITHUB_TOKEN is present)"
required: false
step_summary:
step-summary:
description: "Append the report to the GitHub Step Summary"
required: false

Expand All @@ -46,18 +46,16 @@ runs:
INPUT_TARGETS: ${{ inputs.targets }}
INPUT_CONCURRENCY: ${{ inputs.concurrency }}
INPUT_TIMEOUT: ${{ inputs.timeout }}
INPUT_RESPECT_GITIGNORE: ${{ inputs.respect_gitignore }}
INPUT_JSON_OUT: ${{ inputs.json_out }}
INPUT_MD_OUT: ${{ inputs.md_out }}
INPUT_REPO_BLOB_BASE: ${{ inputs.repo_blob_base }}
INPUT_FAIL_ON_FAILURES: ${{ inputs.fail_on_failures }}
INPUT_COMMENT_PR: ${{ inputs.comment_pr }}
INPUT_STEP_SUMMARY: ${{ inputs.step_summary }}
INPUT_RESPECT_GITIGNORE: ${{ inputs.respect-gitignore }}
INPUT_JSON_OUT: ${{ inputs.json-out }}
INPUT_MD_OUT: ${{ inputs.md-out }}
INPUT_REPO_BLOB_BASE: ${{ inputs.repo-blob-base }}
INPUT_FAIL_ON_FAILURES: ${{ inputs.fail-on-failures }}
INPUT_COMMENT_PR: ${{ inputs.comment-pr }}
INPUT_STEP_SUMMARY: ${{ inputs.step-summary }}

outputs:
json_path:
description: "Path to JSON results file"
md_path:
description: "Path to Markdown report file"


128 changes: 114 additions & 14 deletions cmd/check.go
Original file line number Diff line number Diff line change
Expand Up @@ -158,9 +158,51 @@ func init() {
fmt.Printf("::debug:: Root: %s\n", displayRoot)
}

// Validate and clamp numeric inputs
if maxConcurrency < 1 {
maxConcurrency = 1
} else if maxConcurrency > 100 {
maxConcurrency = 100
}
if timeoutSeconds < 1 {
timeoutSeconds = 1
} else if timeoutSeconds > 300 {
timeoutSeconds = 300 // Max 5 minutes
}

// Build config
timeout := time.Duration(timeoutSeconds) * time.Second
cfg := web.Config{MaxConcurrency: maxConcurrency, RequestTimeout: timeout}

// Set up URL cache if cache path is provided via environment variable
var urlCache *web.URLCache
if cachePath := os.Getenv("SLINKY_CACHE_PATH"); cachePath != "" {
cacheTTL := 24 // Default 24 hours
if ttlStr := os.Getenv("SLINKY_CACHE_TTL_HOURS"); ttlStr != "" {
if ttl, err := time.ParseDuration(ttlStr + "h"); err == nil && ttl > 0 {
cacheTTL = int(ttl.Hours())
}
}
urlCache = web.NewURLCache(cachePath, cacheTTL)
if err := urlCache.Load(); err != nil {
if shouldDebug() {
fmt.Printf("::debug:: Failed to load cache: %v\n", err)
}
}
// Save cache when done
defer func() {
if err := urlCache.Save(); err != nil {
if shouldDebug() {
fmt.Printf("::debug:: Failed to save cache: %v\n", err)
}
}
}()
}

cfg := web.Config{
MaxConcurrency: maxConcurrency,
RequestTimeout: timeout,
Cache: urlCache,
}

// Prepare URL list
var urls []string
Expand Down Expand Up @@ -275,9 +317,19 @@ func init() {
}

// If running on a PR, post or update the comment(s), chunking as needed
if ghOK && strings.TrimSpace(finalMDPath) != "" {
// PR comments are enabled by default when token is present
// Only disable if explicitly set to "false"
commentPR := true // Default: enabled
if val := os.Getenv("INPUT_COMMENT_PR"); val != "" {
// Explicitly check for "false" to disable, everything else enables
commentPR = !strings.EqualFold(strings.TrimSpace(val), "false")
}
// Only post comments if: GitHub PR detected, commenting enabled, and report exists
if ghOK && commentPR && strings.TrimSpace(finalMDPath) != "" {
b, rerr := os.ReadFile(finalMDPath)
if rerr == nil {
if rerr != nil {
fmt.Printf("::warning:: Failed to read markdown report for PR comment: %v\n", rerr)
} else {
full := string(b)
if shouldDebug() {
fmt.Printf("::debug:: Report size (chars): %d\n", len(full))
Expand All @@ -286,7 +338,10 @@ func init() {
if shouldDebug() {
fmt.Printf("::debug:: Posting %d chunk(s)\n", len(chunks))
}
_ = upsertPRComments(ghRepo, ghPR, ghToken, chunks)
if err := upsertPRComments(ghRepo, ghPR, ghToken, chunks); err != nil {
// Non-critical error: log warning but don't fail the run
fmt.Printf("::warning:: Failed to post PR comment: %v\n", err)
}
}
}

Expand Down Expand Up @@ -440,47 +495,92 @@ func chunkMarkdownByURL(body string) []string {
}

// upsertPRComments deletes any existing slinky comments and posts the new chunked comments in order.
// Returns error if critical failures occur, but individual comment failures are logged and ignored.
func upsertPRComments(repo string, prNumber int, token string, chunks []string) error {
apiBase := "https://api.github.com"
listURL := fmt.Sprintf("%s/repos/%s/issues/%d/comments?per_page=100", apiBase, repo, prNumber)
req, _ := http.NewRequest(http.MethodGet, listURL, nil)
req, err := http.NewRequest(http.MethodGet, listURL, nil)
if err != nil {
return fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("Authorization", "Bearer "+token)
req.Header.Set("Accept", "application/vnd.github+json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return err
return fmt.Errorf("failed to list comments: %w", err)
}
defer resp.Body.Close()

if resp.StatusCode >= 400 {
return fmt.Errorf("failed to list comments: HTTP %d", resp.StatusCode)
}

var comments []struct {
ID int `json:"id"`
Body string `json:"body"`
}
b, _ := io.ReadAll(resp.Body)
_ = json.Unmarshal(b, &comments)
b, err := io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("failed to read comments response: %w", err)
}
if err := json.Unmarshal(b, &comments); err != nil {
// Non-critical: continue even if we can't parse existing comments
if shouldDebug() {
fmt.Printf("::debug:: Failed to parse comments: %v\n", err)
}
}

// Delete all existing slinky-report comments to avoid stale entries
for _, c := range comments {
if strings.Contains(c.Body, "<!-- slinky-report -->") {
delURL := fmt.Sprintf("%s/repos/%s/issues/comments/%d", apiBase, repo, c.ID)
dReq, _ := http.NewRequest(http.MethodDelete, delURL, nil)
dReq, err := http.NewRequest(http.MethodDelete, delURL, nil)
if err != nil {
continue // Skip if we can't create request
}
dReq.Header.Set("Authorization", "Bearer "+token)
dReq.Header.Set("Accept", "application/vnd.github+json")
_, _ = http.DefaultClient.Do(dReq)
_, _ = http.DefaultClient.Do(dReq) // Non-critical: ignore delete errors
}
}

// Post new comments in order
for idx, chunk := range chunks {
body := fmt.Sprintf("%s\n%s", "<!-- slinky-report -->", chunk)
postURL := fmt.Sprintf("%s/repos/%s/issues/%d/comments", apiBase, repo, prNumber)
payload, _ := json.Marshal(map[string]string{"body": body})
req, _ = http.NewRequest(http.MethodPost, postURL, bytes.NewReader(payload))
payload, err := json.Marshal(map[string]string{"body": body})
if err != nil {
if shouldDebug() {
fmt.Printf("::debug:: Failed to marshal comment payload: %v\n", err)
}
continue
}
req, err := http.NewRequest(http.MethodPost, postURL, bytes.NewReader(payload))
if err != nil {
if shouldDebug() {
fmt.Printf("::debug:: Failed to create POST request: %v\n", err)
}
continue
}
req.Header.Set("Authorization", "Bearer "+token)
req.Header.Set("Accept", "application/vnd.github+json")
req.Header.Set("Content-Type", "application/json")
res, _ := http.DefaultClient.Do(req)
res, err := http.DefaultClient.Do(req)
if err != nil {
if shouldDebug() {
fmt.Printf("::debug:: Failed to post chunk %d/%d: %v\n", idx+1, len(chunks), err)
}
continue
}
res.Body.Close()
if res.StatusCode >= 400 {
if shouldDebug() {
fmt.Printf("::debug:: Failed to post chunk %d/%d: HTTP %d\n", idx+1, len(chunks), res.StatusCode)
}
continue
}
if shouldDebug() {
fmt.Printf("::debug:: Posted chunk %d/%d: %v\n", idx+1, len(chunks), res)
fmt.Printf("::debug:: Posted chunk %d/%d successfully\n", idx+1, len(chunks))
}
}
return nil
Expand Down
Loading
Loading