Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions .github/workflows/links.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
name: Lychee link checker

# NOTE: generic YAML 1.1 parsers read the bare `on` key as boolean `true`;
# GitHub's workflow loader handles it correctly, so it stays unquoted here.
on:
  # Allow manual runs from the Actions tab.
  workflow_dispatch:
  schedule:
    # Run weekly on Sundays at 3 AM UTC to catch broken links.
    - cron: '0 3 * * 0'

# Least-privilege token: the job only reads repository contents.
permissions:
  contents: read

jobs:
  link-checker:
    name: Lychee link checker
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Link Checker
        id: lychee
        uses: lycheeverse/lychee-action@v2.8.0
        with:
          # Check all Markdown files in the repository, honoring the
          # repo-level lychee.toml (accepted status codes, excludes, etc.).
          args: --config lychee.toml --verbose --no-progress --max-redirects 10 '**/*.md'
          # Fail the workflow when broken links are found.
          fail: true
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
coverage
node_modules
.directory
.github/copilot-instructions.md
.idea
dist/
/venv/
Expand Down
39 changes: 39 additions & 0 deletions lychee.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Configuration for the lychee link checker.
# Reference: https://github.com/lycheeverse/lychee#configuration
#
# Goal: suppress known false positives seen in CI —
#   * 403 Forbidden  — shields.io, qlty.sh and similar sites reject bot
#                      traffic even though the links resolve in a browser
#   * 429 Too Many Requests — rate limiting, not an actually dead link

# HTTP status codes treated as a healthy link:
#   200 OK, 204 No Content, 206 Partial Content,
#   403 Forbidden (bot-blocked but browser-reachable),
#   429 Too Many Requests (rate limited, likely still valid).
accept = [200, 204, 206, 403, 429]

# Per-request timeout, in seconds.
timeout = 30

# How many times a failing link is retried before being reported.
max_retries = 3

# Cap on simultaneous network requests.
max_concurrency = 8

# Present a browser-like User-Agent; some hosts block obvious bots.
user_agent = "Mozilla/5.0 (X11; Linux x86_64; rv:100.0) Gecko/20100101 Firefox/100.0"

# Regex patterns for URLs that are never checked:
# loopback/localhost endpoints and local file:// links, which are
# meaningless from a CI runner.
exclude = [
    '^http://127\.0\.0\.',
    '^http://localhost',
    '^https://localhost',
    '^file://',
]