diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json index ea713e1..bf417bf 100644 --- a/.claude-plugin/plugin.json +++ b/.claude-plugin/plugin.json @@ -1,7 +1,7 @@ { "name": "make-no-mistakes", - "version": "1.4.1", - "description": "The disciplined dev lifecycle — implement issues, review PRs, sync releases, test E2E, manage sessions, and stash secrets via OS-native prompts. One plugin to make no mistakes.", + "version": "1.5.0", + "description": "The disciplined dev lifecycle — implement issues, review PRs, sync releases, test E2E, manage sessions, stash secrets, and enforce manifest-driven tool-call hooks. One plugin to make no mistakes.", "author": { "name": "Luis Andres Pena Castillo", "url": "https://github.com/lapc506" diff --git a/.github/workflows/test-hooks.yml b/.github/workflows/test-hooks.yml new file mode 100644 index 0000000..269f3fb --- /dev/null +++ b/.github/workflows/test-hooks.yml @@ -0,0 +1,37 @@ +name: Test hooks + +on: + pull_request: + paths: + - 'hooks/**' + - 'scripts/build-rules.mjs' + - '.github/workflows/test-hooks.yml' + push: + branches: [main] + paths: + - 'hooks/**' + - 'scripts/build-rules.mjs' + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '22' + + - name: Install dependencies + run: npm ci + + - name: Verify rules.json is in sync with rules.yaml + run: | + npm run build-rules + if ! git diff --exit-code hooks/rules/rules.json; then + echo "::error::hooks/rules/rules.json is stale. Run 'npm run build-rules' and commit the result." + exit 1 + fi + + - name: Run hook smoke tests + run: bash hooks/test-hooks.sh diff --git a/.gitignore b/.gitignore index 4ebe72b..604db9d 100644 --- a/.gitignore +++ b/.gitignore @@ -9,5 +9,7 @@ dist/ slack-config.json # Local-only design history and IP-leak guard inputs. -# Each contributor / fork keeps their own copy here. See README.md. +# Each contributor / fork keeps their own copy here. 
See README.md and +# hooks/rules/README.md (the latter documents the IP-leak guard format — +# opt-in list of client/org names that must never appear in rules.yaml). .private/ diff --git a/README.md b/README.md index 3f0bb72..64aec19 100644 --- a/README.md +++ b/README.md @@ -217,12 +217,76 @@ make-no-mistakes-toolkit/ ├── agents/ # 2 specialized subagents ├── skills/ # 6 auto-activating skills │ └── */SKILL.md -├── scripts/ # Shared bash utilities +├── hooks/ # Manifest-driven PreToolUse + PostToolUse hooks (v1.5.0+) +│ ├── hooks.json # Claude Code wiring (thin) +│ ├── pre-bash.sh # Bash dispatcher +│ ├── pre-edit.sh # Edit/Write/MultiEdit dispatcher +│ ├── post-slack.sh # Slack message dispatcher (warn-only) +│ ├── test-hooks.sh # Parametrized test runner +│ ├── lib/ # Generic helpers (parse-input, eval-rule) +│ └── rules/ +│ ├── rules.yaml # Rules SSoT — humans edit +│ ├── rules.json # Build artifact — runtime reads +│ └── README.md # Contributor guide +├── scripts/ # Shared bash + node utilities +│ └── build-rules.mjs # rules.yaml -> rules.json compiler ├── package.json └── README.md ``` -**Design principle:** Commands for destructive/token-intensive actions (you decide when). Skills for read-only analysis (context-aware, auto-activate). Agents for heavy orchestration (own context window). +**Design principle:** Commands for destructive/token-intensive actions (you decide when). Skills for read-only analysis (context-aware, auto-activate). Agents for heavy orchestration (own context window). Hooks for deterministic guardrails on every tool call (no human in the loop). + +## Hooks (v1.5.0+) + +When this plugin is enabled, every tool call you make in any repo runs through +the manifest-driven hooks in `hooks/rules/rules.yaml`. The Tier 1 ruleset +ships 10 rules: + +**PreToolUse on `Bash` (block by default):** +- `ssh-db-mutation` — blocks `gcloud compute ssh ... 
--command="...php -r/mysql/set_config..."` (forces use of versioned scripts) +- `prod-ops-no-approval` — blocks `--project=*-prod` operations without explicit acknowledgement +- `destructive-db-ops` — blocks `supabase db reset|push|repair` and inline `DROP/TRUNCATE/DELETE FROM` +- `manual-edge-fn-deploy` — blocks `supabase functions deploy` (forces CI-only deploys) +- `gcloud-missing-project` — warns when a `gcloud` subcommand is missing `--project=` + +**PreToolUse on `Edit | Write | MultiEdit` (block):** +- `minified-build-output` — blocks writing minified content to `amd/build/*.min.js` or `dist/*.min.{js,css}` +- `secrets-hardcoded` — blocks hardcoded `password|secret|token|api_key|...` patterns in source files (env.example/test/spec/README/fixtures/mocks paths exempted) + +**PostToolUse on Slack messages (warn-only):** +- `slack-unicode-bullets` — warns when `•◦▪▫` bullets are used (use `-` instead) +- `slack-tables-no-codeblock` — warns when markdown tables ship outside ` ``` ` fences (Slack mrkdwn doesn't render bare tables) +- `slack-spanish-tildes` — warns on common Spanish words missing tildes (`migracion` → `migración`, etc.) + +### Bypassing a rule + +Each blocking rule has a `bypass_marker`. Add the literal string +`// hook-bypass: ` (or `# hook-bypass: `) anywhere in the +command or content to acknowledge the rule and proceed: + +```bash +# Bypass markers shipped in Tier 1: +# // hook-bypass: ssh-db-rule +# // hook-bypass: prod-ops +# // hook-bypass: db-destructive +# // hook-bypass: edge-fn-manual +# // hook-bypass: minified-build +# // hook-bypass: secret-leak +``` + +Bypasses are explicit acknowledgements — they sit inside the command/content +itself, not as silent flags. + +### Adding your own rules + +Edit `hooks/rules/rules.yaml`, run `npm run build-rules`, run +`npm run test-hooks` to verify, and commit. See `hooks/rules/README.md` for +the schema and Tier 2 decomposition techniques. 
+ +### Disabling all hooks + +Remove the plugin from `~/.claude/settings.json` `enabledPlugins`, or set +`CLAUDE_DISABLE_PLUGIN_HOOKS=1` in your shell. ## Bilingual Format diff --git a/hooks/hooks.json b/hooks/hooks.json new file mode 100644 index 0000000..63f9466 --- /dev/null +++ b/hooks/hooks.json @@ -0,0 +1,39 @@ +{ + "description": "make-no-mistakes — manifest-driven PreToolUse + PostToolUse hooks. Rules live in hooks/rules/rules.yaml (compiled to rules.json). See hooks/rules/README.md.", + "hooks": { + "PreToolUse": [ + { + "matcher": "Bash", + "hooks": [ + { + "type": "command", + "command": "bash ${CLAUDE_PLUGIN_ROOT}/hooks/pre-bash.sh", + "timeout": 5 + } + ] + }, + { + "matcher": "Edit|Write|MultiEdit", + "hooks": [ + { + "type": "command", + "command": "bash ${CLAUDE_PLUGIN_ROOT}/hooks/pre-edit.sh", + "timeout": 5 + } + ] + } + ], + "PostToolUse": [ + { + "matcher": "mcp__claude_ai_Slack__slack_send_message|mcp__claude_ai_Slack__slack_send_message_draft", + "hooks": [ + { + "type": "command", + "command": "bash ${CLAUDE_PLUGIN_ROOT}/hooks/post-slack.sh", + "timeout": 3 + } + ] + } + ] + } +} diff --git a/hooks/lib/eval-rule.sh b/hooks/lib/eval-rule.sh new file mode 100755 index 0000000..26fa87d --- /dev/null +++ b/hooks/lib/eval-rule.sh @@ -0,0 +1,150 @@ +#!/usr/bin/env bash +# ============================================================================= +# eval-rule.sh — Evaluate a single rule from rules.json against the current +# tool_input (read from stdin in JSON form). +# +# Usage: +# printf '%s' "$TOOL_INPUT_JSON" | bash eval-rule.sh +# +# Exit codes: +# 0 rule did not fire, OR rule fired with action=warn (warning to stderr) +# 2 rule fired with action=block +# +# Behavior: +# 1. Source parse-input.sh to extract INPUT_COMMAND/FILE_PATH/CONTENT/TEXT +# from the tool_input JSON on stdin. +# 2. Look up the rule by id in rules.json. Missing rule = exit 0 (no-op). +# 3. 
If the rule has a bypass_marker and "// hook-bypass: " or +# "# hook-bypass: " appears anywhere in the raw input, exit 0. +# 4. For each match condition (AND-chain): check pattern (if present) and +# not_pattern (if present) on the chosen field. If any condition fails, +# the rule does not fire — exit 0. +# 5. All conditions held → write the rule message to stderr and exit +# according to action. +# +# Requires: jq, grep (BRE/ERE — POSIX). No yaml parser at runtime. +# ============================================================================= +set -uo pipefail + +# Honor the documented kill switch — defense in depth. The dispatchers also +# check this, but eval-rule.sh may be invoked directly (e.g., from tests or +# future tooling) so we re-check here to guarantee the env var disables ALL +# rule evaluation regardless of entry point. +if [ "${CLAUDE_DISABLE_PLUGIN_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +RULE_ID="${1:?Usage: $0 }" +HOOKS_DIR="$(cd "$(dirname "$0")/.." && pwd)" +RULES_JSON="$HOOKS_DIR/rules/rules.json" + +if [ ! -f "$RULES_JSON" ]; then + # Missing manifest is a configuration error, not the user's fault. Surface it + # quietly so we don't block tool calls because of a build issue. + echo "make-no-mistakes: rules.json not found at $RULES_JSON" >&2 + exit 0 +fi + +if ! command -v jq >/dev/null 2>&1; then + echo "make-no-mistakes: jq not installed; hooks disabled" >&2 + exit 0 +fi + +# Source parse-input.sh from the same lib dir we live in. +LIB_DIR="$(cd "$(dirname "$0")" && pwd)" +# shellcheck source=/dev/null +source "$LIB_DIR/parse-input.sh" + +# Resolve the rule. If not found, no-op. +RULE_JSON="$(jq --arg id "$RULE_ID" '.[] | select(.id == $id)' "$RULES_JSON")" +if [ -z "$RULE_JSON" ]; then + exit 0 +fi + +# Bypass check first — a single explicit acknowledgement short-circuits all +# pattern matching. We accept both "//" and "#" comment styles since rules +# apply to commands (shell, #) and code/content (slash-slash). 
+# +# `build-rules.mjs` enforces bypass_marker matches ^[a-z0-9-]+$, so the +# value is safe to interpolate directly into an ERE. We re-validate here as +# defense in depth — if the schema gate ever loosens, an attacker rule +# author could otherwise inject ERE special chars (., +, [, etc.) and +# either silently mismatch real bypass comments or trigger a grep error. +BYPASS_MARKER="$(printf '%s' "$RULE_JSON" | jq -r '.bypass_marker // empty')" +if [ -n "$BYPASS_MARKER" ]; then + case "$BYPASS_MARKER" in + *[!a-z0-9-]*) + echo "make-no-mistakes: rule ${RULE_ID} has invalid bypass_marker (must be kebab-case); ignoring bypass." >&2 + ;; + *) + if printf '%s' "$INPUT_RAW" | grep -qE "(//|#)[[:space:]]*hook-bypass:[[:space:]]*${BYPASS_MARKER}\b"; then + exit 0 + fi + ;; + esac +fi + +# Iterate match conditions. ALL must hold for the rule to fire. +N_CONDITIONS="$(printf '%s' "$RULE_JSON" | jq '.match | length')" +i=0 +while [ "$i" -lt "$N_CONDITIONS" ]; do + COND="$(printf '%s' "$RULE_JSON" | jq ".match[$i]")" + FIELD="$(printf '%s' "$COND" | jq -r '.field')" + PATTERN="$(printf '%s' "$COND" | jq -r '.pattern // empty')" + NOT_PATTERN="$(printf '%s' "$COND" | jq -r '.not_pattern // empty')" + FLAGS="$(printf '%s' "$COND" | jq -r '.flags // empty')" + + # Resolve which input variable to inspect. + case "$FIELD" in + command) VALUE="$INPUT_COMMAND" ;; + file_path) VALUE="$INPUT_FILE_PATH" ;; + content) VALUE="$INPUT_CONTENT" ;; + text) VALUE="$INPUT_TEXT" ;; + *) + # Unknown field — treat as condition failure (rule won't fire). + exit 0 + ;; + esac + + # Build grep flags. -E always (we author rules in ERE). -q for silent. + GREP_OPTS="-Eq" + case "$FLAGS" in + *i*) GREP_OPTS="-Eqi" ;; + esac + + # Check positive pattern: if specified, value must match. + if [ -n "$PATTERN" ]; then + if ! printf '%s' "$VALUE" | grep $GREP_OPTS -- "$PATTERN"; then + exit 0 + fi + fi + + # Check negative pattern: if specified, value must NOT match. 
+ if [ -n "$NOT_PATTERN" ]; then + if printf '%s' "$VALUE" | grep $GREP_OPTS -- "$NOT_PATTERN"; then + exit 0 + fi + fi + + i=$((i + 1)) +done + +# All conditions held — rule fires. +ACTION="$(printf '%s' "$RULE_JSON" | jq -r '.action')" +MESSAGE="$(printf '%s' "$RULE_JSON" | jq -r '.message')" + +# Header makes it easy to grep stderr for which rule fired. We deliberately +# do NOT print memory_ref — that field references private personal-memory +# filenames (~/.claude/.../memory/feedback_*.md) that are meaningless to other +# users of this public toolkit. The rule_id is the canonical cross-link. +{ + echo "" + echo "[make-no-mistakes:${RULE_ID}] action=${ACTION}" + printf '%s' "$MESSAGE" +} >&2 + +case "$ACTION" in + block) exit 2 ;; + warn) exit 0 ;; + *) exit 0 ;; +esac diff --git a/hooks/lib/parse-input.sh b/hooks/lib/parse-input.sh new file mode 100755 index 0000000..0ec67eb --- /dev/null +++ b/hooks/lib/parse-input.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# ============================================================================= +# parse-input.sh — Read tool_input JSON from stdin and export field vars. +# +# Used by hooks/{pre-bash,pre-edit,post-slack}.sh dispatchers and the rule +# evaluator. Extracts the four fields rules can target: +# +# INPUT_RAW — the full JSON payload (lazily kept for bypass scan) +# INPUT_COMMAND — tool_input.command (Bash) +# INPUT_FILE_PATH — tool_input.file_path (Edit, Write, MultiEdit) +# INPUT_CONTENT — tool_input.content +# OR tool_input.new_string (Edit/MultiEdit fall back) +# INPUT_TEXT — tool_input.text (Slack) +# OR tool_input.message (Slack draft) +# OR tool_input.content (Slack canvas) +# +# Designed to be sourced, not executed: +# source "${HOOKS_LIB:-$(dirname "$0")/lib}/parse-input.sh" +# +# Requires `jq`. If jq is missing, fields are left empty rather than failing +# the hook — a missing parser shouldn't block the user. 
+# ============================================================================= + +# Read full stdin once. Hooks read stdin exactly once per invocation, and we +# need the JSON available for both field extraction and bypass-marker scans. +INPUT_RAW="$(cat)" + +INPUT_COMMAND="" +INPUT_FILE_PATH="" +INPUT_CONTENT="" +INPUT_TEXT="" + +if command -v jq >/dev/null 2>&1; then + INPUT_COMMAND="$(printf '%s' "$INPUT_RAW" \ + | jq -r '.tool_input.command // empty' 2>/dev/null || true)" + INPUT_FILE_PATH="$(printf '%s' "$INPUT_RAW" \ + | jq -r '.tool_input.file_path // .tool_input.path // empty' 2>/dev/null || true)" + INPUT_CONTENT="$(printf '%s' "$INPUT_RAW" \ + | jq -r '.tool_input.content // .tool_input.new_string // empty' 2>/dev/null || true)" + INPUT_TEXT="$(printf '%s' "$INPUT_RAW" \ + | jq -r '.tool_input.text // .tool_input.message // .tool_input.content // empty' 2>/dev/null || true)" +fi + +export INPUT_RAW INPUT_COMMAND INPUT_FILE_PATH INPUT_CONTENT INPUT_TEXT diff --git a/hooks/post-slack.sh b/hooks/post-slack.sh new file mode 100755 index 0000000..9fc55a2 --- /dev/null +++ b/hooks/post-slack.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# ============================================================================= +# post-slack.sh — PostToolUse dispatcher for Slack message tools. +# +# Same dispatcher pattern, but PostToolUse runs AFTER the tool already +# executed — the message has already been sent. We can only emit warnings +# for the next message, never block the one just sent. Therefore we ignore +# any `block` action returned by eval-rule and always exit 0. All stderr +# warnings still surface to the user. +# +# Rules targeting Slack should usually have `action: warn` in rules.yaml. +# A rule with `action: block` here will print its message but won't change +# the outcome — keep that in mind when authoring rules for `applies_to: [Slack]`. 
+# ============================================================================= +set -uo pipefail + +if [ "${CLAUDE_DISABLE_PLUGIN_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +HOOKS_DIR="$(cd "$(dirname "$0")" && pwd)" +RULES_JSON="$HOOKS_DIR/rules/rules.json" +EVAL_RULE="$HOOKS_DIR/lib/eval-rule.sh" + +if [ ! -f "$RULES_JSON" ] || ! command -v jq >/dev/null 2>&1; then + exit 0 +fi + +INPUT_RAW="$(cat)" + +while IFS= read -r RULE_ID; do + [ -z "$RULE_ID" ] && continue + # Discard exit code — PostToolUse cannot block a tool call that already ran. + printf '%s' "$INPUT_RAW" | bash "$EVAL_RULE" "$RULE_ID" || true +done < <(jq -r '.[] | select(.applies_to | index("Slack")) | .id' "$RULES_JSON") + +exit 0 diff --git a/hooks/pre-bash.sh b/hooks/pre-bash.sh new file mode 100755 index 0000000..dd62db6 --- /dev/null +++ b/hooks/pre-bash.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# ============================================================================= +# pre-bash.sh — PreToolUse dispatcher for the Bash tool. +# +# Reads tool_input JSON from stdin once, captures the raw payload, then for +# each rule whose `applies_to` includes "Bash", invokes eval-rule.sh with the +# same payload. Aggregates exit codes: if ANY rule blocks, exit 2; otherwise +# exit 0. All stderr from rules is preserved. +# +# This is a THIN dispatcher — all rule logic lives in hooks/lib/eval-rule.sh +# and the rule definitions live in hooks/rules/rules.yaml (compiled to +# rules.json by scripts/build-rules.mjs). Adding a new Bash rule is editing +# rules.yaml + running `npm run build-rules`, never editing this file. +# ============================================================================= +set -uo pipefail + +# Honor the documented kill switch — CLAUDE_DISABLE_PLUGIN_HOOKS=1 lets a +# user temporarily bypass all manifest rules without uninstalling the plugin. 
+if [ "${CLAUDE_DISABLE_PLUGIN_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +HOOKS_DIR="$(cd "$(dirname "$0")" && pwd)" +RULES_JSON="$HOOKS_DIR/rules/rules.json" +EVAL_RULE="$HOOKS_DIR/lib/eval-rule.sh" + +if [ ! -f "$RULES_JSON" ] || ! command -v jq >/dev/null 2>&1; then + # Manifest missing or jq absent — fail open. The hook should never block + # the user because of an installation issue with the plugin itself. + exit 0 +fi + +# Capture stdin once. Each rule invocation re-pipes this same payload. +INPUT_RAW="$(cat)" + +EXIT_CODE=0 +while IFS= read -r RULE_ID; do + [ -z "$RULE_ID" ] && continue + if ! printf '%s' "$INPUT_RAW" | bash "$EVAL_RULE" "$RULE_ID"; then + EXIT_CODE=2 + fi +done < <(jq -r '.[] | select(.applies_to | index("Bash")) | .id' "$RULES_JSON") + +exit "$EXIT_CODE" diff --git a/hooks/pre-edit.sh b/hooks/pre-edit.sh new file mode 100755 index 0000000..f2d59a4 --- /dev/null +++ b/hooks/pre-edit.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash +# ============================================================================= +# pre-edit.sh — PreToolUse dispatcher for Edit, Write, and MultiEdit tools. +# +# Identical pattern to pre-bash.sh; selects rules whose `applies_to` matches +# any of Edit / Write / MultiEdit. The matcher in hooks.json (Edit|Write| +# MultiEdit) routes all three tool kinds to this single dispatcher. +# ============================================================================= +set -uo pipefail + +if [ "${CLAUDE_DISABLE_PLUGIN_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +HOOKS_DIR="$(cd "$(dirname "$0")" && pwd)" +RULES_JSON="$HOOKS_DIR/rules/rules.json" +EVAL_RULE="$HOOKS_DIR/lib/eval-rule.sh" + +if [ ! -f "$RULES_JSON" ] || ! command -v jq >/dev/null 2>&1; then + exit 0 +fi + +INPUT_RAW="$(cat)" + +EXIT_CODE=0 +while IFS= read -r RULE_ID; do + [ -z "$RULE_ID" ] && continue + if ! 
printf '%s' "$INPUT_RAW" | bash "$EVAL_RULE" "$RULE_ID"; then + EXIT_CODE=2 + fi +done < <(jq -r ' + .[] + | select( + (.applies_to | index("Edit")) + or (.applies_to | index("Write")) + or (.applies_to | index("MultiEdit")) + ) + | .id +' "$RULES_JSON") + +exit "$EXIT_CODE" diff --git a/hooks/rules/README.md b/hooks/rules/README.md new file mode 100644 index 0000000..215739f --- /dev/null +++ b/hooks/rules/README.md @@ -0,0 +1,114 @@ +# Rules manifest — contributor guide + +This directory holds the **single source of truth** for every rule the +make-no-mistakes hooks enforce. + +- `rules.yaml` — humans edit this. One row per rule, atomic fields. +- `rules.json` — generated build artifact. Runtime (`hooks/lib/eval-rule.sh`) + reads this; never edit by hand. +- `scripts/build-rules.mjs` — `npm run build-rules` regenerates `rules.json` + and validates schema. CI fails if `rules.json` is stale. + +## Schema + +Every rule is one YAML row with these fields: + +```yaml +- id: # required, used to cite the rule + description: # required, shown in stderr on match + applies_to: [Bash | Edit | Write | MultiEdit | Slack] # required, non-empty + match: # required, non-empty array (AND-chain) + - field: command | file_path | content | text + pattern: '' # optional; if present, field MUST match + not_pattern: '' # optional; if present, field must NOT match + flags: i # optional; only "i" (case-insensitive) supported + action: block | warn # required + bypass_marker: | null # optional acknowledgement token + memory_ref: # optional metadata only — NOT printed at runtime + message: | # required, multi-line stderr output + Block / warning text shown when this rule fires. + tests: # required, non-empty array + - name: + input: + tool_input: + command: '...' 
# OR file_path / content / text + expected_exit: 0 | 2 # 0 = allow / warn; 2 = block + expected_stderr_contains: '' # optional +``` + +### Match semantics + +Conditions in `match` form an **AND-chain**: the rule fires only when ALL +conditions hold. Within a single condition: + +- `pattern` and `not_pattern` operate on the same field. +- If both are present, `pattern` must match AND `not_pattern` must NOT match. +- If only `not_pattern` is present, the condition is "this field must NOT + match this pattern" (no positive requirement). + +To express OR across conditions, write multiple rules with the same effect +(e.g., one rule per `applies_to` tool). To express OR within a single +condition, use ERE alternation in `pattern` (e.g., `(php -r|set_config)`). + +### Bypass markers + +If `bypass_marker: foo-rule`, the rule is skipped when the literal substring +`// hook-bypass: foo-rule` or `# hook-bypass: foo-rule` appears anywhere in +the raw tool_input JSON. Bypasses are explicit acknowledgements — they sit +inside the command/content itself, not as silent flags. + +`bypass_marker: null` (or omitted) means the rule cannot be bypassed. + +## Adding a rule + +1. Edit `rules.yaml` — append a new row following the schema. +2. Run `npm run build-rules` to regenerate `rules.json`. Schema errors + surface at this step (missing fields, invalid action, etc.). +3. Run `npm run test-hooks` to verify your `tests` array passes. +4. Commit both `rules.yaml` AND `rules.json` (CI verifies they're in sync). + +## IP-leak guard (opt-in, gitignored) + +If the build output says `IP-leak guard active`, you have a local +`.private/forbidden-names.txt` (one regex per line) that the build script +checks against the serialized rules. Use this to prevent committing client +or org names in test fixtures, references, or messages. The list lives +gitignored so the toolkit itself never publishes the names. 
+ +Example `.private/forbidden-names.txt`: + +``` +# One regex per line, # for comments +mycompany +client-acme +client-beta +``` + +## Tier 2 — decomposing non-deterministic memories + +Many narrative-style guidelines can be converted to deterministic rules +using these techniques: + +- **T1 Narrative → structural assertion.** "Slack alerts group by project → + milestone → assignee" becomes a rule that parses the message and asserts + the labels appear in that order. +- **T2 Scope boundary → path-based block.** "Don't edit X owner's domain" + becomes a rule with `field: file_path` matching the owner's path glob. +- **T3 Behavioral mandate → Stop-event reminder.** "Never ask 'should I + continue?'" becomes a Stop hook that scans the recent assistant message + for stop-asking patterns and surfaces a reminder for the next turn. +- **T4 Preference → linter on alternative.** "Don't use 'refactor' in PR + titles" becomes a PreToolUse rule on the gh/Linear API that warns when + the title contains the discouraged word. +- **T5 Fall-through judgment → explicit gate.** "Don't mark complementary + issues as blocking" becomes a PreToolUse rule on save_issue that, when + `blockedBy` is set, requires a `blocking_reason:` key in the description. +- **T6 Context-sensitive → structural proxy.** "Filter PostHog from alerts" + becomes a PostToolUse rule that detects alert-shaped messages (3+ issue + refs in same body) and warns when posthog/dashboard/instrumentation + keywords appear. +- **T7 Multi-part → AND-chain of detectors.** Combine path + content + + exception detectors in `match` to express compound rules. + +Use these patterns when proposing new rules — most "soft" guidelines can be +converted to deterministic checks with a structural proxy. 
diff --git a/hooks/rules/rules.json b/hooks/rules/rules.json new file mode 100644 index 0000000..8a60be8 --- /dev/null +++ b/hooks/rules/rules.json @@ -0,0 +1,543 @@ +[ + { + "id": "ssh-db-mutation", + "description": "Block gcloud compute ssh with inline DB or Moodle config commands", + "applies_to": [ + "Bash" + ], + "match": [ + { + "field": "command", + "pattern": "gcloud[[:space:]]+compute[[:space:]]+ssh.*--command.*(php[[:space:]]+-r|set_config|mdl_|scorm_|mysql[[:space:]]+-|mysqldump|\\$DB->)" + } + ], + "action": "block", + "bypass_marker": "ssh-db-rule", + "memory_ref": "feedback_scripts_not_db.md", + "message": "BLOCKED: gcloud compute ssh + inline DB/Moodle command detected.\n\nUse a versioned script in scripts/ instead. If none fits, write+commit\none first. Bypass: add \"// hook-bypass: ssh-db-rule\" to the command.\n", + "tests": [ + { + "name": "blocks-ssh-php-r", + "input": { + "tool_input": { + "command": "gcloud compute ssh acme-vm --command=\"docker exec moodle php -r 'var_dump(1);'\"" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-ssh-set-config", + "input": { + "tool_input": { + "command": "gcloud compute ssh acme-vm --command=\"set_config('foo', 'bar', 'moodlecourse')\"" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-ssh-mysql-shell", + "input": { + "tool_input": { + "command": "gcloud compute ssh acme-vm --command=\"mysql -u root -e 'SELECT 1'\"" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-regular-ssh", + "input": { + "tool_input": { + "command": "gcloud compute ssh acme-vm --command=\"ls /var/log\"" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-bypass-marker", + "input": { + "tool_input": { + "command": "gcloud compute ssh acme-vm --command=\"php -r 'echo 1;'\" # hook-bypass: ssh-db-rule" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "gcloud-missing-project", + "description": "Warn when gcloud command is missing --project= flag", + "applies_to": [ + "Bash" + ], + "match": [ + { + "field": 
"command", + "pattern": "^[[:space:]]*gcloud([[:space:]]+--[a-zA-Z0-9_-]+(=[^[:space:]]+)?)*[[:space:]]+(compute|sql|run|iam|secrets|projects|storage|functions|builds|artifacts|container|auth)[[:space:]]", + "not_pattern": "--project[= ]|gcloud([[:space:]]+--[a-zA-Z0-9_-]+(=[^[:space:]]+)?)*[[:space:]]+(version|help|info|config)[[:space:]]" + } + ], + "action": "warn", + "bypass_marker": null, + "memory_ref": "feedback_gcloud_explicit_flags.md", + "references": [], + "message": "WARNING: gcloud command without explicit --project= flag.\n\nActive config drift can cause cross-project mistakes. Pass --project=\non every gcloud command (and --account= for multi-account setups).\n", + "tests": [ + { + "name": "warns-on-compute-without-project", + "input": { + "tool_input": { + "command": "gcloud compute instances list" + } + }, + "expected_exit": 0, + "expected_stderr_contains": "gcloud-missing-project" + }, + { + "name": "allows-with-project", + "input": { + "tool_input": { + "command": "gcloud compute instances list --project=acme-dev" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-version-subcommand", + "input": { + "tool_input": { + "command": "gcloud version" + } + }, + "expected_exit": 0 + }, + { + "name": "warns-with-global-flags-before-subcommand", + "input": { + "tool_input": { + "command": "gcloud --verbosity=debug compute instances list" + } + }, + "expected_exit": 0, + "expected_stderr_contains": "gcloud-missing-project" + } + ] + }, + { + "id": "prod-ops-no-approval", + "description": "Block production operations without explicit user acknowledgement", + "applies_to": [ + "Bash" + ], + "match": [ + { + "field": "command", + "pattern": "--project=([a-z0-9-]+-prod\\b|production\\b)|--env=prod\\b|\\bENV=prod\\b|--target=prod\\b" + } + ], + "action": "block", + "bypass_marker": "prod-ops", + "memory_ref": "feedback_never_touch_prod_without_approval.md", + "references": [], + "message": "BLOCKED: production operation detected.\n\nProduction changes 
require explicit user approval. Once confirmed, add\n\"// hook-bypass: prod-ops\" to the command to acknowledge.\n", + "tests": [ + { + "name": "blocks-prod-project-flag", + "input": { + "tool_input": { + "command": "gcloud --project=acme-prod compute instances list" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-env-prod", + "input": { + "tool_input": { + "command": "ENV=prod npm run deploy" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-staging", + "input": { + "tool_input": { + "command": "gcloud --project=acme-dev compute instances list" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-bypass", + "input": { + "tool_input": { + "command": "gcloud --project=acme-prod compute instances list # hook-bypass: prod-ops" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "destructive-db-ops", + "description": "Block destructive DB operations even with --local", + "applies_to": [ + "Bash" + ], + "match": [ + { + "field": "command", + "pattern": "supabase[[:space:]]+db[[:space:]]+(reset|push|repair)\\b|psql[^|]*\\b(DROP[[:space:]]+(TABLE|DATABASE|SCHEMA)|TRUNCATE|DELETE[[:space:]]+FROM)\\b" + } + ], + "action": "block", + "bypass_marker": "db-destructive", + "memory_ref": "feedback_db_ops_explicit_permission.md", + "references": [], + "message": "BLOCKED: destructive DB operation.\n\nEven with --local, ask explicit user permission first. 
Once confirmed,\nadd \"// hook-bypass: db-destructive\" to the command.\n", + "tests": [ + { + "name": "blocks-supabase-reset", + "input": { + "tool_input": { + "command": "supabase db reset --local" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-psql-drop-table", + "input": { + "tool_input": { + "command": "psql -c \"DROP TABLE users\"" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-psql-truncate", + "input": { + "tool_input": { + "command": "psql -c \"TRUNCATE users\"" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-supabase-status", + "input": { + "tool_input": { + "command": "supabase status" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-bypass", + "input": { + "tool_input": { + "command": "supabase db reset --local # hook-bypass: db-destructive" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "manual-edge-fn-deploy", + "description": "Block manual Edge Function deploy (CI-only)", + "applies_to": [ + "Bash" + ], + "match": [ + { + "field": "command", + "pattern": "supabase[[:space:]]+functions[[:space:]]+deploy\\b" + } + ], + "action": "block", + "bypass_marker": "edge-fn-manual", + "memory_ref": "feedback_edge_functions_via_ci.md", + "references": [], + "message": "BLOCKED: manual Edge Function deploy.\n\nDeploy via CI workflow (push to develop for staging, main for prod).\nBypass: add \"// hook-bypass: edge-fn-manual\" if a hotfix justifies it.\n", + "tests": [ + { + "name": "blocks-functions-deploy", + "input": { + "tool_input": { + "command": "supabase functions deploy my-function" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-functions-list", + "input": { + "tool_input": { + "command": "supabase functions list" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-bypass", + "input": { + "tool_input": { + "command": "supabase functions deploy my-function # hook-bypass: edge-fn-manual" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "minified-build-output", + "description": "Block writing 
minified content to build/ directories", + "applies_to": [ + "Edit", + "Write", + "MultiEdit" + ], + "match": [ + { + "field": "file_path", + "pattern": "amd/build/.*\\.min\\.js$|dist/.*\\.min\\.(js|css)$" + }, + { + "field": "content", + "pattern": "(var [a-z]=function|;function [a-z]\\(\\)|^[a-zA-Z0-9_$]+=function\\([a-zA-Z0-9_$,]*\\)\\{)" + } + ], + "action": "block", + "bypass_marker": "minified-build", + "memory_ref": "feedback_no_minified_build.md", + "references": [], + "message": "BLOCKED: minified content being written to build/.\n\nBuild artifacts must stay human-readable. Replace with a readable copy\nof src/. Bypass: \"// hook-bypass: minified-build\" inside the content.\n", + "tests": [ + { + "name": "blocks-minified-amd-build", + "input": { + "tool_input": { + "file_path": "plugins/foo/amd/build/main.min.js", + "content": "var a=function(b){return b};var c=function(){};" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-readable-amd-build", + "input": { + "tool_input": { + "file_path": "plugins/foo/amd/build/main.min.js", + "content": "define(['jquery'], function($) {\n 'use strict';\n return {\n init: function() { console.log('hello'); }\n };\n});\n" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-non-build-paths", + "input": { + "tool_input": { + "file_path": "src/utils/helper.js", + "content": "var a=function(b){return b};" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "secrets-hardcoded", + "description": "Block hardcoded credential patterns being written to source files", + "applies_to": [ + "Edit", + "Write", + "MultiEdit" + ], + "match": [ + { + "field": "content", + "pattern": "(password|secret|api_key|apikey|token|access_key|private_key)[\"\\']?[[:space:]]*[:=][[:space:]]*[\"\\']?[A-Za-z0-9_!@#$%^&*+/=-]{12,}", + "flags": "i" + }, + { + "field": "file_path", + "not_pattern": "\\.env\\.example|\\.(test|spec)\\.|README|\\.md$|fixtures/|mocks?/" + } + ], + "action": "block", + "bypass_marker": "secret-leak", + 
"memory_ref": "feedback_never_leak_secrets.md", + "references": [], + "message": "POTENTIAL SECRET LEAK: hardcoded credential pattern detected.\n\nUse Secret Manager / .env / local memory for real values. If this is a\nplaceholder or test fixture, prefix with \"EXAMPLE_\" / \"CHANGEME_\", move\nto .env.example, or add \"// hook-bypass: secret-leak\" near the line.\n", + "tests": [ + { + "name": "blocks-hardcoded-password-in-src", + "input": { + "tool_input": { + "file_path": "src/config.ts", + "content": "const password = \"supersecret123ABC!\"" + } + }, + "expected_exit": 2 + }, + { + "name": "blocks-hardcoded-api-key", + "input": { + "tool_input": { + "file_path": "src/lib/api.ts", + "content": "const api_key: \"sk_live_abc123def456ghi789\"" + } + }, + "expected_exit": 2 + }, + { + "name": "allows-env-example", + "input": { + "tool_input": { + "file_path": ".env.example", + "content": "PASSWORD=changeme_supersecret_placeholder" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-bypass", + "input": { + "tool_input": { + "file_path": "src/config.ts", + "content": "const password = \"supersecret123ABC!\" // hook-bypass: secret-leak" + } + }, + "expected_exit": 0 + }, + { + "name": "allows-test-file", + "input": { + "tool_input": { + "file_path": "src/auth.test.ts", + "content": "const password = \"supersecret123ABC!\"" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "slack-unicode-bullets", + "description": "Warn on Unicode bullets in Slack messages (use - instead)", + "applies_to": [ + "Slack" + ], + "match": [ + { + "field": "text", + "pattern": "[•◦▪▫]" + } + ], + "action": "warn", + "bypass_marker": null, + "memory_ref": "feedback_slack_no_unicode_bullets.md", + "references": [], + "message": "Slack style: Unicode bullets (•◦▪▫) detected. 
Use \"-\" instead.\n", + "tests": [ + { + "name": "warns-on-bullet", + "input": { + "tool_input": { + "text": "Hola • mundo" + } + }, + "expected_exit": 0, + "expected_stderr_contains": "slack-unicode-bullets" + }, + { + "name": "allows-dash-bullet", + "input": { + "tool_input": { + "text": "Hola - mundo" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "slack-tables-no-codeblock", + "description": "Warn when a markdown table separator row appears in Slack text", + "applies_to": [ + "Slack" + ], + "match": [ + { + "field": "text", + "pattern": "^\\|[-:|[:space:]]+\\|[[:space:]]*$" + } + ], + "action": "warn", + "bypass_marker": null, + "memory_ref": "feedback_slack_tables.md", + "message": "Slack style: markdown table syntax detected.\nSlack mrkdwn does not render bare pipe-tables — wrap the table in\ntriple-backtick fences for it to display correctly.\n", + "tests": [ + { + "name": "warns-on-bare-table", + "input": { + "tool_input": { + "text": "Resultados:\n| col1 | col2 | col3 |\n| ---- | ---- | ---- |\n| a | b | c |\n" + } + }, + "expected_exit": 0, + "expected_stderr_contains": "slack-tables-no-codeblock" + }, + { + "name": "allows-message-with-stray-pipes", + "input": { + "tool_input": { + "text": "Just a paragraph with | a stray pipe | character — no table here.\n" + } + }, + "expected_exit": 0 + } + ] + }, + { + "id": "slack-spanish-tildes", + "description": "Warn when common Spanish words appear without tildes", + "applies_to": [ + "Slack" + ], + "match": [ + { + "field": "text", + "pattern": "\\b(migracion|verificacion|analisis|patron|tambien|configuracion|implementacion|version|integracion|notificacion|aplicacion|comunicacion)\\b", + "flags": "i" + } + ], + "action": "warn", + "bypass_marker": null, + "memory_ref": "feedback_spanish_accents.md", + "references": [], + "message": "Slack style: Spanish word without tilde detected. 
Always write with accents\n(migración, verificación, análisis, patrón, también, configuración, ...).\n", + "tests": [ + { + "name": "warns-on-tildeless-migracion", + "input": { + "tool_input": { + "text": "La migracion fue exitosa" + } + }, + "expected_exit": 0, + "expected_stderr_contains": "slack-spanish-tildes" + }, + { + "name": "allows-with-tilde", + "input": { + "tool_input": { + "text": "La migración fue exitosa" + } + }, + "expected_exit": 0 + } + ] + } +] diff --git a/hooks/rules/rules.yaml b/hooks/rules/rules.yaml new file mode 100644 index 0000000..59e17b1 --- /dev/null +++ b/hooks/rules/rules.yaml @@ -0,0 +1,412 @@ +# ============================================================================= +# make-no-mistakes — Rules manifest (SSoT) +# +# This file is the single source of truth for all enforcement rules. +# Memories (~/.claude/.../memory/feedback_*.md) and per-repo CLAUDE.md +# citations should reference rule ids defined here, not paraphrase the rules. +# +# Schema (per row): +# id: kebab-case unique identifier +# description: one-line summary (shown in stderr on match) +# applies_to: [Bash | Edit | Write | MultiEdit | Slack] +# Determines which dispatcher (pre-bash.sh, pre-edit.sh, post-slack.sh) +# picks up the rule. +# match: array of conditions, ALL must hold (AND-chain) +# - field: command | file_path | content | text +# pattern: ERE regex; if present, field must match +# not_pattern: ERE regex; if present, field must NOT match +# flags: i (case-insensitive) | (omitted, case-sensitive) +# action: block | warn +# bypass_marker: kebab-case id used in "// hook-bypass: " comments; +# null if no bypass is offered. Bypass tokens are searched in any +# string field of tool_input. +# memory_ref: filename of the corresponding feedback memory +# references: array of external doc references (free text) +# message: multi-line stderr output shown on match +# tests: array of unit tests +# - name: kebab-case test id +# input: { tool_input: { ... 
} } # JSON passed to the hook on stdin +# expected_exit: 0 | 2 +# +# How to add a new rule: see hooks/rules/README.md +# ============================================================================= + +- id: ssh-db-mutation + description: Block gcloud compute ssh with inline DB or Moodle config commands + applies_to: [Bash] + match: + - field: command + pattern: 'gcloud[[:space:]]+compute[[:space:]]+ssh.*--command.*(php[[:space:]]+-r|set_config|mdl_|scorm_|mysql[[:space:]]+-|mysqldump|\$DB->)' + action: block + bypass_marker: ssh-db-rule + memory_ref: feedback_scripts_not_db.md + message: | + BLOCKED: gcloud compute ssh + inline DB/Moodle command detected. + + Use a versioned script in scripts/ instead. If none fits, write+commit + one first. Bypass: add "// hook-bypass: ssh-db-rule" to the command. + tests: + - name: blocks-ssh-php-r + input: + tool_input: + command: 'gcloud compute ssh acme-vm --command="docker exec moodle php -r ''var_dump(1);''"' + expected_exit: 2 + - name: blocks-ssh-set-config + input: + tool_input: + command: 'gcloud compute ssh acme-vm --command="set_config(''foo'', ''bar'', ''moodlecourse'')"' + expected_exit: 2 + - name: blocks-ssh-mysql-shell + input: + tool_input: + command: 'gcloud compute ssh acme-vm --command="mysql -u root -e ''SELECT 1''"' + expected_exit: 2 + - name: allows-regular-ssh + input: + tool_input: + command: 'gcloud compute ssh acme-vm --command="ls /var/log"' + expected_exit: 0 + - name: allows-bypass-marker + input: + tool_input: + command: 'gcloud compute ssh acme-vm --command="php -r ''echo 1;''" # hook-bypass: ssh-db-rule' + expected_exit: 0 + +- id: gcloud-missing-project + description: Warn when gcloud command is missing --project= flag + applies_to: [Bash] + match: + # Allow zero or more global flags (--verbosity=debug, --quiet, etc.) + # between `gcloud` and the subcommand. 
Without this allowance, a + # command like `gcloud --verbosity=debug compute instances list` + # would slip past the warning even though it lacks --project=. + - field: command + pattern: '^[[:space:]]*gcloud([[:space:]]+--[a-zA-Z0-9_-]+(=[^[:space:]]+)?)*[[:space:]]+(compute|sql|run|iam|secrets|projects|storage|functions|builds|artifacts|container|auth)[[:space:]]' + not_pattern: '--project[= ]|gcloud([[:space:]]+--[a-zA-Z0-9_-]+(=[^[:space:]]+)?)*[[:space:]]+(version|help|info|config)[[:space:]]' + action: warn + bypass_marker: null + memory_ref: feedback_gcloud_explicit_flags.md + references: [] + message: | + WARNING: gcloud command without explicit --project= flag. + + Active config drift can cause cross-project mistakes. Pass --project= + on every gcloud command (and --account= for multi-account setups). + tests: + - name: warns-on-compute-without-project + input: + tool_input: + command: 'gcloud compute instances list' + expected_exit: 0 + expected_stderr_contains: 'gcloud-missing-project' + - name: allows-with-project + input: + tool_input: + command: 'gcloud compute instances list --project=acme-dev' + expected_exit: 0 + - name: allows-version-subcommand + input: + tool_input: + command: 'gcloud version' + expected_exit: 0 + - name: warns-with-global-flags-before-subcommand + input: + tool_input: + command: 'gcloud --verbosity=debug compute instances list' + expected_exit: 0 + expected_stderr_contains: 'gcloud-missing-project' + +- id: prod-ops-no-approval + description: Block production operations without explicit user acknowledgement + applies_to: [Bash] + match: + - field: command + pattern: '--project=([a-z0-9-]+-prod\b|production\b)|--env=prod\b|\bENV=prod\b|--target=prod\b' + action: block + bypass_marker: prod-ops + memory_ref: feedback_never_touch_prod_without_approval.md + references: [] + message: | + BLOCKED: production operation detected. + + Production changes require explicit user approval. 
Once confirmed, add + "// hook-bypass: prod-ops" to the command to acknowledge. + tests: + - name: blocks-prod-project-flag + input: + tool_input: + command: 'gcloud --project=acme-prod compute instances list' + expected_exit: 2 + - name: blocks-env-prod + input: + tool_input: + command: 'ENV=prod npm run deploy' + expected_exit: 2 + - name: allows-staging + input: + tool_input: + command: 'gcloud --project=acme-dev compute instances list' + expected_exit: 0 + - name: allows-bypass + input: + tool_input: + command: 'gcloud --project=acme-prod compute instances list # hook-bypass: prod-ops' + expected_exit: 0 + +- id: destructive-db-ops + description: Block destructive DB operations even with --local + applies_to: [Bash] + match: + - field: command + pattern: 'supabase[[:space:]]+db[[:space:]]+(reset|push|repair)\b|psql[^|]*\b(DROP[[:space:]]+(TABLE|DATABASE|SCHEMA)|TRUNCATE|DELETE[[:space:]]+FROM)\b' + action: block + bypass_marker: db-destructive + memory_ref: feedback_db_ops_explicit_permission.md + references: [] + message: | + BLOCKED: destructive DB operation. + + Even with --local, ask explicit user permission first. Once confirmed, + add "// hook-bypass: db-destructive" to the command. 
+ tests: + - name: blocks-supabase-reset + input: + tool_input: + command: 'supabase db reset --local' + expected_exit: 2 + - name: blocks-psql-drop-table + input: + tool_input: + command: 'psql -c "DROP TABLE users"' + expected_exit: 2 + - name: blocks-psql-truncate + input: + tool_input: + command: 'psql -c "TRUNCATE users"' + expected_exit: 2 + - name: allows-supabase-status + input: + tool_input: + command: 'supabase status' + expected_exit: 0 + - name: allows-bypass + input: + tool_input: + command: 'supabase db reset --local # hook-bypass: db-destructive' + expected_exit: 0 + +- id: manual-edge-fn-deploy + description: Block manual Edge Function deploy (CI-only) + applies_to: [Bash] + match: + - field: command + pattern: 'supabase[[:space:]]+functions[[:space:]]+deploy\b' + action: block + bypass_marker: edge-fn-manual + memory_ref: feedback_edge_functions_via_ci.md + references: [] + message: | + BLOCKED: manual Edge Function deploy. + + Deploy via CI workflow (push to develop for staging, main for prod). + Bypass: add "// hook-bypass: edge-fn-manual" if a hotfix justifies it. 
+ tests: + - name: blocks-functions-deploy + input: + tool_input: + command: 'supabase functions deploy my-function' + expected_exit: 2 + - name: allows-functions-list + input: + tool_input: + command: 'supabase functions list' + expected_exit: 0 + - name: allows-bypass + input: + tool_input: + command: 'supabase functions deploy my-function # hook-bypass: edge-fn-manual' + expected_exit: 0 + +- id: minified-build-output + description: Block writing minified content to build/ directories + applies_to: [Edit, Write, MultiEdit] + match: + - field: file_path + pattern: 'amd/build/.*\.min\.js$|dist/.*\.min\.(js|css)$' + - field: content + pattern: '(var [a-z]=function|;function [a-z]\(\)|^[a-zA-Z0-9_$]+=function\([a-zA-Z0-9_$,]*\)\{)' + action: block + bypass_marker: minified-build + memory_ref: feedback_no_minified_build.md + references: [] + message: | + BLOCKED: minified content being written to build/. + + Build artifacts must stay human-readable. Replace with a readable copy + of src/. Bypass: "// hook-bypass: minified-build" inside the content. 
+ tests: + - name: blocks-minified-amd-build + input: + tool_input: + file_path: 'plugins/foo/amd/build/main.min.js' + content: 'var a=function(b){return b};var c=function(){};' + expected_exit: 2 + - name: allows-readable-amd-build + input: + tool_input: + file_path: 'plugins/foo/amd/build/main.min.js' + content: | + define(['jquery'], function($) { + 'use strict'; + return { + init: function() { console.log('hello'); } + }; + }); + expected_exit: 0 + - name: allows-non-build-paths + input: + tool_input: + file_path: 'src/utils/helper.js' + content: 'var a=function(b){return b};' + expected_exit: 0 + +- id: secrets-hardcoded + description: Block hardcoded credential patterns being written to source files + applies_to: [Edit, Write, MultiEdit] + match: + - field: content + pattern: '(password|secret|api_key|apikey|token|access_key|private_key)["\'']?[[:space:]]*[:=][[:space:]]*["\'']?[A-Za-z0-9_!@#$%^&*+/=-]{12,}' + flags: i + - field: file_path + not_pattern: '\.env\.example|\.(test|spec)\.|README|\.md$|fixtures/|mocks?/' + action: block + bypass_marker: secret-leak + memory_ref: feedback_never_leak_secrets.md + references: [] + message: | + POTENTIAL SECRET LEAK: hardcoded credential pattern detected. + + Use Secret Manager / .env / local memory for real values. If this is a + placeholder or test fixture, prefix with "EXAMPLE_" / "CHANGEME_", move + to .env.example, or add "// hook-bypass: secret-leak" near the line. 
+ tests: + - name: blocks-hardcoded-password-in-src + input: + tool_input: + file_path: 'src/config.ts' + content: 'const password = "supersecret123ABC!"' + expected_exit: 2 + - name: blocks-hardcoded-api-key + input: + tool_input: + file_path: 'src/lib/api.ts' + content: 'const api_key: "sk_live_abc123def456ghi789"' + expected_exit: 2 + - name: allows-env-example + input: + tool_input: + file_path: '.env.example' + content: 'PASSWORD=changeme_supersecret_placeholder' + expected_exit: 0 + - name: allows-bypass + input: + tool_input: + file_path: 'src/config.ts' + content: 'const password = "supersecret123ABC!" // hook-bypass: secret-leak' + expected_exit: 0 + - name: allows-test-file + input: + tool_input: + file_path: 'src/auth.test.ts' + content: 'const password = "supersecret123ABC!"' + expected_exit: 0 + +- id: slack-unicode-bullets + description: Warn on Unicode bullets in Slack messages (use - instead) + applies_to: [Slack] + match: + - field: text + pattern: '[•◦▪▫]' + action: warn + bypass_marker: null + memory_ref: feedback_slack_no_unicode_bullets.md + references: [] + message: | + Slack style: Unicode bullets (•◦▪▫) detected. Use "-" instead. + tests: + - name: warns-on-bullet + input: + tool_input: + text: 'Hola • mundo' + expected_exit: 0 + expected_stderr_contains: slack-unicode-bullets + - name: allows-dash-bullet + input: + tool_input: + text: 'Hola - mundo' + expected_exit: 0 + +- id: slack-tables-no-codeblock + description: Warn when a markdown table separator row appears in Slack text + applies_to: [Slack] + match: + # Match the distinctive markdown-table separator row (|---|---|). + # Plain pipe-prose without a separator is too noisy; the separator + # almost exclusively appears inside actual tables. False-positive: a + # table legitimately wrapped in a fenced code block still triggers the + # warning, but warn-only is tolerable. 
Detecting "inside a code block" + # reliably from a single regex is not feasible in bash; line-by-line + # parsing would be needed to do it perfectly. + - field: text + pattern: '^\|[-:|[:space:]]+\|[[:space:]]*$' + action: warn + bypass_marker: null + memory_ref: feedback_slack_tables.md + message: | + Slack style: markdown table syntax detected. + Slack mrkdwn does not render bare pipe-tables — wrap the table in + triple-backtick fences for it to display correctly. + tests: + - name: warns-on-bare-table + input: + tool_input: + text: | + Resultados: + | col1 | col2 | col3 | + | ---- | ---- | ---- | + | a | b | c | + expected_exit: 0 + expected_stderr_contains: slack-tables-no-codeblock + - name: allows-message-with-stray-pipes + input: + tool_input: + text: | + Just a paragraph with | a stray pipe | character — no table here. + expected_exit: 0 + +- id: slack-spanish-tildes + description: Warn when common Spanish words appear without tildes + applies_to: [Slack] + match: + - field: text + pattern: '\b(migracion|verificacion|analisis|patron|tambien|configuracion|implementacion|version|integracion|notificacion|aplicacion|comunicacion)\b' + flags: i + action: warn + bypass_marker: null + memory_ref: feedback_spanish_accents.md + references: [] + message: | + Slack style: Spanish word without tilde detected. Always write with accents + (migración, verificación, análisis, patrón, también, configuración, ...). 
+ tests: + - name: warns-on-tildeless-migracion + input: + tool_input: + text: 'La migracion fue exitosa' + expected_exit: 0 + expected_stderr_contains: slack-spanish-tildes + - name: allows-with-tilde + input: + tool_input: + text: 'La migración fue exitosa' + expected_exit: 0 diff --git a/hooks/test-hooks.sh b/hooks/test-hooks.sh new file mode 100755 index 0000000..4c377e4 --- /dev/null +++ b/hooks/test-hooks.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +# ============================================================================= +# test-hooks.sh — Parametrized test runner for the rules manifest. +# +# Reads each rule's `tests` array from rules.json, materializes the input +# JSON in-process via jq, pipes it to eval-rule.sh, and asserts the exit +# code matches `expected_exit`. Optionally checks `expected_stderr_contains`. +# +# Why this design: test fixtures live in rules.json (a file), not as inline +# strings in this script. That way the test runner's command line only +# contains "jq ... | bash eval-rule.sh ..." — never the literal payload. +# This matters because installed PreToolUse hooks (user-level or other +# plugins) inspect the bash command line; if test fixtures lived inline, +# they'd trigger those hooks during testing. +# +# Usage: bash hooks/test-hooks.sh +# Exit: 0 if all pass; 1 if any fail. +# ============================================================================= +set -uo pipefail + +HOOKS_DIR="$(cd "$(dirname "$0")" && pwd)" +RULES_JSON="$HOOKS_DIR/rules/rules.json" +EVAL_RULE="$HOOKS_DIR/lib/eval-rule.sh" + +if [ ! -f "$RULES_JSON" ]; then + echo "ERROR: $RULES_JSON not found. Run 'npm run build-rules' first." >&2 + exit 1 +fi +if ! command -v jq >/dev/null 2>&1; then + echo "ERROR: jq is required to run hook tests." 
>&2 + exit 1 +fi + +PASS=0 +FAIL=0 +FAIL_DETAILS=() + +# Each line: "<rule_id>\t<test_index>" +PAIRS="$(jq -r '.[] | .id as $id | (.tests | to_entries[] | "\($id)\t\(.key)")' "$RULES_JSON")" + +while IFS=$'\t' read -r RULE_ID TEST_IDX; do + [ -z "$RULE_ID" ] && continue + + TEST_NAME="$(jq -r --arg id "$RULE_ID" --argjson idx "$TEST_IDX" \ + '.[] | select(.id == $id) | .tests[$idx].name // ("test_" + ($idx | tostring))' \ + "$RULES_JSON")" + + EXPECTED_EXIT="$(jq -r --arg id "$RULE_ID" --argjson idx "$TEST_IDX" \ + '.[] | select(.id == $id) | .tests[$idx].expected_exit // 0' "$RULES_JSON")" + + EXPECTED_STDERR="$(jq -r --arg id "$RULE_ID" --argjson idx "$TEST_IDX" \ + '.[] | select(.id == $id) | .tests[$idx].expected_stderr_contains // ""' \ + "$RULES_JSON")" + + # Materialize input as JSON. The whole .input object is the tool_input + # envelope expected by eval-rule.sh's parse-input.sh. + INPUT="$(jq -c --arg id "$RULE_ID" --argjson idx "$TEST_IDX" \ + '.[] | select(.id == $id) | .tests[$idx].input' "$RULES_JSON")" + + # Capture stderr separately so we can grep it for expected_stderr_contains. + STDERR_FILE="$(mktemp)" + ACTUAL_EXIT=0 + printf '%s' "$INPUT" | bash "$EVAL_RULE" "$RULE_ID" 2>"$STDERR_FILE" || ACTUAL_EXIT=$? + + STATUS="PASS" + REASON="" + + if [ "$ACTUAL_EXIT" != "$EXPECTED_EXIT" ]; then + STATUS="FAIL" + REASON="exit expected=$EXPECTED_EXIT actual=$ACTUAL_EXIT" + elif [ -n "$EXPECTED_STDERR" ]; then + if !
grep -qF -- "$EXPECTED_STDERR" "$STDERR_FILE"; then + STATUS="FAIL" + REASON="stderr did not contain '$EXPECTED_STDERR'" + fi + fi + + if [ "$STATUS" = "PASS" ]; then + PASS=$((PASS + 1)) + echo " PASS ${RULE_ID} / ${TEST_NAME}" + else + FAIL=$((FAIL + 1)) + echo " FAIL ${RULE_ID} / ${TEST_NAME} -- ${REASON}" + FAIL_DETAILS+=("${RULE_ID}/${TEST_NAME}: ${REASON}") + fi + + rm -f "$STDERR_FILE" +done <<< "$PAIRS" + +echo "" +TOTAL=$((PASS + FAIL)) +echo "Results: ${PASS} / ${TOTAL} passed" + +if [ "$FAIL" -gt 0 ]; then + echo "" + echo "Failures:" + for line in "${FAIL_DETAILS[@]}"; do + echo " - $line" + done + exit 1 +fi +exit 0 diff --git a/package-lock.json b/package-lock.json index df56cbb..4c25823 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@lapc506/make-no-mistakes", - "version": "1.3.1", + "version": "1.5.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@lapc506/make-no-mistakes", - "version": "1.3.1", + "version": "1.5.0", "license": "BSL-1.1", "dependencies": { "@opencode-ai/plugin": "^0.7.0", @@ -17,6 +17,7 @@ }, "devDependencies": { "@types/node": "^24.0.0", + "js-yaml": "^4.1.1", "tsup": "^8.5.0", "typescript": "^5.9.2", "vitest": "^3.2.4" @@ -514,6 +515,18 @@ "typescript": "^5.5.3" } }, + "node_modules/@hey-api/openapi-ts/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", @@ -1651,9 +1664,9 @@ "license": "MIT" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": 
"sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "license": "MIT", "dependencies": { "argparse": "^2.0.1" diff --git a/package.json b/package.json index 7548d72..b9db4a2 100644 --- a/package.json +++ b/package.json @@ -1,25 +1,58 @@ { "name": "@lapc506/make-no-mistakes", - "version": "1.4.1", - "description": "The disciplined dev lifecycle — implement issues, review PRs, sync releases, test E2E, manage sessions, and stash secrets via OS-native prompts. OpenCode + Claude Code plugin.", + "version": "1.5.0", + "description": "The disciplined dev lifecycle — implement issues, review PRs, sync releases, test E2E, manage sessions, stash secrets, and enforce manifest-driven tool-call hooks (no SSH+DB, no manual prod, no minified build, no secret leaks, Slack format). 
OpenCode + Claude Code plugin.", "type": "module", "main": "./dist/index.js", "types": "./dist/index.d.ts", - "bin": { "make-no-mistakes": "dist/cli.js" }, + "bin": { + "make-no-mistakes": "dist/cli.js" + }, "exports": { - ".": { "types": "./dist/index.d.ts", "import": "./dist/index.js" }, - "./cli": { "import": "./dist/cli.js" } + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + }, + "./cli": { + "import": "./dist/cli.js" + } + }, + "files": [ + "dist/", + "commands/", + "agents/", + "skills/", + "scripts/", + "hooks/", + "slack-config.example.json", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=20.11.0" + }, + "publishConfig": { + "access": "public" }, - "files": ["dist/", "commands/", "agents/", "skills/", "scripts/", "slack-config.example.json", "README.md", "LICENSE"], - "engines": { "node": ">=20.11.0" }, - "publishConfig": { "access": "public" }, "scripts": { "build": "tsup src/index.ts --format esm --dts --clean && tsup src/cli.ts --format esm --clean false", + "build-rules": "node scripts/build-rules.mjs", + "test-hooks": "bash hooks/test-hooks.sh", "test": "echo 'no tests yet' && exit 0", - "prepublishOnly": "npm run build" + "prepublishOnly": "npm run build && npm run build-rules" + }, + "keywords": [ + "opencode", + "plugin", + "claude-code", + "workflow", + "linear", + "hooks" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/DojoCodingLabs/make-no-mistakes-toolkit.git" }, - "keywords": ["opencode", "plugin", "claude-code", "workflow", "linear"], - "repository": { "type": "git", "url": "git+https://github.com/DojoCodingLabs/make-no-mistakes-toolkit.git" }, "author": "Luis Andres Pena Castillo", "license": "BSL-1.1", "dependencies": { @@ -28,6 +61,7 @@ }, "devDependencies": { "@types/node": "^24.0.0", + "js-yaml": "^4.1.1", "tsup": "^8.5.0", "typescript": "^5.9.2", "vitest": "^3.2.4" diff --git a/scripts/build-rules.mjs b/scripts/build-rules.mjs new file mode 100755 index 0000000..e10cf2d --- 
/dev/null +++ b/scripts/build-rules.mjs @@ -0,0 +1,117 @@ +#!/usr/bin/env node +// ============================================================================= +// build-rules.mjs — Convert hooks/rules/rules.yaml → hooks/rules/rules.json +// +// rules.yaml is the SSoT humans edit. rules.json is the runtime artifact +// hooks load via jq (no yaml parser needed at runtime). +// +// CI runs this script and fails if `git diff hooks/rules/rules.json` is +// non-empty — this guarantees rules.json never drifts from rules.yaml. +// +// Usage: node scripts/build-rules.mjs +// ============================================================================= +import { existsSync, readFileSync, writeFileSync } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; +import yaml from 'js-yaml'; + +const repoRoot = join(dirname(fileURLToPath(import.meta.url)), '..'); +const yamlPath = join(repoRoot, 'hooks', 'rules', 'rules.yaml'); +const jsonPath = join(repoRoot, 'hooks', 'rules', 'rules.json'); + +// IP-leak guard. The plugin is BSL-1.1 public-source, so we cannot hardcode +// client / org names anywhere in committed code (that itself would be a leak). +// Instead, each dev maintains a gitignored .private/forbidden-names.txt with +// one regex per line. If the file exists, the build fails when any of those +// names appear in rules.yaml. If the file doesn't exist, the check is skipped. +// The check is opt-in and reusable for any user of this toolkit who works +// with private clients. See hooks/rules/README.md. 
+const forbiddenFile = join(repoRoot, '.private', 'forbidden-names.txt'); +let FORBIDDEN_PATTERNS = []; +if (existsSync(forbiddenFile)) { + FORBIDDEN_PATTERNS = readFileSync(forbiddenFile, 'utf8') + .split('\n') + .map((l) => l.trim()) + .filter((l) => l.length > 0 && !l.startsWith('#')) + .map((l) => new RegExp(l, 'i')); + console.log( + `IP-leak guard active (${FORBIDDEN_PATTERNS.length} pattern(s) loaded from .private/forbidden-names.txt)`, + ); +} + +const source = readFileSync(yamlPath, 'utf8'); +const rules = yaml.load(source); + +if (!Array.isArray(rules)) { + console.error('rules.yaml must be an array of rule objects'); + process.exit(1); +} + +const ids = new Set(); +for (const rule of rules) { + if (!rule.id) { + console.error('rule missing id field:', rule); + process.exit(1); + } + if (ids.has(rule.id)) { + console.error(`duplicate rule id: ${rule.id}`); + process.exit(1); + } + ids.add(rule.id); + + if (!rule.applies_to || !Array.isArray(rule.applies_to) || rule.applies_to.length === 0) { + console.error(`rule ${rule.id} missing applies_to (non-empty array required)`); + process.exit(1); + } + if (!rule.match || !Array.isArray(rule.match) || rule.match.length === 0) { + console.error(`rule ${rule.id} missing match (non-empty array required)`); + process.exit(1); + } + if (!['block', 'warn'].includes(rule.action)) { + console.error(`rule ${rule.id} action must be block or warn, got: ${rule.action}`); + process.exit(1); + } + if (!rule.tests || !Array.isArray(rule.tests) || rule.tests.length === 0) { + console.error(`rule ${rule.id} missing tests (non-empty array required)`); + process.exit(1); + } + if (typeof rule.message !== 'string' || rule.message.trim().length === 0) { + console.error(`rule ${rule.id} missing required message (non-empty string)`); + process.exit(1); + } + // bypass_marker is optional, but when present must be kebab-case. 
+ // Without this constraint, an author could pick a marker containing + // ERE-special characters (., +, (, [, etc.), which eval-rule.sh would + // interpolate verbatim into a grep pattern — leading to silent + // mismatches or grep errors that escalate to unintended blocks. + if (rule.bypass_marker !== undefined && rule.bypass_marker !== null) { + if ( + typeof rule.bypass_marker !== 'string' || + !/^[a-z0-9-]+$/.test(rule.bypass_marker) + ) { + console.error( + `rule ${rule.id} has invalid bypass_marker: must be kebab-case (^[a-z0-9-]+$) or null, got: ${JSON.stringify(rule.bypass_marker)}`, + ); + process.exit(1); + } + } + + // IP-leak guard: scan the entire rule (serialized) for forbidden patterns. + // This catches leaks in references, messages, test fixtures, anywhere. + const serialized = JSON.stringify(rule); + for (const forbidden of FORBIDDEN_PATTERNS) { + if (forbidden.test(serialized)) { + console.error( + `rule ${rule.id} contains a forbidden client/org name (pattern: ${forbidden}).`, + ); + console.error( + ' Public toolkit rules must be agnostic. Use generic placeholders like', + ); + console.error(' acme-prod, myproject-dev, example.com, etc.'); + process.exit(1); + } + } +} + +writeFileSync(jsonPath, JSON.stringify(rules, null, 2) + '\n'); +console.log(`Generated ${jsonPath} (${rules.length} rules)`);