diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4c44bc3..020f3f7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,350 +22,152 @@ jobs:
contents: read
pull-requests: write
issues: write
-
steps:
- - name: Checkout Base Branch
+ - name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
-
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "10.0.102"
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: "20"
-
- - name: Fetch PR Head Commit
- run: git fetch --no-tags --prune origin ${{ github.event.pull_request.head.sha }}
-
- - name: Derive Versioning Decision
- id: versioning
- run: |
- set +e
- guard_output=$(BASE_REF=origin/main HEAD_REF=${{ github.event.pull_request.head.sha }} tools/versioning/check-versioning.sh 2>&1)
- guard_exit=$?
- set -e
-
- echo "$guard_output"
-
- required=$(printf '%s\n' "$guard_output" | sed -n 's/.*required=\([a-z]*\).*/\1/p' | tail -n1)
- actual=$(printf '%s\n' "$guard_output" | sed -n 's/.*actual=\([a-z]*\).*/\1/p' | tail -n1)
-
- if [[ -z "$required" ]]; then
- required=$(MODE=required BASE_REF=origin/main HEAD_REF=${{ github.event.pull_request.head.sha }} tools/versioning/check-versioning.sh 2>/dev/null || echo "none")
- fi
-
- if [[ -z "$actual" ]]; then
- actual="none"
- fi
-
- reason="guard-derived"
- if [[ "$guard_exit" -ne 0 ]]; then
- reason="guard-unavailable-fallback"
- fi
-
- echo "required=$required" >> "$GITHUB_OUTPUT"
- echo "actual=$actual" >> "$GITHUB_OUTPUT"
- echo "reason=$reason" >> "$GITHUB_OUTPUT"
- echo "guard_exit=$guard_exit" >> "$GITHUB_OUTPUT"
-
- - name: Collect PR Metadata
- id: prmeta
- uses: actions/github-script@v7
- with:
- script: |
- const owner = context.repo.owner;
- const repo = context.repo.repo;
- const pr = context.payload.pull_request;
-
- const files = await github.paginate(
- github.rest.pulls.listFiles,
- { owner, repo, pull_number: pr.number, per_page: 100 }
- );
-
- const paths = files.map((f) => f.filename);
- const labels = pr.labels.map((l) => l.name);
-
- core.setOutput('files_json', JSON.stringify(paths));
- core.setOutput('existing_labels_json', JSON.stringify(labels));
- core.setOutput('pr_title', pr.title || '');
-
- - name: Compute Deterministic Labels (Dry Run)
+ - name: Run Entry Check
env:
- FILES_JSON: ${{ steps.prmeta.outputs.files_json }}
- EXISTING_LABELS_JSON: ${{ steps.prmeta.outputs.existing_labels_json }}
- PR_TITLE: ${{ steps.prmeta.outputs.pr_title }}
- VERSION_REQUIRED: ${{ steps.versioning.outputs.required }}
- VERSION_ACTUAL: ${{ steps.versioning.outputs.actual }}
- VERSION_REASON: ${{ steps.versioning.outputs.reason }}
- VERSION_GUARD_EXIT: ${{ steps.versioning.outputs.guard_exit }}
- OUTPUT_PATH: artifacts/labels/decision.json
- run: |
- mkdir -p artifacts/labels
- node tools/versioning/compute-pr-labels.js
- cat artifacts/labels/decision.json
-
- - name: Validate Label Decision Contract
- run: node tools/versioning/validate-label-decision.js tools/versioning/label-schema.json artifacts/labels/decision.json
-
- - name: Apply Deterministic Labels (Repo-only)
- id: apply_labels
- if: github.event.pull_request.head.repo.fork == false
- continue-on-error: true
- uses: actions/github-script@v7
- with:
- script: |
- const fs = require('fs');
- const decision = JSON.parse(fs.readFileSync('artifacts/labels/decision.json', 'utf-8'));
- const owner = context.repo.owner;
- const repo = context.repo.repo;
- const issue_number = context.payload.pull_request.number;
-
- for (const label of decision.labels_to_remove) {
- try {
- await github.rest.issues.removeLabel({ owner, repo, issue_number, name: label });
- } catch (error) {
- // ignore missing labels to keep idempotency
- }
- }
-
- await github.rest.issues.addLabels({
- owner,
- repo,
- issue_number,
- labels: decision.labels_to_add,
- });
-
- - name: Label Apply Warning
- if: always() && steps.apply_labels.outcome == 'failure'
- run: |
- echo "Auto-label apply failed; continuing (governance fail-open)." | tee artifacts/labels/apply-warning.txt
-
- - name: Labeling Summary
- run: |
- node -e 'const fs=require("fs");const d=JSON.parse(fs.readFileSync("artifacts/labels/decision.json","utf-8"));const lines=["## PR Labeling Summary","",`- version required: ${d.version_required}`,`- version actual: ${d.version_actual}`,`- version reason: ${d.version_reason}`,`- labels to add: ${d.labels_to_add.join(", ")}`,`- labels to remove: ${d.labels_to_remove.join(", ") || "none"}`,"","### Decision Trace",`- changed files: ${d.decision_trace.changed_files_count}`,`- selected primary: ${d.decision_trace.selected_primary}`,`- selected impl: ${d.decision_trace.selected_impl || "none"}`,`- selected areas: ${(d.decision_trace.selected_areas || []).join(", ") || "none"}`];fs.appendFileSync(process.env.GITHUB_STEP_SUMMARY, lines.join("\n")+"\n");'
-
- - name: Upload Label Decision Artifact
+ GH_TOKEN: ${{ github.token }}
+ run: bash tools/ci/bin/run.sh pr-labeling
+ - name: Upload Artifact
if: always()
uses: actions/upload-artifact@v4
with:
- name: pr-label-decision
- path: artifacts/labels/
+ name: ci-pr-labeling
+ path: artifacts/ci/pr-labeling/
if-no-files-found: error
preflight:
runs-on: ubuntu-latest
-
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
-
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.x"
-
+ dotnet-version: "10.0.102"
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: "20"
-
- - name: Label Engine Golden Tests
- run: node tools/versioning/test-compute-pr-labels.js
-
- - name: Docs Check
- run: |
- set -euo pipefail
- mkdir -p artifacts/docs
- python3 tools/check-docs.py | tee artifacts/docs/doc-check.txt
-
- - name: Versioning Guard
- run: |
- set -euo pipefail
- mkdir -p artifacts/versioning
- bash tools/versioning/check-versioning.sh | tee artifacts/versioning/versioning-check.txt
-
- - name: Format Check
- run: |
- set -euo pipefail
- mkdir -p artifacts/format
- dotnet format FileClassifier.sln --verify-no-changes | tee artifacts/format/format-check.txt
-
- - name: Upload Docs Artifact
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: docs-check
- path: artifacts/docs/
- if-no-files-found: error
-
- - name: Upload Versioning Artifact
+ - name: Run Entry Check
+ run: bash tools/ci/bin/run.sh preflight
+ - name: Upload Artifact
if: always()
uses: actions/upload-artifact@v4
with:
- name: versioning-check
- path: artifacts/versioning/
- if-no-files-found: error
-
- - name: Upload Format Artifact
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: format-check
- path: artifacts/format/
+ name: ci-preflight
+ path: artifacts/ci/preflight/
if-no-files-found: error
build:
runs-on: ubuntu-latest
needs: preflight
-
steps:
- name: Checkout
uses: actions/checkout@v4
-
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.x"
-
- - name: Restore
- run: dotnet restore FileClassifier.sln -v minimal
-
- - name: Build (Warnings As Errors)
- run: |
- set -euo pipefail
- mkdir -p artifacts/build
- dotnet build FileClassifier.sln --no-restore -warnaserror -v minimal | tee artifacts/build/build-log.txt
-
- - name: Upload Build Artifact
+ dotnet-version: "10.0.102"
+ - name: Run Entry Check
+ run: bash tools/ci/bin/run.sh build
+ - name: Upload Artifact
if: always()
uses: actions/upload-artifact@v4
with:
- name: build-log
- path: artifacts/build/
+ name: ci-build
+ path: artifacts/ci/build/
if-no-files-found: error
security-nuget:
runs-on: ubuntu-latest
needs: build
-
steps:
- name: Checkout
uses: actions/checkout@v4
-
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.x"
-
- - name: Restore
- run: dotnet restore FileClassifier.sln -v minimal
-
- - name: NuGet Vulnerability Scan
- run: |
- set -euo pipefail
- mkdir -p artifacts/security
- dotnet list FileClassifier.sln package --vulnerable --include-transitive | tee artifacts/security/nuget-vuln.txt
- if grep -E "\b(High|Critical)\b" artifacts/security/nuget-vuln.txt; then
- echo "High/Critical vulnerabilities detected."
- exit 1
- fi
-
- - name: NuGet Deprecated Packages
- run: |
- set -euo pipefail
- mkdir -p artifacts/security
- dotnet list FileClassifier.sln package --deprecated | tee artifacts/security/nuget-deprecated.txt
-
- - name: Upload Security Artifacts
+ dotnet-version: "10.0.102"
+ - name: Run Entry Check
+ run: bash tools/ci/bin/run.sh security-nuget
+ - name: Upload Artifact
if: always()
uses: actions/upload-artifact@v4
with:
- name: nuget-security
- path: artifacts/security/
+ name: ci-security-nuget
+ path: artifacts/ci/security-nuget/
if-no-files-found: error
tests-bdd-coverage:
runs-on: ubuntu-latest
needs: build
-
steps:
- name: Checkout
uses: actions/checkout@v4
-
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.x"
-
- - name: Restore
- run: dotnet restore FileClassifier.sln -v minimal
-
- - name: BDD Tests + Coverage Gate (Single Run)
- run: |
- set -euo pipefail
- mkdir -p artifacts/tests artifacts/coverage
- TEST_BDD_OUTPUT_DIR="${{ github.workspace }}/artifacts/tests" \
- bash tools/test-bdd-readable.sh -- \
- /p:CollectCoverage=true \
- /p:Include="[FileTypeDetectionLib]*" \
- /p:CoverletOutputFormat=cobertura \
- /p:CoverletOutput="${{ github.workspace }}/artifacts/coverage/coverage" \
- /p:Threshold=85%2c69 \
- /p:ThresholdType=line%2cbranch \
- /p:ThresholdStat=total
-
- - name: Upload Test Artifacts
+ dotnet-version: "10.0.102"
+ - name: Run Entry Check
+ run: bash tools/ci/bin/run.sh tests-bdd-coverage
+ - name: Upload Artifact
if: always()
uses: actions/upload-artifact@v4
with:
- name: bdd-test-results
- path: artifacts/tests/
- if-no-files-found: error
-
- - name: Upload Coverage Artifacts
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: coverage-report
- path: artifacts/coverage/
+ name: ci-tests-bdd-coverage
+ path: artifacts/ci/tests-bdd-coverage/
if-no-files-found: error
summary:
runs-on: ubuntu-latest
needs: [security-nuget, tests-bdd-coverage]
-
steps:
- - name: Download Coverage Artifact
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "10.0.102"
+ - name: Download CI Artifacts
uses: actions/download-artifact@v4
with:
- name: coverage-report
- path: artifacts/coverage
-
+ name: ci-preflight
+ path: artifacts/ci/preflight
+ - name: Download Build Artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: ci-build
+ path: artifacts/ci/build
- name: Download Security Artifact
uses: actions/download-artifact@v4
with:
- name: nuget-security
- path: artifacts/security
-
- - name: CI Summary
- run: |
- set -euo pipefail
- {
- echo "## CI Summary"
- echo ""
- echo "### Coverage"
- if [[ -f artifacts/coverage/coverage-summary.txt ]]; then
- cat artifacts/coverage/coverage-summary.txt
- else
- echo "Coverage summary not found."
- fi
- echo ""
- echo "### NuGet Vulnerabilities"
- if [[ -f artifacts/security/nuget-vuln.txt ]]; then
- tail -n 200 artifacts/security/nuget-vuln.txt
- else
- echo "NuGet vulnerability report not found."
- fi
- } >> "$GITHUB_STEP_SUMMARY"
+ name: ci-security-nuget
+ path: artifacts/ci/security-nuget
+ - name: Download Test Artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: ci-tests-bdd-coverage
+ path: artifacts/ci/tests-bdd-coverage
+ - name: Run Entry Check
+ run: bash tools/ci/bin/run.sh summary
+ - name: Upload Artifact
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: ci-summary
+ path: artifacts/ci/summary/
+ if-no-files-found: error
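
Every job above now follows the same shape: checkout, toolchain setup, a single entry call into `tools/ci/bin/run.sh <check-id>`, and an unconditional artifact upload. That makes any gate reproducible outside Actions; a minimal sketch, assuming a local checkout with the .NET 10.0.102 SDK and Node 20 installed:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Same entry call the workflow makes; the check id selects the gate.
bash tools/ci/bin/run.sh preflight

# Every required check must leave its contract files behind
# (raw.log, summary.md, result.json -- see docs/governance/CI_POLICY.md).
ls artifacts/ci/preflight/
cat artifacts/ci/preflight/result.json
```
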
diff --git a/.github/workflows/qodana.yml b/.github/workflows/qodana.yml
index b20b8be..45e145d 100644
--- a/.github/workflows/qodana.yml
+++ b/.github/workflows/qodana.yml
@@ -16,33 +16,35 @@ jobs:
runs-on: ubuntu-latest
env:
QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}
-
steps:
- name: Checkout
uses: actions/checkout@v4
-
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "10.0.102"
- name: Run Qodana
if: env.QODANA_TOKEN != ''
uses: JetBrains/qodana-action@v2025.3
with:
args: --linter,jetbrains/qodana-dotnet:2025.3
- results-dir: qodana-results
+ results-dir: artifacts/ci/qodana
upload-result: false
-
+ - name: Skip Qodana (missing token)
+ if: env.QODANA_TOKEN == ''
+ run: echo "QODANA_TOKEN is not set; skipping Qodana scan."
+ - name: Run Entry Check
+ if: env.QODANA_TOKEN != ''
+ run: bash tools/ci/bin/run.sh qodana
- name: Upload SARIF To Code Scanning
- if: env.QODANA_TOKEN != '' && github.event_name != 'pull_request'
+ if: github.event_name != 'pull_request' && env.QODANA_TOKEN != ''
uses: github/codeql-action/upload-sarif@v3
with:
- sarif_file: qodana-results/qodana.sarif.json
-
- - name: Upload SARIF Artifact
- if: env.QODANA_TOKEN != ''
+ sarif_file: artifacts/ci/qodana/qodana.sarif.json
+ - name: Upload Artifact
+ if: always() && env.QODANA_TOKEN != ''
uses: actions/upload-artifact@v4
with:
- name: qodana-sarif
- path: qodana-results/qodana.sarif.json
+ name: ci-qodana
+ path: artifacts/ci/qodana/
if-no-files-found: error
-
- - name: Skip Message (No Token)
- if: env.QODANA_TOKEN == ''
- run: echo "QODANA_TOKEN not set; qodana scan skipped."
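
With a token present, the workflow expects `artifacts/ci/qodana/qodana.sarif.json` to exist and to parse as JSON (rules `CI-QODANA-002` and `CI-QODANA-003` in `docs/governance/CI_POLICY.md`). A minimal local equivalent of that contract check, assuming `jq` is available:

```bash
sarif="artifacts/ci/qodana/qodana.sarif.json"
# CI-QODANA-002: the SARIF file must exist after a scan.
[[ -f "$sarif" ]] || { echo "CI-QODANA-002: expected SARIF missing" >&2; exit 1; }
# CI-QODANA-003: it must be well-formed JSON; jq exits non-zero on parse errors.
jq empty "$sarif" || { echo "CI-QODANA-003: SARIF invalid JSON" >&2; exit 1; }
```
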
diff --git a/.gitignore b/.gitignore
index 9320573..a3c3aed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,6 +47,10 @@ tools/*
!tools/check-quality.sh
!tools/run-coverage.sh
!tools/test-bdd-readable.sh
+!tools/ci/
+!tools/ci/**
+tools/ci/checks/**/bin/
+tools/ci/checks/**/obj/
!tools/versioning/
!tools/versioning/check-versioning.sh
!tools/versioning/labels.json
diff --git a/Directory.Build.props b/Directory.Build.props
index 0638759..1452dd9 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,11 +1,12 @@
-    <Version>4.0.0</Version>
-    <AssemblyVersion>4.0.0.0</AssemblyVersion>
-    <FileVersion>4.0.0.0</FileVersion>
-    <InformationalVersion>4.0.0</InformationalVersion>
+    <Version>4.1.0</Version>
+    <AssemblyVersion>4.1.0.0</AssemblyVersion>
+    <FileVersion>4.1.0.0</FileVersion>
+    <InformationalVersion>4.1.0</InformationalVersion>
true
true
+ true
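
The version bump to 4.1.0 rides along with lock-file enforcement: the new `packages.lock.json` files below pair with the `--locked-mode` restores in `tools/ci/bin/run.sh`, and the property added here is presumably the project-wide lock-file opt-in (`RestorePackagesWithLockFile`, by the usual convention). Stale locks can be caught locally:

```bash
# Fails if any project's packages.lock.json is missing or out of date.
dotnet restore FileClassifier.sln --locked-mode -v minimal
```
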
diff --git a/Directory.Packages.props b/Directory.Packages.props
index 9a0a0b1..f165396 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -6,13 +6,14 @@
-
+
+
-
+
diff --git a/README.md b/README.md
index d845982..cc53d55 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,7 @@ Die CI ist deterministisch und auditierbar aufgebaut. Alle Checks laufen in sepa
- Auto-Labeling & Auto-Versionierung: [docs/AUTO_LABELING_AND_VERSIONING.md](docs/AUTO_LABELING_AND_VERSIONING.md)
- BDD-Testkatalog und Testablauf: [docs/tests/README.md](docs/tests/README.md)
- Governance/Ownership: [docs/governance/LABELING_OWNERSHIP.md](docs/governance/LABELING_OWNERSHIP.md)
+- Qodana läuft als separater Workflow, schreibt Ergebnisse nach `artifacts/ci/qodana/` und veröffentlicht diese als Artefakt.
## 7. Runbook (reproduzierbar)
```bash
@@ -50,9 +51,8 @@ TEST_BDD_OUTPUT_DIR=artifacts/tests bash tools/test-bdd-readable.sh -- \
/p:Threshold=85%2c69 \
/p:ThresholdType=line%2cbranch \
/p:ThresholdStat=total
-bash tools/sync-portable-filetypedetection.sh
-bash tools/check-portable-filetypedetection.sh --clean
```
+Hinweis: Portable-Sync/Check-Tools sind aktuell nicht im Repository enthalten.
## 7.1 Versionierung (zentral)
- Zentrale Versionsquelle: `Directory.Build.props`.
@@ -66,7 +66,7 @@ bash tools/check-portable-filetypedetection.sh --clean
Im Root von `src/FileTypeDetection` liegen nur die Public APIs.
### 8.2 Portable
-Die portable Spiegelstruktur wird lokal über die Tools erzeugt und ist nicht Teil des veröffentlichten Repository-Inhalts.
+Die portable Spiegelstruktur ist nicht Teil des Repository-Inhalts; es gibt keine Sync/Check-Skripte im Repo.
### 8.3 Abstractions-Ordnerhierarchie
Die Modellschicht unter `src/FileTypeDetection/Abstractions` ist nach Verantwortlichkeiten getrennt:
diff --git a/docs/CI_PIPELINE.md b/docs/CI_PIPELINE.md
index 6e9839e..1fd8d1b 100644
--- a/docs/CI_PIPELINE.md
+++ b/docs/CI_PIPELINE.md
@@ -79,6 +79,18 @@ Für `pull_request` wird SARIF als Workflow-Artefakt veröffentlicht.
Code-Scanning-SARIF-Upload erfolgt nur auf non-PR-Runs, um PR-Noise zu vermeiden.
Profil-Hinweis: In `.qodana/profiles/fileclassifier.yaml` sind nur testpfad-spezifische Excludes für reine Redundanz-Inspections gesetzt (`tests/**`), Produktionscode bleibt unverändert streng.
+### 7.1 Qodana Ergebnisse & Artefakte
+- Wenn `QODANA_TOKEN` fehlt, wird Qodana im Workflow übersprungen.
+- Qodana schreibt Ergebnisse in `artifacts/ci/qodana/` (inkl. `qodana.sarif.json`).
+- Für `pull_request`-Runs wird das Verzeichnis als Artefakt veröffentlicht.
+- Für non-PR-Runs wird `artifacts/ci/qodana/qodana.sarif.json` in GitHub Code Scanning hochgeladen.
+- Artefakte:
+ - `artifacts/ci/qodana/qodana.sarif.json`
+ - Verzeichnis `artifacts/ci/qodana/`
+
+Branch-Protection-Hinweis:
+- Der Workflow-Status `qodana` muss in GitHub als Required Check konfiguriert werden, damit PR-Merges blockiert werden.
+
## 8. Lokale Reproduktion
```bash
node tools/versioning/test-compute-pr-labels.js
diff --git a/docs/DIN_SPECIFICATION_DE.md b/docs/DIN_SPECIFICATION_DE.md
index b603e32..1e63a78 100644
--- a/docs/DIN_SPECIFICATION_DE.md
+++ b/docs/DIN_SPECIFICATION_DE.md
@@ -48,11 +48,17 @@ Die technische Detailbeschreibung der öffentlichen Schnittstellen ist in `01_FU
## 7. Verifikation
Pflichtlauf für Freigabe:
```bash
+python3 tools/check-docs.py
dotnet restore FileClassifier.sln -v minimal
dotnet build FileClassifier.sln --no-restore -v minimal
-dotnet test tests/FileTypeDetectionLib.Tests/FileTypeDetectionLib.Tests.csproj --no-build -v minimal
-bash tools/sync-portable-filetypedetection.sh
-bash tools/sync-doc-conventions.sh
+TEST_BDD_OUTPUT_DIR=artifacts/tests bash tools/test-bdd-readable.sh -- \
+ /p:CollectCoverage=true \
+ /p:Include="[FileTypeDetectionLib]*" \
+ /p:CoverletOutputFormat=cobertura \
+ /p:CoverletOutput="$(pwd)/artifacts/coverage/coverage" \
+ /p:Threshold=85%2c69 \
+ /p:ThresholdType=line%2cbranch \
+ /p:ThresholdStat=total
```
## 8. Rückverfolgbarkeit
diff --git a/docs/governance/CI_PIPELINE.md b/docs/governance/CI_PIPELINE.md
new file mode 100644
index 0000000..cf07986
--- /dev/null
+++ b/docs/governance/CI_PIPELINE.md
@@ -0,0 +1,33 @@
+# CI Pipeline (SSOT)
+
+## Purpose
+Deterministic and auditable CI with contract-first artifacts and strict fail-closed execution.
+
+## Required Jobs
+- `preflight`
+- `build`
+- `security-nuget`
+- `tests-bdd-coverage`
+- `summary`
+- `qodana` (separate workflow)
+
+## Artifact Root (single SSOT)
+- `artifacts/ci/<check-id>/raw.log`
+- `artifacts/ci/<check-id>/summary.md`
+- `artifacts/ci/<check-id>/result.json`
+- `artifacts/ci/qodana/qodana.sarif.json`
+
+No alternative artifact roots are allowed.
+
+## Stage Order
+1. Preflight: governance-safe checks and policy guards.
+2. Build: restore + build with warnings as errors.
+3. Security: NuGet vulnerability/deprecation scan.
+4. Tests: BDD + coverage gate.
+5. Summary: aggregate and enforce artifact contract + schema validation.
+6. Qodana workflow: token/SARIF contract and dead-code gate.
+
+## Workflow Constraints
+- Workflow YAML contains entry-calls only.
+- Check logic is implemented in `.NET` validators under `tools/ci/checks/`.
+- Shell scripts in `tools/ci/` handle orchestration and artifact handling only.
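
A minimal sketch of what the artifact contract implies for a required check id; the actual enforcement lives in `tools/ci/policies/policy_artifact_contract.sh`, which is not part of this diff:

```bash
check_artifacts() {
  local id="$1" missing=0
  for f in raw.log summary.md result.json; do
    if [[ ! -f "artifacts/ci/${id}/${f}" ]]; then
      # CI-ARTIFACT-001: required artifact missing.
      echo "CI-ARTIFACT-001: artifacts/ci/${id}/${f} missing" >&2
      missing=1
    fi
  done
  return "$missing"
}

check_artifacts preflight
```
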
diff --git a/docs/governance/CI_POLICY.md b/docs/governance/CI_POLICY.md
new file mode 100644
index 0000000..db21966
--- /dev/null
+++ b/docs/governance/CI_POLICY.md
@@ -0,0 +1,43 @@
+# CI Policy (SSOT)
+
+## Scope
+This document is the single source of truth for CI rule IDs, severity handling, and exit code policy.
+
+## Global Rules
+- Fail-closed: no silent bypass paths.
+- No `continue-on-error: true` in workflow files.
+- No `|| true` on critical workflow paths.
+- No `set +e` without explicit allow-list entry.
+- Workflow YAML only calls entry scripts under `tools/ci/bin/`.
+
+## Result Contract
+All required checks MUST write:
+- `artifacts/ci/<check-id>/raw.log`
+- `artifacts/ci/<check-id>/summary.md`
+- `artifacts/ci/<check-id>/result.json`
+
+`result.json` must comply with `tools/ci/schema/result.schema.json`.
+
+## Rule Catalog
+- `CI-ARTIFACT-001` fail: required artifact missing.
+- `CI-SCHEMA-001` fail: `result.json` schema validation failed.
+- `CI-SHELL-001` fail: found `continue-on-error: true`.
+- `CI-SHELL-002` fail: found `|| true` in critical workflow path.
+- `CI-SHELL-003` fail: found `set +e` outside allow-list.
+- `CI-SHELL-004` fail: workflow `run: |` block exceeds configured max lines.
+- `CI-GRAPH-001` fail: required CI graph edge or job constraint violated.
+- `CI-QODANA-001` fail: `QODANA_TOKEN` missing.
+- `CI-QODANA-002` fail: expected SARIF missing.
+- `CI-QODANA-003` fail: SARIF invalid JSON.
+
+## Severity Rules
+- `warn`: visible, non-blocking.
+- `fail`: blocking, exit code non-zero.
+
+## Exit Code Matrix
+- `0`: success (`pass` or `warn`)
+- `1`: policy/contract/check failure (`fail`)
+- `2`: invalid invocation or missing prerequisites
+
+## set +e Allow-list
+No allow-list entries in Phase 1.
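
For illustration, a `result.json` in the shape this contract implies; the field names are assumptions, since `tools/ci/schema/result.schema.json` (the SSOT) is not part of this diff:

```bash
# Field names are illustrative only; the schema file is authoritative.
mkdir -p artifacts/ci/preflight
cat > artifacts/ci/preflight/result.json <<'EOF'
{
  "check_id": "preflight",
  "status": "pass",
  "violations": []
}
EOF
```
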
diff --git a/docs/versioning/CHANGELOG.md b/docs/versioning/CHANGELOG.md
index d587bc6..dc7ff1f 100644
--- a/docs/versioning/CHANGELOG.md
+++ b/docs/versioning/CHANGELOG.md
@@ -4,7 +4,7 @@ Alle Aenderungen werden hier technisch dokumentiert. Die Version selbst ist in
`Directory.Build.props` die SSOT.
## [Unreleased]
-- BREAKING: Version-Baseline auf `4.0.0` angehoben (Major-Bump durch oeffentliche API-/Struktur-Aenderungen im Branch).
+- Hinweis: Version-Baseline `4.0.0` ist bereits in `main` enthalten.
- Added:
- Changed:
- Fixed:
diff --git a/global.json b/global.json
new file mode 100644
index 0000000..a844f93
--- /dev/null
+++ b/global.json
@@ -0,0 +1,6 @@
+{
+ "sdk": {
+ "version": "10.0.102",
+ "rollForward": "latestPatch"
+ }
+}
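
`rollForward: latestPatch` accepts any installed SDK in the 10.0.1xx feature band at patch level 102 or higher; a quick local sanity check:

```bash
# Should print a 10.0.1xx version >= 10.0.102 once global.json resolves.
dotnet --version
```
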
diff --git a/src/FileClassifier.App/README.md b/src/FileClassifier.App/README.md
index 6061102..f707fdd 100644
--- a/src/FileClassifier.App/README.md
+++ b/src/FileClassifier.App/README.md
@@ -30,6 +30,6 @@ dotnet run --project src/FileClassifier.App -- ./tests/FileTypeDetectionLib.Test
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileClassifier.App/packages.lock.json b/src/FileClassifier.App/packages.lock.json
new file mode 100644
index 0000000..298f992
--- /dev/null
+++ b/src/FileClassifier.App/packages.lock.json
@@ -0,0 +1,56 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {
+ "MimeTypesMap": {
+ "type": "Transitive",
+ "resolved": "1.0.9",
+ "contentHash": "M0TuSCwL1a8QV0VKw8ysY4AIs6v/Aor3N7GXQeqgNlAvqjx9Kj9KxNd09Pg5RzpY1tCOU8mkrfYBi1Lxwj8quQ=="
+ },
+ "ZstdSharp.Port": {
+ "type": "Transitive",
+ "resolved": "0.8.4",
+ "contentHash": "eieSXq3kakCUXbgdxkKaRqWS6hF0KBJcqok9LlDCs60GOyrynLvPOcQ0pRw7shdPF7lh/VepJ9cP9n9HHc759g=="
+ },
+ "filetypedetectionlib": {
+ "type": "Project",
+ "dependencies": {
+ "Microsoft.IO.RecyclableMemoryStream": "[3.0.1, )",
+ "Mime": "[3.8.0, )",
+ "SharpCompress": "[0.39.0, )",
+ "System.IO.Hashing": "[10.0.2, )"
+ }
+ },
+ "Microsoft.IO.RecyclableMemoryStream": {
+ "type": "CentralTransitive",
+ "requested": "[3.0.1, )",
+ "resolved": "3.0.1",
+ "contentHash": "s/s20YTVY9r9TPfTrN5g8zPF1YhwxyqO6PxUkrYTGI2B+OGPe9AdajWZrLhFqXIvqIW23fnUE4+ztrUWNU1+9g=="
+ },
+ "Mime": {
+ "type": "CentralTransitive",
+ "requested": "[3.8.0, )",
+ "resolved": "3.8.0",
+ "contentHash": "SG8QHXjnyLoVeIOSw4ym7orS5LIRPBpzFQYfkgSqyAkeog+eZNMj32UOEO1SxLNBASxNPgVBIacxOOZsenBImg==",
+ "dependencies": {
+ "MimeTypesMap": "1.0.9"
+ }
+ },
+ "SharpCompress": {
+ "type": "CentralTransitive",
+ "requested": "[0.39.0, )",
+ "resolved": "0.39.0",
+ "contentHash": "0esqIUDlg68Z7+Weuge4QzEvNtawUO4obTJFL7xuf4DBHMxVRr+wbNgiX9arMrj3kGXQSvLe0zbZG3oxpkwJOA==",
+ "dependencies": {
+ "ZstdSharp.Port": "0.8.4"
+ }
+ },
+ "System.IO.Hashing": {
+ "type": "CentralTransitive",
+ "requested": "[10.0.2, )",
+ "resolved": "10.0.2",
+ "contentHash": "AKJknIFi9O3+rGExxTry188JPvUoZAPcCtS2qdqyFhIzsxQ1Ap94BeGDG0VzVEHakhmRxmJtVih6TsHoghIt/g=="
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/FileTypeDetection/Abstractions/Archive/README.md b/src/FileTypeDetection/Abstractions/Archive/README.md
index 5f73dbb..e35c736 100644
--- a/src/FileTypeDetection/Abstractions/Archive/README.md
+++ b/src/FileTypeDetection/Abstractions/Archive/README.md
@@ -22,6 +22,6 @@ Archiv-Eintragsmodell für sichere In-Memory-Extraktion.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/Abstractions/Detection/README.md b/src/FileTypeDetection/Abstractions/Detection/README.md
index 9d77557..3f90882 100644
--- a/src/FileTypeDetection/Abstractions/Detection/README.md
+++ b/src/FileTypeDetection/Abstractions/Detection/README.md
@@ -24,6 +24,6 @@ Detektions-Rückgabemodelle der Public API.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/Abstractions/Hashing/README.md b/src/FileTypeDetection/Abstractions/Hashing/README.md
index da125ab..c9dae8b 100644
--- a/src/FileTypeDetection/Abstractions/Hashing/README.md
+++ b/src/FileTypeDetection/Abstractions/Hashing/README.md
@@ -27,6 +27,6 @@ Deterministische Hash-Evidence-Modelle für Physical/Logical-Nachweise und Round
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/Abstractions/README.md b/src/FileTypeDetection/Abstractions/README.md
index ff7893f..68688d7 100644
--- a/src/FileTypeDetection/Abstractions/README.md
+++ b/src/FileTypeDetection/Abstractions/README.md
@@ -44,6 +44,6 @@ Immutable Rückgabemodelle für stabile API-Verträge.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/Configuration/README.md b/src/FileTypeDetection/Configuration/README.md
index 13ce27f..580d9fb 100644
--- a/src/FileTypeDetection/Configuration/README.md
+++ b/src/FileTypeDetection/Configuration/README.md
@@ -51,6 +51,6 @@ flowchart LR
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/Detection/FileTypeRegistry.vb b/src/FileTypeDetection/Detection/FileTypeRegistry.vb
index 801123a..e08677e 100644
--- a/src/FileTypeDetection/Detection/FileTypeRegistry.vb
+++ b/src/FileTypeDetection/Detection/FileTypeRegistry.vb
@@ -138,15 +138,18 @@ Namespace FileTypeDetection
Friend Shared Function DetectByMagic(header As Byte()) As FileKind
If header Is Nothing OrElse header.Length = 0 Then Return FileKind.Unknown
- Dim match = MagicRules.
- SelectMany(Function(rule) rule.Patterns.
- Select(Function(pattern) New With {.Rule = rule, .Pattern = pattern})).
- FirstOrDefault(Function(item)
- Dim segments = item.Pattern.Segments
- Return segments.All(Function(segment) HasSegment(header, segment))
- End Function)
-
- Return If(match Is Nothing, FileKind.Unknown, match.Rule.Kind)
+ For i = 0 To MagicRules.Length - 1
+ Dim rule = MagicRules(i)
+ Dim patterns = rule.Patterns
+ For j = 0 To patterns.Length - 1
+ Dim segments = patterns(j).Segments
+ If segments.All(Function(segment) HasSegment(header, segment)) Then
+ Return rule.Kind
+ End If
+ Next
+ Next
+
+ Return FileKind.Unknown
End Function
Friend Shared Function HasDirectHeaderDetection(kind As FileKind) As Boolean
diff --git a/src/FileTypeDetection/Detection/README.md b/src/FileTypeDetection/Detection/README.md
index d165feb..845787f 100644
--- a/src/FileTypeDetection/Detection/README.md
+++ b/src/FileTypeDetection/Detection/README.md
@@ -53,6 +53,6 @@ flowchart TD
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/FileTypeDetector.vb b/src/FileTypeDetection/FileTypeDetector.vb
index ef90a5e..ffb4471 100644
--- a/src/FileTypeDetection/FileTypeDetector.vb
+++ b/src/FileTypeDetection/FileTypeDetector.vb
@@ -3,6 +3,7 @@ Option Explicit On
Imports System.IO
Imports System.Linq
+Imports System.Diagnostics.CodeAnalysis
Namespace FileTypeDetection
'''
@@ -140,6 +141,8 @@ Namespace FileTypeDetection
''' <summary>
''' Liefert ein detailliertes, auditierbares Detektionsergebnis inkl. Endungs-Policy.
''' </summary>
+ ' ReSharper disable once MemberCanBeMadeStatic.Global
+
Public Function DetectDetailed(path As String, verifyExtension As Boolean) As DetectionDetail
Dim opt = GetDefaultOptions()
Dim trace As DetectionTrace = DetectionTrace.Empty
@@ -194,7 +197,7 @@ Namespace FileTypeDetection
End Try
End Function
- Private Function DetectPathCore(path As String) As FileType
+ Private Shared Function DetectPathCore(path As String) As FileType
Dim opt = GetDefaultOptions()
Dim trace As DetectionTrace = DetectionTrace.Empty
Return DetectPathCoreWithTrace(path, opt, trace)
@@ -239,6 +242,8 @@ Namespace FileTypeDetection
''' </summary>
''' <param name="data">Zu pruefende Nutzdaten.</param>
''' <returns>Erkannter Typ oder Unknown.</returns>
+ ' ReSharper disable once MemberCanBeMadeStatic.Global
+
Public Function Detect(data As Byte()) As FileType
Dim opt = GetDefaultOptions()
Return DetectInternalBytes(data, opt)
diff --git a/src/FileTypeDetection/Infrastructure/README.md b/src/FileTypeDetection/Infrastructure/README.md
index b4497e9..22a7f99 100644
--- a/src/FileTypeDetection/Infrastructure/README.md
+++ b/src/FileTypeDetection/Infrastructure/README.md
@@ -59,6 +59,6 @@ sequenceDiagram
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/README.md b/src/FileTypeDetection/README.md
index 44fc215..ecd1b1b 100644
--- a/src/FileTypeDetection/README.md
+++ b/src/FileTypeDetection/README.md
@@ -123,12 +123,11 @@ flowchart LR
## 10. Nachweise
- Build: `dotnet build FileClassifier.sln --no-restore -v minimal`
-- Test: `dotnet test FileClassifier.sln --no-build -v minimal`
-- Portable Check: `bash tools/check-portable-filetypedetection.sh --clean`
+- Test: siehe Runbook in `README.md` (BDD + Coverage).
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/src/FileTypeDetection/packages.lock.json b/src/FileTypeDetection/packages.lock.json
new file mode 100644
index 0000000..4639acc
--- /dev/null
+++ b/src/FileTypeDetection/packages.lock.json
@@ -0,0 +1,47 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {
+ "Microsoft.IO.RecyclableMemoryStream": {
+ "type": "Direct",
+ "requested": "[3.0.1, )",
+ "resolved": "3.0.1",
+ "contentHash": "s/s20YTVY9r9TPfTrN5g8zPF1YhwxyqO6PxUkrYTGI2B+OGPe9AdajWZrLhFqXIvqIW23fnUE4+ztrUWNU1+9g=="
+ },
+ "Mime": {
+ "type": "Direct",
+ "requested": "[3.8.0, )",
+ "resolved": "3.8.0",
+ "contentHash": "SG8QHXjnyLoVeIOSw4ym7orS5LIRPBpzFQYfkgSqyAkeog+eZNMj32UOEO1SxLNBASxNPgVBIacxOOZsenBImg==",
+ "dependencies": {
+ "MimeTypesMap": "1.0.9"
+ }
+ },
+ "SharpCompress": {
+ "type": "Direct",
+ "requested": "[0.39.0, )",
+ "resolved": "0.39.0",
+ "contentHash": "0esqIUDlg68Z7+Weuge4QzEvNtawUO4obTJFL7xuf4DBHMxVRr+wbNgiX9arMrj3kGXQSvLe0zbZG3oxpkwJOA==",
+ "dependencies": {
+ "ZstdSharp.Port": "0.8.4"
+ }
+ },
+ "System.IO.Hashing": {
+ "type": "Direct",
+ "requested": "[10.0.2, )",
+ "resolved": "10.0.2",
+ "contentHash": "AKJknIFi9O3+rGExxTry188JPvUoZAPcCtS2qdqyFhIzsxQ1Ap94BeGDG0VzVEHakhmRxmJtVih6TsHoghIt/g=="
+ },
+ "MimeTypesMap": {
+ "type": "Transitive",
+ "resolved": "1.0.9",
+ "contentHash": "M0TuSCwL1a8QV0VKw8ysY4AIs6v/Aor3N7GXQeqgNlAvqjx9Kj9KxNd09Pg5RzpY1tCOU8mkrfYBi1Lxwj8quQ=="
+ },
+ "ZstdSharp.Port": {
+ "type": "Transitive",
+ "resolved": "0.8.4",
+ "contentHash": "eieSXq3kakCUXbgdxkKaRqWS6hF0KBJcqok9LlDCs60GOyrynLvPOcQ0pRw7shdPF7lh/VepJ9cP9n9HHc759g=="
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/README.md b/src/README.md
index 9fcaa8d..55e5e5e 100644
--- a/src/README.md
+++ b/src/README.md
@@ -32,11 +32,10 @@ Im Root von `FileTypeDetection` liegen nur:
Jeder versionierte Quellordner unter `src/*` besitzt eine eigene `README.md` mit Verantwortungen und Verweisen.
## Synchronisation
-- `bash tools/sync-portable-filetypedetection.sh`
-- `bash tools/sync-doc-conventions.sh`
+Derzeit keine Repo-internen Sync-Skripte (portable/doc conventions).
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Benchmarks/README.md b/tests/FileTypeDetectionLib.Tests/Benchmarks/README.md
index 5d37c4b..71fa038 100644
--- a/tests/FileTypeDetectionLib.Tests/Benchmarks/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Benchmarks/README.md
@@ -23,6 +23,6 @@ Benchmark-Szenarien für relative Laufzeitentwicklung.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Features/README.md b/tests/FileTypeDetectionLib.Tests/Features/README.md
index 1f1e810..c45a80d 100644
--- a/tests/FileTypeDetectionLib.Tests/Features/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Features/README.md
@@ -33,6 +33,6 @@ Zentraler Einstieg für alle ausführbaren BDD-Feature-Dateien der Test-Suite.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Integration/README.md b/tests/FileTypeDetectionLib.Tests/Integration/README.md
index cd3c4a0..9f0e2d2 100644
--- a/tests/FileTypeDetectionLib.Tests/Integration/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Integration/README.md
@@ -13,6 +13,6 @@ Nachweise für deterministisches Verhalten über mehrere Containerformate und ec
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Property/README.md b/tests/FileTypeDetectionLib.Tests/Property/README.md
index 0569bbf..cd81ea5 100644
--- a/tests/FileTypeDetectionLib.Tests/Property/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Property/README.md
@@ -27,6 +27,6 @@ Eigenschaftsbasierte Verifikation von Archiv-Limits, Options-Invarianten und Mat
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/README.md b/tests/FileTypeDetectionLib.Tests/README.md
index a315f2f..15fd82d 100644
--- a/tests/FileTypeDetectionLib.Tests/README.md
+++ b/tests/FileTypeDetectionLib.Tests/README.md
@@ -114,6 +114,6 @@ Hinweis:
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Steps/README.md b/tests/FileTypeDetectionLib.Tests/Steps/README.md
index 6af69a5..4063feb 100644
--- a/tests/FileTypeDetectionLib.Tests/Steps/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Steps/README.md
@@ -23,6 +23,6 @@ Bindet Gherkin-Schritte an konkrete Testlogik.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Support/ArchiveEntryPayloadFactory.cs b/tests/FileTypeDetectionLib.Tests/Support/ArchiveEntryPayloadFactory.cs
index 54a9950..837595c 100644
--- a/tests/FileTypeDetectionLib.Tests/Support/ArchiveEntryPayloadFactory.cs
+++ b/tests/FileTypeDetectionLib.Tests/Support/ArchiveEntryPayloadFactory.cs
@@ -38,11 +38,6 @@ internal static byte[] CreateZipWithEntrySizes(params int[] entrySizes)
return ms.ToArray();
}
- internal static byte[] CreateNestedZip(int nestedZipBytes)
- {
- return CreateNestedZipWithInnerLength(nestedZipBytes).zipBytes;
- }
-
internal static (byte[] zipBytes, long innerUncompressedBytes) CreateNestedZipWithInnerLength(int nestedZipBytes)
{
var nestedContent = CreateZipWithEntries(1, Math.Max(1, nestedZipBytes));
@@ -122,4 +117,4 @@ private static byte[] CreatePayload(int size, byte value)
return data;
}
-}
\ No newline at end of file
+}
diff --git a/tests/FileTypeDetectionLib.Tests/Support/FixtureManifestCatalog.cs b/tests/FileTypeDetectionLib.Tests/Support/FixtureManifestCatalog.cs
index 1494879..63eeb00 100644
--- a/tests/FileTypeDetectionLib.Tests/Support/FixtureManifestCatalog.cs
+++ b/tests/FileTypeDetectionLib.Tests/Support/FixtureManifestCatalog.cs
@@ -23,8 +23,6 @@ private FixtureManifestCatalog(
_byFileName = byFileName;
}
- internal IReadOnlyCollection<FixtureManifestEntry> Entries => _byFixtureId.Values.ToList().AsReadOnly();
-
internal static FixtureManifestCatalog LoadAndValidate(string resourcesRoot)
{
if (string.IsNullOrWhiteSpace(resourcesRoot))
@@ -36,7 +34,7 @@ internal static FixtureManifestCatalog LoadAndValidate(string resourcesRoot)
var doc = JsonSerializer.Deserialize(
File.ReadAllText(manifestPath),
JsonOptions);
- if (doc is null || doc.Fixtures is null || doc.Fixtures.Count == 0)
+ if (doc?.Fixtures is not { Count: > 0 })
throw new InvalidOperationException("Fixture manifest is empty.");
var byFixtureId = new Dictionary<string, FixtureManifestEntry>(KeyComparer);
@@ -115,6 +113,18 @@ private static void ValidateEntryFields(FixtureManifestEntry entry)
if (string.IsNullOrWhiteSpace(entry.Sha256) || entry.Sha256.Length != 64)
throw new InvalidOperationException($"Fixture '{entry.FixtureId}' requires SHA-256 (64 hex chars).");
+
+ if (string.IsNullOrWhiteSpace(entry.SourceUrl))
+ throw new InvalidOperationException($"Fixture '{entry.FixtureId}' requires sourceUrl.");
+
+ if (string.IsNullOrWhiteSpace(entry.SourceRef))
+ throw new InvalidOperationException($"Fixture '{entry.FixtureId}' requires sourceRef.");
+
+ if (string.IsNullOrWhiteSpace(entry.Purpose))
+ throw new InvalidOperationException($"Fixture '{entry.FixtureId}' requires purpose.");
+
+ if (string.IsNullOrWhiteSpace(entry.SecurityNotes))
+ throw new InvalidOperationException($"Fixture '{entry.FixtureId}' requires securityNotes.");
}
private static string ComputeSha256(string path)
diff --git a/tests/FileTypeDetectionLib.Tests/Support/README.md b/tests/FileTypeDetectionLib.Tests/Support/README.md
index a7a7752..3971fa2 100644
--- a/tests/FileTypeDetectionLib.Tests/Support/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Support/README.md
@@ -26,6 +26,6 @@ Gemeinsame Test-Helfer für stabilen, deterministischen Testbetrieb.
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorAdditionalUnitTests.cs b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorAdditionalUnitTests.cs
index 4619356..f7796a5 100644
--- a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorAdditionalUnitTests.cs
+++ b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorAdditionalUnitTests.cs
@@ -253,7 +253,7 @@ public FakeEntry(string? relativePath = null, long? uncompressedSize = null, lon
public bool IsDirectory { get; }
public long? UncompressedSize { get; }
public long? CompressedSize { get; }
- public string LinkTarget { get; } = string.Empty;
+ public string LinkTarget => string.Empty;
public Stream OpenStream()
{
@@ -276,7 +276,7 @@ public NullStreamEntry(string? relativePath = null, long? uncompressedSize = nul
public bool IsDirectory { get; }
public long? UncompressedSize { get; }
public long? CompressedSize { get; }
- public string LinkTarget { get; } = string.Empty;
+ public string LinkTarget => string.Empty;
public Stream OpenStream()
{
diff --git a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorReflectionUnitTests.cs b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorReflectionUnitTests.cs
index c5e45ac..904ecfe 100644
--- a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorReflectionUnitTests.cs
+++ b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveExtractorReflectionUnitTests.cs
@@ -101,7 +101,7 @@ public FakeEntry(Func<Stream> streamFactory, string? relativePath = null, long?
public bool IsDirectory { get; }
public long? UncompressedSize { get; }
public long? CompressedSize { get; }
- public string LinkTarget { get; } = string.Empty;
+ public string LinkTarget => string.Empty;
public Stream OpenStream()
{
diff --git a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveInternalsPrivateBranchUnitTests.cs b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveInternalsPrivateBranchUnitTests.cs
index 234d46a..3466917 100644
--- a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveInternalsPrivateBranchUnitTests.cs
+++ b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveInternalsPrivateBranchUnitTests.cs
@@ -176,7 +176,7 @@ public FakeEntry(long? uncompressedSize = null, long? compressedSize = null, boo
public bool IsDirectory { get; }
public long? UncompressedSize { get; }
public long? CompressedSize { get; }
- public string LinkTarget { get; } = string.Empty;
+ public string LinkTarget => string.Empty;
public Stream OpenStream()
{
diff --git a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveManagedBackendUnitTests.cs b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveManagedBackendUnitTests.cs
index 231a8a6..9506a91 100644
--- a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveManagedBackendUnitTests.cs
+++ b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveManagedBackendUnitTests.cs
@@ -30,20 +30,22 @@ public void Process_ExtractsEntryModels_ForZip()
var ok = backend.Process(stream, opt, 0, ArchiveContainerType.Zip, entry =>
{
- if (entry.RelativePath == "a.txt")
+ switch (entry.RelativePath)
{
- sawFile = true;
- Assert.False(entry.IsDirectory);
- Assert.Equal(3, entry.UncompressedSize);
- Assert.True(entry.CompressedSize.HasValue);
- using var s = entry.OpenStream();
- Assert.True(s.CanRead);
- }
-
- else if (entry.RelativePath == "dir/")
- {
- sawDir = true;
- Assert.True(entry.IsDirectory);
+ case "a.txt":
+ sawFile = true;
+ Assert.False(entry.IsDirectory);
+ Assert.Equal(3, entry.UncompressedSize);
+ Assert.True(entry.CompressedSize.HasValue);
+ using (var s = entry.OpenStream())
+ {
+ Assert.True(s.CanRead);
+ }
+ break;
+ case "dir/":
+ sawDir = true;
+ Assert.True(entry.IsDirectory);
+ break;
}
return true;
diff --git a/tests/FileTypeDetectionLib.Tests/Unit/README.md b/tests/FileTypeDetectionLib.Tests/Unit/README.md
index a1438e8..22244fc 100644
--- a/tests/FileTypeDetectionLib.Tests/Unit/README.md
+++ b/tests/FileTypeDetectionLib.Tests/Unit/README.md
@@ -76,6 +76,6 @@ Hinweis: Keine Coverage-Excludes (maximal strikt).
## Dokumentpflege-Checkliste
- [ ] Inhalt auf aktuellen Code-Stand geprüft.
-- [ ] Links und Anker mit `python3 tools/check-markdown-links.py` geprüft.
+- [ ] Links und Anker mit `python3 tools/check-docs.py` geprüft.
- [ ] Beispiele/Kommandos lokal verifiziert.
- [ ] Begriffe mit `docs/01_FUNCTIONS.md` abgeglichen.
diff --git a/tests/FileTypeDetectionLib.Tests/packages.lock.json b/tests/FileTypeDetectionLib.Tests/packages.lock.json
new file mode 100644
index 0000000..2654d65
--- /dev/null
+++ b/tests/FileTypeDetectionLib.Tests/packages.lock.json
@@ -0,0 +1,240 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {
+ "coverlet.collector": {
+ "type": "Direct",
+ "requested": "[6.0.4, )",
+ "resolved": "6.0.4",
+ "contentHash": "lkhqpF8Pu2Y7IiN7OntbsTtdbpR1syMsm2F3IgX6ootA4ffRqWL5jF7XipHuZQTdVuWG/gVAAcf8mjk8Tz0xPg=="
+ },
+ "coverlet.msbuild": {
+ "type": "Direct",
+ "requested": "[6.0.4, )",
+ "resolved": "6.0.4",
+ "contentHash": "Qa7Hg+wrOMDKpXVn2dw4Wlun490bIWsFW0fdNJQFJLZnbU27MCP0HJ2mPgS+3EQBQUb0zKlkwiQzP+j38Hc3Iw=="
+ },
+ "Microsoft.NET.Test.Sdk": {
+ "type": "Direct",
+ "requested": "[18.0.1, )",
+ "resolved": "18.0.1",
+ "contentHash": "WNpu6vI2rA0pXY4r7NKxCN16XRWl5uHu6qjuyVLoDo6oYEggIQefrMjkRuibQHm/NslIUNCcKftvoWAN80MSAg==",
+ "dependencies": {
+ "Microsoft.CodeCoverage": "18.0.1",
+ "Microsoft.TestPlatform.TestHost": "18.0.1"
+ }
+ },
+ "Reqnroll.Tools.MsBuild.Generation": {
+ "type": "Direct",
+ "requested": "[3.3.3, )",
+ "resolved": "3.3.3",
+ "contentHash": "hKh06b4dKTy8YzcyCmtH3pH6dPZ5v4xhHvZ6Rq4AlDQgzvqtkqcgtAMYJEjMtvnaNfJVQcg6pZSi9s2w/zX50g=="
+ },
+ "Reqnroll.xUnit": {
+ "type": "Direct",
+ "requested": "[3.3.3, )",
+ "resolved": "3.3.3",
+ "contentHash": "s9n/De/L9Di4s1dxgMEAWM9KgCGBWomqsEbzsBEn2U2ahnxnXyjXoVU8+tjOAGeLbythmXudbw+//pqzkrp2pg==",
+ "dependencies": {
+ "Reqnroll": "3.3.3",
+ "Reqnroll.Tools.MsBuild.Generation": "3.3.3",
+ "Xunit.SkippableFact": "1.4.13",
+ "xunit.core": "2.8.1"
+ }
+ },
+ "xunit": {
+ "type": "Direct",
+ "requested": "[2.9.3, )",
+ "resolved": "2.9.3",
+ "contentHash": "TlXQBinK35LpOPKHAqbLY4xlEen9TBafjs0V5KnA4wZsoQLQJiirCR4CbIXvOH8NzkW4YeJKP5P/Bnrodm0h9Q==",
+ "dependencies": {
+ "xunit.analyzers": "1.18.0",
+ "xunit.assert": "2.9.3",
+ "xunit.core": "[2.9.3]"
+ }
+ },
+ "xunit.runner.visualstudio": {
+ "type": "Direct",
+ "requested": "[3.1.5, )",
+ "resolved": "3.1.5",
+ "contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA=="
+ },
+ "Cucumber.CucumberExpressions": {
+ "type": "Transitive",
+ "resolved": "17.1.0",
+ "contentHash": "IZFDLLwrUCKuGDXQzmAWZa+kQFpVmVwWD8wEl0UWGOR83UQfzqg8hZeSSNUOXlW0Kh3vMO6kEm3FpNC4amKMaw=="
+ },
+ "Cucumber.HtmlFormatter": {
+ "type": "Transitive",
+ "resolved": "22.2.0",
+ "contentHash": "hlP9GGzRADDKh0/GEt2aWD9G08qEDD4z6TC6iqJt3xH8kiU3iAbg1rOCtorN13zNdaaVxdraHzznWTMT/DnM1g==",
+ "dependencies": {
+ "Cucumber.Messages": "30.1.0"
+ }
+ },
+ "Cucumber.Messages": {
+ "type": "Transitive",
+ "resolved": "30.1.0",
+ "contentHash": "HDfIzDd7JFDEwiNHpgOHBPEiyVjXQws+CEnVb+Lwiq58jHNA/xYwmTswbqknOxcAejCRFP1de/hHHw+ZuTzV6A=="
+ },
+ "Gherkin": {
+ "type": "Transitive",
+ "resolved": "35.0.0",
+ "contentHash": "ha+QNevQsXEESbzazUceHTCtKOjoBTBa8kLFdKpu6vSEbb3UMt3a55RG78tIoU/C4qzIqX3jQxaIdIcYGO+IWQ==",
+ "dependencies": {
+ "Cucumber.Messages": "29.0.1"
+ }
+ },
+ "Microsoft.Bcl.AsyncInterfaces": {
+ "type": "Transitive",
+ "resolved": "9.0.6",
+ "contentHash": "82rLw487j5jBXEi2r3WvA/cagOhcRREVRtet6izzjDMY+i392W5oNSN2KCtuIvlTpyMONEUD0MIlGAgDdsvQ/w=="
+ },
+ "Microsoft.CodeCoverage": {
+ "type": "Transitive",
+ "resolved": "18.0.1",
+ "contentHash": "O+utSr97NAJowIQT/OVp3Lh9QgW/wALVTP4RG1m2AfFP4IyJmJz0ZBmFJUsRQiAPgq6IRC0t8AAzsiPIsaUDEA=="
+ },
+ "Microsoft.Extensions.DependencyModel": {
+ "type": "Transitive",
+ "resolved": "8.0.2",
+ "contentHash": "mUBDZZRgZrSyFOsJ2qJJ9fXfqd/kXJwf3AiDoqLD9m6TjY5OO/vLNOb9fb4juC0487eq4hcGN/M2Rh/CKS7QYw=="
+ },
+ "Microsoft.TestPlatform.ObjectModel": {
+ "type": "Transitive",
+ "resolved": "18.0.1",
+ "contentHash": "qT/mwMcLF9BieRkzOBPL2qCopl8hQu6A1P7JWAoj/FMu5i9vds/7cjbJ/LLtaiwWevWLAeD5v5wjQJ/l6jvhWQ=="
+ },
+ "Microsoft.TestPlatform.TestHost": {
+ "type": "Transitive",
+ "resolved": "18.0.1",
+ "contentHash": "uDJKAEjFTaa2wHdWlfo6ektyoh+WD4/Eesrwb4FpBFKsLGehhACVnwwTI4qD3FrIlIEPlxdXg3SyrYRIcO+RRQ==",
+ "dependencies": {
+ "Microsoft.TestPlatform.ObjectModel": "18.0.1",
+ "Newtonsoft.Json": "13.0.3"
+ }
+ },
+ "MimeTypesMap": {
+ "type": "Transitive",
+ "resolved": "1.0.9",
+ "contentHash": "M0TuSCwL1a8QV0VKw8ysY4AIs6v/Aor3N7GXQeqgNlAvqjx9Kj9KxNd09Pg5RzpY1tCOU8mkrfYBi1Lxwj8quQ=="
+ },
+ "Newtonsoft.Json": {
+ "type": "Transitive",
+ "resolved": "13.0.3",
+ "contentHash": "HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ=="
+ },
+ "Reqnroll": {
+ "type": "Transitive",
+ "resolved": "3.3.3",
+ "contentHash": "3f4zOQraEDna8x3yZ9sEuPz4eQ60XsVIyYCr9RnzgK9cfb6ykGxWxkYadj7tq3hP+Qy6ox3va8Ma2p18xzRlig==",
+ "dependencies": {
+ "Cucumber.CucumberExpressions": "17.1.0",
+ "Cucumber.HtmlFormatter": "22.2.0",
+ "Cucumber.Messages": "30.1.0",
+ "Gherkin": "35.0.0",
+ "Microsoft.Bcl.AsyncInterfaces": "9.0.6",
+ "Microsoft.Extensions.DependencyModel": "8.0.2"
+ }
+ },
+ "Validation": {
+ "type": "Transitive",
+ "resolved": "2.4.18",
+ "contentHash": "NfvWJ1QeuZ1FQCkqgXTu1cOkRkbNCfxs4Tat+abXLwom6OXbULVhRGp34BTvVB4XPxj6VIAl7KfLfStXMt/Ehw=="
+ },
+ "xunit.abstractions": {
+ "type": "Transitive",
+ "resolved": "2.0.3",
+ "contentHash": "pot1I4YOxlWjIb5jmwvvQNbTrZ3lJQ+jUGkGjWE3hEFM0l5gOnBWS+H3qsex68s5cO52g+44vpGzhAt+42vwKg=="
+ },
+ "xunit.analyzers": {
+ "type": "Transitive",
+ "resolved": "1.18.0",
+ "contentHash": "OtFMHN8yqIcYP9wcVIgJrq01AfTxijjAqVDy/WeQVSyrDC1RzBWeQPztL49DN2syXRah8TYnfvk035s7L95EZQ=="
+ },
+ "xunit.assert": {
+ "type": "Transitive",
+ "resolved": "2.9.3",
+ "contentHash": "/Kq28fCE7MjOV42YLVRAJzRF0WmEqsmflm0cfpMjGtzQ2lR5mYVj1/i0Y8uDAOLczkL3/jArrwehfMD0YogMAA=="
+ },
+ "xunit.core": {
+ "type": "Transitive",
+ "resolved": "2.9.3",
+ "contentHash": "BiAEvqGvyme19wE0wTKdADH+NloYqikiU0mcnmiNyXaF9HyHmE6sr/3DC5vnBkgsWaE6yPyWszKSPSApWdRVeQ==",
+ "dependencies": {
+ "xunit.extensibility.core": "[2.9.3]",
+ "xunit.extensibility.execution": "[2.9.3]"
+ }
+ },
+ "xunit.extensibility.core": {
+ "type": "Transitive",
+ "resolved": "2.9.3",
+ "contentHash": "kf3si0YTn2a8J8eZNb+zFpwfoyvIrQ7ivNk5ZYA5yuYk1bEtMe4DxJ2CF/qsRgmEnDr7MnW1mxylBaHTZ4qErA==",
+ "dependencies": {
+ "xunit.abstractions": "2.0.3"
+ }
+ },
+ "xunit.extensibility.execution": {
+ "type": "Transitive",
+ "resolved": "2.9.3",
+ "contentHash": "yMb6vMESlSrE3Wfj7V6cjQ3S4TXdXpRqYeNEI3zsX31uTsGMJjEw6oD5F5u1cHnMptjhEECnmZSsPxB6ChZHDQ==",
+ "dependencies": {
+ "xunit.extensibility.core": "[2.9.3]"
+ }
+ },
+ "Xunit.SkippableFact": {
+ "type": "Transitive",
+ "resolved": "1.4.13",
+ "contentHash": "IyzZNvJEtXGlXrzxDiSbtH5Lyxf4iJdRQADuyjGdDf00LjXRLJwIoezQNFhFGKTMtvk8IIgaSHxW4mAV4O7b8A==",
+ "dependencies": {
+ "Validation": "2.4.18",
+ "xunit.extensibility.execution": "2.4.0"
+ }
+ },
+ "ZstdSharp.Port": {
+ "type": "Transitive",
+ "resolved": "0.8.4",
+ "contentHash": "eieSXq3kakCUXbgdxkKaRqWS6hF0KBJcqok9LlDCs60GOyrynLvPOcQ0pRw7shdPF7lh/VepJ9cP9n9HHc759g=="
+ },
+ "filetypedetectionlib": {
+ "type": "Project",
+ "dependencies": {
+ "Microsoft.IO.RecyclableMemoryStream": "[3.0.1, )",
+ "Mime": "[3.8.0, )",
+ "SharpCompress": "[0.39.0, )",
+ "System.IO.Hashing": "[10.0.2, )"
+ }
+ },
+ "Microsoft.IO.RecyclableMemoryStream": {
+ "type": "CentralTransitive",
+ "requested": "[3.0.1, )",
+ "resolved": "3.0.1",
+ "contentHash": "s/s20YTVY9r9TPfTrN5g8zPF1YhwxyqO6PxUkrYTGI2B+OGPe9AdajWZrLhFqXIvqIW23fnUE4+ztrUWNU1+9g=="
+ },
+ "Mime": {
+ "type": "CentralTransitive",
+ "requested": "[3.8.0, )",
+ "resolved": "3.8.0",
+ "contentHash": "SG8QHXjnyLoVeIOSw4ym7orS5LIRPBpzFQYfkgSqyAkeog+eZNMj32UOEO1SxLNBASxNPgVBIacxOOZsenBImg==",
+ "dependencies": {
+ "MimeTypesMap": "1.0.9"
+ }
+ },
+ "SharpCompress": {
+ "type": "CentralTransitive",
+ "requested": "[0.39.0, )",
+ "resolved": "0.39.0",
+ "contentHash": "0esqIUDlg68Z7+Weuge4QzEvNtawUO4obTJFL7xuf4DBHMxVRr+wbNgiX9arMrj3kGXQSvLe0zbZG3oxpkwJOA==",
+ "dependencies": {
+ "ZstdSharp.Port": "0.8.4"
+ }
+ },
+ "System.IO.Hashing": {
+ "type": "CentralTransitive",
+ "requested": "[10.0.2, )",
+ "resolved": "10.0.2",
+ "contentHash": "AKJknIFi9O3+rGExxTry188JPvUoZAPcCtS2qdqyFhIzsxQ1Ap94BeGDG0VzVEHakhmRxmJtVih6TsHoghIt/g=="
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/tools/check-docs.py b/tools/check-docs.py
index b153db8..6110189 100755
--- a/tools/check-docs.py
+++ b/tools/check-docs.py
@@ -5,6 +5,8 @@
ROOT = Path(__file__).resolve().parents[1]
DOCS_DIR = ROOT / "docs"
+SRC_DIR = ROOT / "src"
+TESTS_DIR = ROOT / "tests"
LINK_PATTERN = re.compile(r"!?\[[^\]]*\]\(([^)]+)\)")
@@ -27,11 +29,17 @@ def resolve_target(base: Path, target: str) -> Path:
return (base / path_part).resolve()
+def collect_markdown_files() -> list[Path]:
+ files: list[Path] = [ROOT / "README.md"]
+ for directory in (DOCS_DIR, SRC_DIR, TESTS_DIR):
+ if directory.exists():
+ files.extend(directory.rglob("*.md"))
+ return files
+
+
def check_links() -> list[str]:
errors: list[str] = []
- files = [ROOT / "README.md"]
- if DOCS_DIR.exists():
- files.extend(DOCS_DIR.rglob("*.md"))
+ files = collect_markdown_files()
for md_file in files:
if not md_file.exists():
diff --git a/tools/ci/bin/assert_ci_graph.sh b/tools/ci/bin/assert_ci_graph.sh
new file mode 100755
index 0000000..ac988ab
--- /dev/null
+++ b/tools/ci/bin/assert_ci_graph.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+WORKFLOW_PATH="${1:-.github/workflows/ci.yml}"
+EXPECTED_PATH="${2:-tools/ci/policies/ci_graph_expected.json}"
+
+DOTNET_CMD=(dotnet "tools/ci/checks/CiGraphValidator/bin/Release/net10.0/CiGraphValidator.dll" "$WORKFLOW_PATH" "$EXPECTED_PATH")
+"${DOTNET_CMD[@]}"
diff --git a/tools/ci/bin/run.sh b/tools/ci/bin/run.sh
new file mode 100755
index 0000000..9366cff
--- /dev/null
+++ b/tools/ci/bin/run.sh
@@ -0,0 +1,166 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_DIR="$(cd -- "${SCRIPT_DIR}/../../.." && pwd)"
+
+# shellcheck source=tools/ci/lib/result.sh
+source "${ROOT_DIR}/tools/ci/lib/result.sh"
+
+CHECK_ID="${1:-}"
+if [[ -z "$CHECK_ID" ]]; then
+ echo "Usage: tools/ci/bin/run.sh " >&2
+ exit 2
+fi
+
+OUT_DIR="artifacts/ci/${CHECK_ID}"
+ci_result_init "$CHECK_ID" "$OUT_DIR"
+
+finalized=0
+finalize_and_exit() {
+ if [[ "$finalized" -eq 0 ]]; then
+ ci_result_finalize
+ finalized=1
+ fi
+}
+trap finalize_and_exit EXIT
+trap 'ci_result_append_summary "Check '\''${CHECK_ID}'\'' failed."' ERR
+
+run_or_fail() {
+ local rule_id="$1"
+ local message="$2"
+ shift 2
+ if ! ci_run_capture "$message" "$@"; then
+ ci_result_add_violation "$rule_id" "fail" "$message" "$CI_RAW_LOG"
+ return 1
+ fi
+}
+
+build_validators() {
+ run_or_fail "CI-SETUP-001" "Restore validator projects (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/tools/ci/checks/ResultSchemaValidator/ResultSchemaValidator.csproj"
+ run_or_fail "CI-SETUP-001" "Build ResultSchemaValidator" dotnet build -c Release "${ROOT_DIR}/tools/ci/checks/ResultSchemaValidator/ResultSchemaValidator.csproj"
+ run_or_fail "CI-SETUP-001" "Restore CiGraphValidator (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/tools/ci/checks/CiGraphValidator/CiGraphValidator.csproj"
+ run_or_fail "CI-SETUP-001" "Build CiGraphValidator" dotnet build -c Release "${ROOT_DIR}/tools/ci/checks/CiGraphValidator/CiGraphValidator.csproj"
+ run_or_fail "CI-SETUP-001" "Restore QodanaContractValidator (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/tools/ci/checks/QodanaContractValidator/QodanaContractValidator.csproj"
+ run_or_fail "CI-SETUP-001" "Build QodanaContractValidator" dotnet build -c Release "${ROOT_DIR}/tools/ci/checks/QodanaContractValidator/QodanaContractValidator.csproj"
+}
+
+run_preflight() {
+ build_validators
+ run_or_fail "CI-PREFLIGHT-001" "Label engine tests" node "${ROOT_DIR}/tools/versioning/test-compute-pr-labels.js"
+ run_or_fail "CI-PREFLIGHT-001" "Docs check" python3 "${ROOT_DIR}/tools/check-docs.py"
+ run_or_fail "CI-PREFLIGHT-001" "Versioning guard" bash "${ROOT_DIR}/tools/versioning/check-versioning.sh"
+ run_or_fail "CI-PREFLIGHT-001" "Format check" dotnet format "${ROOT_DIR}/FileClassifier.sln" --verify-no-changes
+ run_or_fail "CI-PREFLIGHT-001" "Policy shell safety" bash "${ROOT_DIR}/tools/ci/policies/policy_shell_safety.sh"
+ run_or_fail "CI-GRAPH-001" "CI graph assertion" bash "${ROOT_DIR}/tools/ci/bin/assert_ci_graph.sh"
+
+ ci_result_append_summary "Preflight checks completed."
+}
+
+run_build() {
+ run_or_fail "CI-BUILD-001" "Restore solution (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/FileClassifier.sln" -v minimal
+ run_or_fail "CI-BUILD-001" "Build solution" dotnet build "${ROOT_DIR}/FileClassifier.sln" --no-restore -warnaserror -v minimal
+ ci_result_append_summary "Build completed."
+}
+
+run_security_nuget() {
+ run_or_fail "CI-SECURITY-002" "Restore solution (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/FileClassifier.sln" -v minimal
+ run_or_fail "CI-SECURITY-002" "NuGet vulnerability scan" dotnet list "${ROOT_DIR}/FileClassifier.sln" package --vulnerable --include-transitive
+
+ if rg -n "\\b(High|Critical)\\b" "$CI_RAW_LOG" >/dev/null; then
+ ci_result_add_violation "CI-SECURITY-001" "fail" "High/Critical NuGet vulnerabilities detected" "$CI_RAW_LOG"
+ ci_result_append_summary "High/Critical NuGet vulnerabilities detected."
+ return 1
+ fi
+
+ run_or_fail "CI-SECURITY-002" "NuGet deprecated packages" dotnet list "${ROOT_DIR}/FileClassifier.sln" package --deprecated
+ ci_result_append_summary "NuGet security checks completed."
+}
+
+run_tests_bdd_coverage() {
+ local tests_dir="${OUT_DIR}/tests"
+ local coverage_dir="${OUT_DIR}/coverage"
+ mkdir -p "$tests_dir" "$coverage_dir"
+
+ run_or_fail "CI-TEST-001" "Restore solution (locked mode)" dotnet restore --locked-mode "${ROOT_DIR}/FileClassifier.sln" -v minimal
+ run_or_fail "CI-TEST-001" "BDD tests + coverage" env TEST_BDD_OUTPUT_DIR="$tests_dir" bash "${ROOT_DIR}/tools/test-bdd-readable.sh" -- /p:CollectCoverage=true /p:Include="[FileTypeDetectionLib]*" /p:CoverletOutputFormat=cobertura /p:CoverletOutput="${coverage_dir}/coverage" /p:Threshold=85%2c69 /p:ThresholdType=line%2cbranch /p:ThresholdStat=total
+ ci_result_append_summary "BDD coverage checks completed."
+}
+
+run_summary() {
+ build_validators
+ run_or_fail "CI-ARTIFACT-001" "Artifact contract policy" bash "${ROOT_DIR}/tools/ci/policies/policy_artifact_contract.sh" preflight build security-nuget tests-bdd-coverage
+ ci_result_append_summary "Summary contract checks completed."
+}
+
+run_pr_labeling() {
+ run_or_fail "CI-LABEL-001" "Fetch PR head" git fetch --no-tags --prune origin "${GITHUB_SHA}"
+
+ local pr_number
+ pr_number="$(jq -r '.pull_request.number // empty' "${GITHUB_EVENT_PATH}")"
+ if [[ -z "$pr_number" ]]; then
+ ci_result_add_violation "CI-LABEL-001" "fail" "pull_request number missing in event payload" "$GITHUB_EVENT_PATH"
+ return 1
+ fi
+
+ local head_sha
+ head_sha="$(jq -r '.pull_request.head.sha // empty' "${GITHUB_EVENT_PATH}")"
+
+ run_or_fail "CI-LABEL-001" "Derive versioning decision" env BASE_REF=origin/main HEAD_REF="$head_sha" "${ROOT_DIR}/tools/versioning/check-versioning.sh"
+
+ local files_json labels_json pr_title
+ files_json="$(gh api "repos/${GITHUB_REPOSITORY}/pulls/${pr_number}/files" --paginate --jq '[.[].filename]')"
+ labels_json="$(gh api "repos/${GITHUB_REPOSITORY}/issues/${pr_number}" --jq '[.labels[].name]')"
+ pr_title="$(gh api "repos/${GITHUB_REPOSITORY}/pulls/${pr_number}" --jq '.title')"
+
+ mkdir -p "${OUT_DIR}"
+ FILES_JSON="$files_json" EXISTING_LABELS_JSON="$labels_json" PR_TITLE="$pr_title" VERSION_REQUIRED="none" VERSION_ACTUAL="none" VERSION_REASON="contract-run" VERSION_GUARD_EXIT="0" OUTPUT_PATH="${OUT_DIR}/decision.json" \
+ ci_run_capture "Compute deterministic labels" node "${ROOT_DIR}/tools/versioning/compute-pr-labels.js"
+
+ run_or_fail "CI-LABEL-001" "Validate label decision" node "${ROOT_DIR}/tools/versioning/validate-label-decision.js" "${ROOT_DIR}/tools/versioning/label-schema.json" "${OUT_DIR}/decision.json"
+ ci_result_append_summary "PR labeling checks completed."
+}
+
+run_qodana_contract() {
+ build_validators
+ local sarif_path="${OUT_DIR}/qodana.sarif.json"
+ if ! ci_run_capture "Qodana contract validator" dotnet "${ROOT_DIR}/tools/ci/checks/QodanaContractValidator/bin/Release/net10.0/QodanaContractValidator.dll" --sarif "$sarif_path"; then
+ if rg -q "CI-QODANA-001" "$CI_RAW_LOG"; then
+ ci_result_add_violation "CI-QODANA-001" "fail" "QODANA_TOKEN missing" "$CI_RAW_LOG"
+ elif rg -q "CI-QODANA-002" "$CI_RAW_LOG"; then
+ ci_result_add_violation "CI-QODANA-002" "fail" "Qodana SARIF missing" "$CI_RAW_LOG"
+ elif rg -q "CI-QODANA-003" "$CI_RAW_LOG"; then
+ ci_result_add_violation "CI-QODANA-003" "fail" "Qodana SARIF invalid" "$CI_RAW_LOG"
+ else
+ ci_result_add_violation "CI-QODANA-001" "fail" "Qodana contract validation failed" "$CI_RAW_LOG"
+ fi
+ return 1
+ fi
+ ci_result_append_summary "Qodana contract validation completed."
+}
+
+main() {
+ cd "$ROOT_DIR"
+ case "$CHECK_ID" in
+ preflight) run_preflight ;;
+ build) run_build ;;
+ security-nuget) run_security_nuget ;;
+ tests-bdd-coverage) run_tests_bdd_coverage ;;
+ summary) run_summary ;;
+ pr-labeling) run_pr_labeling ;;
+ qodana) run_qodana_contract ;;
+ *)
+ ci_result_add_violation "CI-RUNNER-001" "fail" "unknown check_id '${CHECK_ID}'" "tools/ci/bin/run.sh"
+ return 2
+ ;;
+ esac
+}
+
+main
+
+if [[ "$(cat "$CI_STATUS_FILE")" == "fail" ]]; then
+ ci_result_append_summary "Check '${CHECK_ID}' failed."
+ exit 1
+fi
+
+ci_result_append_summary "Check '${CHECK_ID}' passed."
diff --git a/tools/ci/checks/CiGraphValidator/CiGraphValidator.csproj b/tools/ci/checks/CiGraphValidator/CiGraphValidator.csproj
new file mode 100644
index 0000000..de913c2
--- /dev/null
+++ b/tools/ci/checks/CiGraphValidator/CiGraphValidator.csproj
@@ -0,0 +1,11 @@
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <OutputType>Exe</OutputType>
+ <TargetFramework>net10.0</TargetFramework>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <Nullable>enable</Nullable>
+ </PropertyGroup>
+ <ItemGroup>
+ <PackageReference Include="YamlDotNet" Version="16.3.0" />
+ </ItemGroup>
+</Project>
diff --git a/tools/ci/checks/CiGraphValidator/Program.cs b/tools/ci/checks/CiGraphValidator/Program.cs
new file mode 100644
index 0000000..36a1126
--- /dev/null
+++ b/tools/ci/checks/CiGraphValidator/Program.cs
@@ -0,0 +1,92 @@
+using System.Text.Json;
+using YamlDotNet.RepresentationModel;
+
+if (args.Length != 2)
+{
+ Console.Error.WriteLine("Usage: CiGraphValidator ");
+ return 2;
+}
+
+var workflowPath = args[0];
+var expectedPath = args[1];
+if (!File.Exists(workflowPath) || !File.Exists(expectedPath))
+{
+ Console.Error.WriteLine("Workflow or expected file missing.");
+ return 1;
+}
+
+var expected = JsonDocument.Parse(File.ReadAllText(expectedPath)).RootElement;
+var allowedJobs = expected.GetProperty("allowed_jobs").EnumerateArray().Select(x => x.GetString()!).ToHashSet();
+var requiredEdges = expected.GetProperty("required_needs_edges").EnumerateArray().Select(x =>
+ (From: x.GetProperty("from").GetString()!, To: x.GetProperty("to").GetString()!)).ToList();
+
+var yaml = new YamlStream();
+yaml.Load(new StringReader(File.ReadAllText(workflowPath)));
+var root = (YamlMappingNode)yaml.Documents[0].RootNode;
+if (!root.Children.TryGetValue(new YamlScalarNode("jobs"), out var jobsNodeRaw))
+{
+ Console.Error.WriteLine("No jobs node in workflow.");
+ return 1;
+}
+
+var jobsNode = (YamlMappingNode)jobsNodeRaw;
+var jobNeeds = new Dictionary<string, HashSet<string>>(StringComparer.Ordinal);
+
+foreach (var child in jobsNode.Children)
+{
+ var jobId = ((YamlScalarNode)child.Key).Value ?? string.Empty;
+ var map = child.Value as YamlMappingNode;
+ var needs = new HashSet<string>(StringComparer.Ordinal);
+ if (map != null && map.Children.TryGetValue(new YamlScalarNode("needs"), out var needsNode))
+ {
+ switch (needsNode)
+ {
+ case YamlScalarNode scalar when !string.IsNullOrWhiteSpace(scalar.Value):
+ needs.Add(scalar.Value!);
+ break;
+ case YamlSequenceNode seq:
+ foreach (var item in seq.Children.OfType<YamlScalarNode>())
+ {
+ if (!string.IsNullOrWhiteSpace(item.Value)) needs.Add(item.Value!);
+ }
+ break;
+ }
+ }
+
+ jobNeeds[jobId] = needs;
+}
+
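+// Example: a job declared as
+//   build:
+//     needs: [preflight]
+// is captured as jobNeeds["build"] == { "preflight" }.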
+var errors = new List<string>();
+
+foreach (var allowed in allowedJobs)
+{
+ if (!jobNeeds.ContainsKey(allowed))
+ {
+ errors.Add($"Missing allowed job: {allowed}");
+ }
+}
+
+foreach (var present in jobNeeds.Keys)
+{
+ if (!allowedJobs.Contains(present))
+ {
+ errors.Add($"Unexpected job found: {present}");
+ }
+}
+
+foreach (var edge in requiredEdges)
+{
+ if (!jobNeeds.TryGetValue(edge.From, out var needs) || !needs.Contains(edge.To))
+ {
+ errors.Add($"Missing required edge: {edge.From} -> {edge.To}");
+ }
+}
+
+if (errors.Count > 0)
+{
+ foreach (var e in errors) Console.Error.WriteLine(e);
+ return 1;
+}
+
+Console.WriteLine("CI graph validation passed.");
+return 0;
diff --git a/tools/ci/checks/CiGraphValidator/packages.lock.json b/tools/ci/checks/CiGraphValidator/packages.lock.json
new file mode 100644
index 0000000..a795303
--- /dev/null
+++ b/tools/ci/checks/CiGraphValidator/packages.lock.json
@@ -0,0 +1,13 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {
+ "YamlDotNet": {
+ "type": "Direct",
+ "requested": "[16.3.0, )",
+ "resolved": "16.3.0",
+ "contentHash": "SgMOdxbz8X65z8hraIs6hOEdnkH6hESTAIUa7viEngHOYaH+6q5XJmwr1+yb9vJpNQ19hCQY69xbFsLtXpobQA=="
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/tools/ci/checks/QodanaContractValidator/Program.cs b/tools/ci/checks/QodanaContractValidator/Program.cs
new file mode 100644
index 0000000..84eb895
--- /dev/null
+++ b/tools/ci/checks/QodanaContractValidator/Program.cs
@@ -0,0 +1,48 @@
+using System.Text.Json;
+
+var argsList = args.ToList();
+string? sarifPath = null;
+for (var i = 0; i < argsList.Count; i++)
+{
+ if (argsList[i] == "--sarif" && i + 1 < argsList.Count)
+ {
+ sarifPath = argsList[++i];
+ }
+}
+
+if (string.IsNullOrWhiteSpace(sarifPath))
+{
+ Console.Error.WriteLine("Usage: QodanaContractValidator --sarif ");
+ return 2;
+}
+
+var token = Environment.GetEnvironmentVariable("QODANA_TOKEN");
+if (string.IsNullOrWhiteSpace(token))
+{
+ Console.Error.WriteLine("CI-QODANA-001: QODANA_TOKEN missing");
+ return 1;
+}
+
+if (!File.Exists(sarifPath))
+{
+ Console.Error.WriteLine($"CI-QODANA-002: SARIF missing at {sarifPath}");
+ return 1;
+}
+
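+// Sketch of the minimal accepted SARIF shape: {"runs": []}. Any parseable JSON
+// that lacks a top-level runs[] array is rejected as CI-QODANA-003.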
+try
+{
+ using var doc = JsonDocument.Parse(File.ReadAllText(sarifPath));
+ if (!doc.RootElement.TryGetProperty("runs", out var runs) || runs.ValueKind != JsonValueKind.Array)
+ {
+ Console.Error.WriteLine("CI-QODANA-003: SARIF missing runs[] array");
+ return 1;
+ }
+}
+catch (Exception ex)
+{
+ Console.Error.WriteLine($"CI-QODANA-003: SARIF invalid JSON: {ex.Message}");
+ return 1;
+}
+
+Console.WriteLine("Qodana contract validation passed.");
+return 0;
diff --git a/tools/ci/checks/QodanaContractValidator/QodanaContractValidator.csproj b/tools/ci/checks/QodanaContractValidator/QodanaContractValidator.csproj
new file mode 100644
index 0000000..238f157
--- /dev/null
+++ b/tools/ci/checks/QodanaContractValidator/QodanaContractValidator.csproj
@@ -0,0 +1,8 @@
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <OutputType>Exe</OutputType>
+ <TargetFramework>net10.0</TargetFramework>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <Nullable>enable</Nullable>
+ </PropertyGroup>
+</Project>
diff --git a/tools/ci/checks/QodanaContractValidator/packages.lock.json b/tools/ci/checks/QodanaContractValidator/packages.lock.json
new file mode 100644
index 0000000..6afd678
--- /dev/null
+++ b/tools/ci/checks/QodanaContractValidator/packages.lock.json
@@ -0,0 +1,6 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {}
+ }
+}
\ No newline at end of file
diff --git a/tools/ci/checks/ResultSchemaValidator/Program.cs b/tools/ci/checks/ResultSchemaValidator/Program.cs
new file mode 100644
index 0000000..e11c63f
--- /dev/null
+++ b/tools/ci/checks/ResultSchemaValidator/Program.cs
@@ -0,0 +1,230 @@
+using System.Globalization;
+using System.Text.Json;
+using System.Text.RegularExpressions;
+
+var argsList = args.ToList();
+string? schemaPath = null;
+string? resultPath = null;
+for (var i = 0; i < argsList.Count; i++)
+{
+ if (argsList[i] == "--schema" && i + 1 < argsList.Count)
+ {
+ schemaPath = argsList[++i];
+ continue;
+ }
+
+ if (argsList[i] == "--result" && i + 1 < argsList.Count)
+ {
+ resultPath = argsList[++i];
+ }
+}
+
+if (string.IsNullOrWhiteSpace(schemaPath) || string.IsNullOrWhiteSpace(resultPath))
+{
+ Console.Error.WriteLine("Usage: ResultSchemaValidator --schema --result ");
+ return 2;
+}
+
+if (!File.Exists(schemaPath))
+{
+ Console.Error.WriteLine($"Schema file missing: {schemaPath}");
+ return 1;
+}
+
+if (!File.Exists(resultPath))
+{
+ Console.Error.WriteLine($"Result file missing: {resultPath}");
+ return 1;
+}
+
+JsonDocument schemaDoc;
+try
+{
+ schemaDoc = JsonDocument.Parse(File.ReadAllText(schemaPath));
+}
+catch (Exception ex)
+{
+ Console.Error.WriteLine($"Schema JSON parse failed: {ex.Message}");
+ return 1;
+}
+
+var schemaRoot = schemaDoc.RootElement;
+var requiredProperties = new List<string>();
+if (schemaRoot.TryGetProperty("required", out var requiredEl) && requiredEl.ValueKind == JsonValueKind.Array)
+{
+ foreach (var item in requiredEl.EnumerateArray())
+ {
+ var value = item.GetString();
+ if (!string.IsNullOrWhiteSpace(value))
+ {
+ requiredProperties.Add(value);
+ }
+ }
+}
+
+if (requiredProperties.Count == 0)
+{
+ Console.Error.WriteLine("Schema does not define required properties");
+ return 1;
+}
+
+int expectedSchemaVersion = 1;
+var allowedStatuses = new HashSet<string>(StringComparer.Ordinal);
+if (schemaRoot.TryGetProperty("properties", out var propsEl) && propsEl.ValueKind == JsonValueKind.Object)
+{
+ if (propsEl.TryGetProperty("schema_version", out var schemaVersionEl) &&
+ schemaVersionEl.TryGetProperty("const", out var schemaVersionConst) &&
+ schemaVersionConst.ValueKind == JsonValueKind.Number &&
+ schemaVersionConst.TryGetInt32(out var parsedSchemaVersion))
+ {
+ expectedSchemaVersion = parsedSchemaVersion;
+ }
+
+ if (propsEl.TryGetProperty("status", out var statusSchemaEl) &&
+ statusSchemaEl.TryGetProperty("enum", out var statusEnumEl) &&
+ statusEnumEl.ValueKind == JsonValueKind.Array)
+ {
+ foreach (var item in statusEnumEl.EnumerateArray())
+ {
+ var status = item.GetString();
+ if (!string.IsNullOrWhiteSpace(status))
+ {
+ allowedStatuses.Add(status);
+ }
+ }
+ }
+}
+
+if (allowedStatuses.Count == 0)
+{
+ allowedStatuses.UnionWith(new[] { "pass", "warn", "fail" });
+}
+
+try
+{
+ using var doc = JsonDocument.Parse(File.ReadAllText(resultPath));
+ var root = doc.RootElement;
+ var errors = new List<string>();
+
+ void RequireProperty(string name)
+ {
+ if (!root.TryGetProperty(name, out _))
+ {
+ errors.Add($"Missing property: {name}");
+ }
+ }
+
+ foreach (var property in requiredProperties)
+ {
+ RequireProperty(property);
+ }
+
+ if (root.TryGetProperty("schema_version", out var schemaVersion))
+ {
+ if (schemaVersion.ValueKind != JsonValueKind.Number ||
+ !schemaVersion.TryGetInt32(out var schemaInt) ||
+ schemaInt != expectedSchemaVersion)
+ {
+ errors.Add($"schema_version must be integer {expectedSchemaVersion}");
+ }
+ }
+
+ if (root.TryGetProperty("status", out var statusEl))
+ {
+ var status = statusEl.GetString();
+ if (status is null || !allowedStatuses.Contains(status))
+ {
+ errors.Add($"status must be one of {string.Join("|", allowedStatuses.OrderBy(s => s, StringComparer.Ordinal))}");
+ }
+ }
+
+ if (root.TryGetProperty("timing", out var timingEl))
+ {
+ if (timingEl.ValueKind != JsonValueKind.Object)
+ {
+ errors.Add("timing must be an object");
+ }
+ else
+ {
+ if (!timingEl.TryGetProperty("started_at", out var startedAt) || !IsIsoUtc(startedAt.GetString()))
+ {
+ errors.Add("timing.started_at must be ISO-8601 UTC");
+ }
+
+ if (!timingEl.TryGetProperty("finished_at", out var finishedAt) || !IsIsoUtc(finishedAt.GetString()))
+ {
+ errors.Add("timing.finished_at must be ISO-8601 UTC");
+ }
+
+ if (!timingEl.TryGetProperty("duration_ms", out var durationMs) || durationMs.ValueKind != JsonValueKind.Number || !durationMs.TryGetInt64(out var ms) || ms < 0)
+ {
+ errors.Add("timing.duration_ms must be non-negative integer");
+ }
+ }
+ }
+
+ if (root.TryGetProperty("rule_violations", out var violationsEl))
+ {
+ if (violationsEl.ValueKind != JsonValueKind.Array)
+ {
+ errors.Add("rule_violations must be an array");
+ }
+ else
+ {
+ foreach (var item in violationsEl.EnumerateArray())
+ {
+ if (!item.TryGetProperty("rule_id", out var ruleIdEl) || !Regex.IsMatch(ruleIdEl.GetString() ?? string.Empty, "^CI-[A-Z0-9_-]+-[0-9]{3}$"))
+ {
+ errors.Add("rule_violations[].rule_id invalid");
+ }
+
+ if (!item.TryGetProperty("severity", out var severityEl) || severityEl.GetString() is not ("warn" or "fail"))
+ {
+ errors.Add("rule_violations[].severity invalid");
+ }
+
+ if (!item.TryGetProperty("message", out var messageEl) || string.IsNullOrWhiteSpace(messageEl.GetString()))
+ {
+ errors.Add("rule_violations[].message missing");
+ }
+
+ if (!item.TryGetProperty("evidence_paths", out var evidenceEl) || evidenceEl.ValueKind != JsonValueKind.Array)
+ {
+ errors.Add("rule_violations[].evidence_paths must be array");
+ }
+ else if (severityEl.GetString() == "fail" && evidenceEl.GetArrayLength() < 1)
+ {
+ errors.Add("rule_violations[].evidence_paths must contain at least one item for fail severity");
+ }
+ }
+ }
+ }
+
+ if (errors.Count > 0)
+ {
+ foreach (var err in errors)
+ {
+ Console.Error.WriteLine(err);
+ }
+
+ return 1;
+ }
+
+ Console.WriteLine($"Result schema validation passed: {resultPath}");
+ return 0;
+}
+catch (Exception ex)
+{
+ Console.Error.WriteLine($"Result JSON parse failed: {ex.Message}");
+ return 1;
+}
+
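+// Example: IsIsoUtc("2025-01-01T00:00:00Z") is true; an offset form such as
+// "2025-01-01T00:00:00+02:00" is false because the trailing "Z" is required.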
+static bool IsIsoUtc(string? value)
+{
+ if (string.IsNullOrWhiteSpace(value) || !value.EndsWith("Z", StringComparison.Ordinal))
+ {
+ return false;
+ }
+
+ return DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out _);
+}
diff --git a/tools/ci/checks/ResultSchemaValidator/ResultSchemaValidator.csproj b/tools/ci/checks/ResultSchemaValidator/ResultSchemaValidator.csproj
new file mode 100644
index 0000000..238f157
--- /dev/null
+++ b/tools/ci/checks/ResultSchemaValidator/ResultSchemaValidator.csproj
@@ -0,0 +1,8 @@
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <OutputType>Exe</OutputType>
+ <TargetFramework>net10.0</TargetFramework>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <Nullable>enable</Nullable>
+ </PropertyGroup>
+</Project>
diff --git a/tools/ci/checks/ResultSchemaValidator/packages.lock.json b/tools/ci/checks/ResultSchemaValidator/packages.lock.json
new file mode 100644
index 0000000..6afd678
--- /dev/null
+++ b/tools/ci/checks/ResultSchemaValidator/packages.lock.json
@@ -0,0 +1,6 @@
+{
+ "version": 2,
+ "dependencies": {
+ "net10.0": {}
+ }
+}
\ No newline at end of file
diff --git a/tools/ci/lib/log.sh b/tools/ci/lib/log.sh
new file mode 100755
index 0000000..3b3875c
--- /dev/null
+++ b/tools/ci/lib/log.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ci_now_utc() {
+ date -u +"%Y-%m-%dT%H:%M:%SZ"
+}
+
+ci_now_ms() {
+ local ms
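+ # GNU date prints milliseconds for %3N; BSD/macOS date leaves it as literal text,
+ # so the numeric check below falls back to whole seconds * 1000.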
+ ms=$(date -u +%s%3N 2>/dev/null || true)
+ if [[ -n "$ms" && "$ms" =~ ^[0-9]+$ ]]; then
+ printf '%s\n' "$ms"
+ return 0
+ fi
+ printf '%s000\n' "$(date -u +%s)"
+}
+
+ci_log() {
+ local level="$1"
+ shift
+ printf '[%s] [%s] %s\n' "$(ci_now_utc)" "$level" "$*"
+}
+
+ci_info() { ci_log INFO "$@"; }
+ci_warn() { ci_log WARN "$@"; }
+ci_error() { ci_log ERROR "$@" >&2; }
diff --git a/tools/ci/lib/result.sh b/tools/ci/lib/result.sh
new file mode 100755
index 0000000..8951b0b
--- /dev/null
+++ b/tools/ci/lib/result.sh
@@ -0,0 +1,116 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# shellcheck source=tools/ci/lib/log.sh
+source "$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)/log.sh"
+
+ci_result_init() {
+ local check_id="$1"
+ local out_dir="$2"
+
+ export CI_CHECK_ID="$check_id"
+ export CI_OUT_DIR="$out_dir"
+ export CI_RAW_LOG="$out_dir/raw.log"
+ export CI_SUMMARY_MD="$out_dir/summary.md"
+ export CI_RESULT_JSON="$out_dir/result.json"
+ export CI_VIOLATIONS_NDJSON="$out_dir/.violations.ndjson"
+ export CI_EVIDENCE_NDJSON="$out_dir/.evidence.ndjson"
+ export CI_STATUS_FILE="$out_dir/.status"
+ export CI_START_MS
+ export CI_START_AT
+
+ mkdir -p "$out_dir"
+ : > "$CI_RAW_LOG"
+ : > "$CI_SUMMARY_MD"
+ : > "$CI_VIOLATIONS_NDJSON"
+ : > "$CI_EVIDENCE_NDJSON"
+ printf 'pass' > "$CI_STATUS_FILE"
+
+ CI_START_MS="$(ci_now_ms)"
+ CI_START_AT="$(ci_now_utc)"
+}
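+
+# Example: ci_result_init "build" "artifacts/ci/build" creates raw.log, summary.md,
+# and the hidden scratch files; ci_result_finalize later writes result.json alongside them.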
+
+ci_result_append_summary() {
+ printf '%s\n' "$*" >> "$CI_SUMMARY_MD"
+}
+
+ci_result_add_evidence() {
+ local evidence_path="$1"
+ jq -cn --arg p "$evidence_path" '$p' >> "$CI_EVIDENCE_NDJSON"
+}
+
+ci_result_add_violation() {
+ local rule_id="$1"
+ local severity="$2"
+ local message="$3"
+ shift 3
+ local evidence_paths=("$@")
+
+ if [[ "$severity" == "fail" ]]; then
+ printf 'fail' > "$CI_STATUS_FILE"
+ elif [[ "$(cat "$CI_STATUS_FILE")" == "pass" ]]; then
+ printf 'warn' > "$CI_STATUS_FILE"
+ fi
+
+ local ev_json
+ ev_json=$(printf '%s\n' "${evidence_paths[@]}" | jq -R . | jq -s .)
+
+ jq -cn \
+ --arg rule_id "$rule_id" \
+ --arg severity "$severity" \
+ --arg message "$message" \
+ --argjson evidence_paths "$ev_json" \
+ '{rule_id:$rule_id,severity:$severity,message:$message,evidence_paths:$evidence_paths}' >> "$CI_VIOLATIONS_NDJSON"
+
+ local p
+ for p in "${evidence_paths[@]}"; do
+ ci_result_add_evidence "$p"
+ done
+}
+
+ci_result_finalize() {
+ local finished_ms finished_at duration_ms status
+ finished_ms="$(ci_now_ms)"
+ finished_at="$(ci_now_utc)"
+ status="$(cat "$CI_STATUS_FILE")"
+ duration_ms=$((finished_ms - CI_START_MS))
+
+ local violations_json evidence_json artifacts_json
+ violations_json=$(jq -s . "$CI_VIOLATIONS_NDJSON")
+ evidence_json=$(jq -s 'unique' "$CI_EVIDENCE_NDJSON")
+ artifacts_json=$(jq -cn --arg raw "$CI_RAW_LOG" --arg summary "$CI_SUMMARY_MD" --arg result "$CI_RESULT_JSON" '[ $raw, $summary, $result ]')
+
+ jq -cn \
+ --arg check_id "$CI_CHECK_ID" \
+ --arg status "$status" \
+ --arg started_at "$CI_START_AT" \
+ --arg finished_at "$finished_at" \
+ --argjson duration_ms "$duration_ms" \
+ --argjson rule_violations "$violations_json" \
+ --argjson evidence_paths "$evidence_json" \
+ --argjson artifacts "$artifacts_json" \
+ '{
+ schema_version: 1,
+ check_id: $check_id,
+ status: $status,
+ rule_violations: $rule_violations,
+ evidence_paths: $evidence_paths,
+ artifacts: $artifacts,
+ timing: {
+ started_at: $started_at,
+ finished_at: $finished_at,
+ duration_ms: $duration_ms
+ }
+ }' > "$CI_RESULT_JSON"
+}
+
+ci_run_capture() {
+ local description="$1"
+ shift
+
+ ci_info "$description"
+ {
+ printf '$ %s\n' "$*"
+ "$@"
+ } >> "$CI_RAW_LOG" 2>&1
+}
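+
+# Typical call sequence (sketch, mirroring tools/ci/bin/run.sh):
+#   ci_result_init "build" "artifacts/ci/build"
+#   ci_run_capture "Build solution" dotnet build FileClassifier.sln
+#   ci_result_finalize   # usually left to the caller's EXIT trap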
diff --git a/tools/ci/policies/ci_graph_expected.json b/tools/ci/policies/ci_graph_expected.json
new file mode 100644
index 0000000..d726f4b
--- /dev/null
+++ b/tools/ci/policies/ci_graph_expected.json
@@ -0,0 +1,17 @@
+{
+ "allowed_jobs": [
+ "pr-labeling",
+ "preflight",
+ "build",
+ "security-nuget",
+ "tests-bdd-coverage",
+ "summary"
+ ],
+ "required_needs_edges": [
+ {"from": "build", "to": "preflight"},
+ {"from": "security-nuget", "to": "build"},
+ {"from": "tests-bdd-coverage", "to": "build"},
+ {"from": "summary", "to": "security-nuget"},
+ {"from": "summary", "to": "tests-bdd-coverage"}
+ ]
+}
diff --git a/tools/ci/policies/policy_artifact_contract.sh b/tools/ci/policies/policy_artifact_contract.sh
new file mode 100755
index 0000000..ab516e3
--- /dev/null
+++ b/tools/ci/policies/policy_artifact_contract.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# shellcheck source=tools/ci/lib/result.sh
+source "$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)/lib/result.sh"
+
+if [[ $# -lt 1 ]]; then
+ echo "Usage: policy_artifact_contract.sh [ ...]" >&2
+ exit 2
+fi
+
+has_failures=0
+
+for check_id in "$@"; do
+ check_dir="artifacts/ci/${check_id}"
+ for req in raw.log summary.md result.json; do
+ if [[ ! -f "${check_dir}/${req}" ]]; then
+ ci_result_add_violation "CI-ARTIFACT-001" "fail" "missing required artifact ${check_dir}/${req}" "${check_dir}/${req}"
+ has_failures=1
+ continue
+ fi
+ done
+
+ if [[ -f "${check_dir}/result.json" ]]; then
+ if ! dotnet tools/ci/checks/ResultSchemaValidator/bin/Release/net10.0/ResultSchemaValidator.dll --schema tools/ci/schema/result.schema.json --result "${check_dir}/result.json" >> "$CI_RAW_LOG" 2>&1; then
+ ci_result_add_violation "CI-SCHEMA-001" "fail" "result.json schema validation failed for ${check_id}" "${check_dir}/result.json"
+ has_failures=1
+ fi
+ fi
+done
+
+if [[ "$has_failures" -eq 1 ]]; then
+ exit 1
+fi
diff --git a/tools/ci/policies/policy_shell_safety.sh b/tools/ci/policies/policy_shell_safety.sh
new file mode 100755
index 0000000..398850b
--- /dev/null
+++ b/tools/ci/policies/policy_shell_safety.sh
@@ -0,0 +1,86 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# shellcheck source=tools/ci/lib/result.sh
+source "$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)/lib/result.sh"
+
+MAX_INLINE_RUN_LINES="${MAX_INLINE_RUN_LINES:-5}"
+
+findings=0
+
+while IFS=: read -r file line _; do
+ [[ -z "$file" ]] && continue
+ ci_result_add_violation "CI-SHELL-001" "fail" "continue-on-error true is forbidden" "${file}:${line}"
+ findings=$((findings + 1))
+done < <(rg -n "continue-on-error:\s*true" .github/workflows || true)
+
+while IFS=: read -r file line _; do
+ [[ -z "$file" ]] && continue
+ ci_result_add_violation "CI-SHELL-002" "fail" "'|| true' is forbidden on critical workflow paths" "${file}:${line}"
+ findings=$((findings + 1))
+done < <(rg -n "\|\|\s*true" .github/workflows || true)
+
+while IFS=: read -r file line _; do
+ [[ -z "$file" ]] && continue
+ ci_result_add_violation "CI-SHELL-003" "fail" "'set +e' is forbidden outside documented allow-list" "${file}:${line}"
+ findings=$((findings + 1))
+done < <(rg -n "^[[:space:]]*set[[:space:]]+\\+e([[:space:]]|$)" .github/workflows tools/ci || true)
+
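+# CI-SHELL-004: flag any literal "run: |" block in workflow YAML that exceeds MAX_INLINE_RUN_LINES lines.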
+while IFS=: read -r file line count; do
+ [[ -z "$file" ]] && continue
+ ci_result_add_violation "CI-SHELL-004" "fail" "workflow run block exceeds max lines (${MAX_INLINE_RUN_LINES})" "${file}:${line}" "${file}:${count}"
+ findings=$((findings + 1))
+done < <(awk -v max="$MAX_INLINE_RUN_LINES" '
+ function lead_spaces(s, i,c) {
+ c = 0
+ for (i = 1; i <= length(s); i++) {
+ if (substr(s, i, 1) == " ") c++
+ else break
+ }
+ return c
+ }
+ BEGIN {inrun=0;count=0;start=0;run_indent=0;prev_file=""}
+ FNR == 1 {
+ if (NR != 1 && inrun == 1 && count > max) {
+ printf "%s:%d:%d\n", prev_file, start, count
+ }
+ inrun=0
+ count=0
+ start=0
+ run_indent=0
+ prev_file=FILENAME
+ }
+ {
+ prev_file=FILENAME
+ if ($0 ~ /^[[:space:]]*run:[[:space:]]*\|[[:space:]]*$/) {
+ inrun=1
+ count=0
+ start=NR
+ run_indent=lead_spaces($0)
+ next
+ }
+ if (inrun==1) {
+ curr_indent=lead_spaces($0)
+ if ($0 !~ /^[[:space:]]*$/ && curr_indent <= run_indent) {
+ if (count > max) {
+ printf "%s:%d:%d\n", FILENAME, start, count
+ }
+ inrun=0
+ } else {
+ count++
+ }
+ }
+ }
+ END {
+ if (inrun==1 && count > max) {
+ printf "%s:%d:%d\n", prev_file, start, count
+ }
+ }
+' .github/workflows/*.yml)
+
+if [[ "$findings" -eq 0 ]]; then
+ ci_result_append_summary "Shell safety policy passed."
+else
+ ci_result_append_summary "Shell safety policy violations: $findings"
+ exit 1
+fi
diff --git a/tools/ci/schema/result.schema.json b/tools/ci/schema/result.schema.json
new file mode 100644
index 0000000..be1cabe
--- /dev/null
+++ b/tools/ci/schema/result.schema.json
@@ -0,0 +1,82 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "tools/ci/schema/result.schema.json",
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "schema_version",
+ "check_id",
+ "status",
+ "rule_violations",
+ "evidence_paths",
+ "artifacts",
+ "timing"
+ ],
+ "properties": {
+ "schema_version": {
+ "type": "integer",
+ "const": 1
+ },
+ "check_id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "status": {
+ "type": "string",
+ "enum": ["pass", "warn", "fail"]
+ },
+ "rule_violations": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["rule_id", "severity", "message", "evidence_paths"],
+ "properties": {
+ "rule_id": {
+ "type": "string",
+ "pattern": "^CI-[A-Z0-9_-]+-[0-9]{3}$"
+ },
+ "severity": {
+ "type": "string",
+ "enum": ["warn", "fail"]
+ },
+ "message": {
+ "type": "string",
+ "minLength": 1
+ },
+ "evidence_paths": {
+ "type": "array",
+ "items": { "type": "string", "minLength": 1 }
+ }
+ }
+ }
+ },
+ "evidence_paths": {
+ "type": "array",
+ "items": { "type": "string", "minLength": 1 }
+ },
+ "artifacts": {
+ "type": "array",
+ "items": { "type": "string", "minLength": 1 }
+ },
+ "timing": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["started_at", "finished_at", "duration_ms"],
+ "properties": {
+ "started_at": {
+ "type": "string",
+ "pattern": "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(?:\\.\\d+)?Z$"
+ },
+ "finished_at": {
+ "type": "string",
+ "pattern": "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(?:\\.\\d+)?Z$"
+ },
+ "duration_ms": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ }
+ }
+}
diff --git a/tools/versioning/check-versioning.sh b/tools/versioning/check-versioning.sh
index 078a9e8..078d74c 100755
--- a/tools/versioning/check-versioning.sh
+++ b/tools/versioning/check-versioning.sh
@@ -120,14 +120,16 @@ if [[ "${MODE}" == "required" ]]; then
exit 0
fi
-# Determine base version from latest tag or base file
+# Determine base version deterministically from base ref file first.
+# Use tags only as a fallback for repositories that don't carry Directory.Build.props in base history.
base_version=""
-latest_tag=$(git tag -l 'v[0-9]*' --sort=-v:refname | head -n1)
-if [[ -n "${latest_tag}" ]]; then
- base_version="${latest_tag#v}"
-else
- if git show "${BASE}:Directory.Build.props" >/dev/null 2>&1; then
- base_version=$(git show "${BASE}:Directory.Build.props" | sed -n 's/.*<Version>\(.*\)<\/Version>.*/\1/p' | head -n1)
+if git show "${BASE}:Directory.Build.props" >/dev/null 2>&1; then
+ base_version=$(git show "${BASE}:Directory.Build.props" | sed -n 's/.*<Version>\(.*\)<\/Version>.*/\1/p' | head -n1)
+fi
+if [[ -z "${base_version}" ]]; then
+ latest_tag=$(git tag -l 'v[0-9]*' --sort=-v:refname | head -n1)
+ if [[ -n "${latest_tag}" ]]; then
+ base_version="${latest_tag#v}"
fi
fi