diff --git a/.env.example b/.env.example
index 9128c4e01..970f4cd10 100644
--- a/.env.example
+++ b/.env.example
@@ -58,7 +58,7 @@ SES_FROM=noreply@example.com
# Optional: Set APP_BASE_URL to override the defaults:
# - Local IDE: Defaults to http://localhost:4200
# - Local Docker: Defaults to http://localhost
-# - AWS Fargate: Set in task definition (https://goodone.ch)
+# - AWS Fargate: Set in task definition (https://GoodOne.ch)
# APP_BASE_URL=http://localhost:4200
# JWT Secret for local development
diff --git a/.github/workflows/guardrails.yml b/.github/workflows/guardrails.yml
new file mode 100644
index 000000000..e782a22ba
--- /dev/null
+++ b/.github/workflows/guardrails.yml
@@ -0,0 +1,34 @@
+name: Junie Guardrails
+
+on:
+ pull_request:
+ branches: [ main ]
+ push:
+ branches: [ main ]
+
+jobs:
+ validate:
+ name: Run Guardrails Validator
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install dependencies
+ run: pip install pyyaml
+
+ - name: Run Validator with Autofix
+ run: python scripts/task-governance/guardrails-validator.py --autofix
+
+ - name: Check for changes after autofix
+ run: |
+ if [ -n "$(git status --porcelain)" ]; then
+ echo "Autofix applied changes. Please review and commit locally."
+ git diff
+ exit 1
+ fi
diff --git a/.github/workflows/task-contract-lint.yml b/.github/workflows/task-contract-lint.yml
new file mode 100644
index 000000000..611b197c5
--- /dev/null
+++ b/.github/workflows/task-contract-lint.yml
@@ -0,0 +1,36 @@
+name: Task Contract Lint
+
+on:
+  pull_request:
+    paths:
+      - 'doc/knowledge/junie-tasks/**'
+      - 'scripts/task-governance/**'
+      - '.github/workflows/task-contract-lint.yml'
+
+jobs:
+  lint-task-contracts:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+
+      - name: Get changed files
+        id: changed-files
+        uses: tj-actions/changed-files@v44
+        with:
+          files: doc/knowledge/junie-tasks/**
+          json: true
+
+      - name: Run task contract linter
+        if: steps.changed-files.outputs.any_changed == 'true'
+        env:
+          # Env indirection prevents shell script injection via crafted PR file names.
+          ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
+        run: |
+          echo "$ALL_CHANGED_FILES" | jq -r '.[]' > files_to_lint.txt
+          xargs -d '\n' -r python scripts/task-governance/lint_task_contracts.py < files_to_lint.txt
diff --git a/.github/workflows/validate-tasks.yml b/.github/workflows/validate-tasks.yml
index 331f50f53..de0622e9f 100644
--- a/.github/workflows/validate-tasks.yml
+++ b/.github/workflows/validate-tasks.yml
@@ -21,4 +21,4 @@ jobs:
run: pip install pyyaml
- name: Validate task files
- run: python scripts/validate_tasks.py doc/knowledge/junie-tasks
\ No newline at end of file
+      run: python scripts/task-governance/validate_tasks.py doc/knowledge/junie-tasks
diff --git a/.gitignore b/.gitignore
index bccd88b74..6df3587c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2472,3 +2472,12 @@
/frontend/node/npx
/frontend/node/npx.cmd
/.ai/mcp/mcp.json
+/doc/knowledge/junie-tasks/backlog/
+/logs/ai-traces/
+/backend/logs/ai-traces/
+/tmp_repo/
+/sonar-export-new/issues/
+/sonar/sonar-export/issues/
+/sonar-export/issues/
+/sonar-export-fresh/issues/
+/test-results/
diff --git a/.idea/compiler.xml b/.idea/compiler.xml
index 8c6dc5196..c61dbc6f0 100644
--- a/.idea/compiler.xml
+++ b/.idea/compiler.xml
@@ -8,9 +8,9 @@
-
+
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
index 5ace414d8..dcb6b8c4c 100644
--- a/.idea/vcs.xml
+++ b/.idea/vcs.xml
@@ -1,6 +1,6 @@
-
+
diff --git a/.junie/guidelines.md b/.junie/guidelines.md
index e5c6b7394..32324a180 100644
--- a/.junie/guidelines.md
+++ b/.junie/guidelines.md
@@ -5,8 +5,37 @@ This document outlines the best practices and standards for the GoodOne project.
## General Principles
- **Modern Standards**: Use the latest stable versions of frameworks (Angular 21+, Spring Boot 4+). NEVER use deprecated methods, features, or syntax in any language (e.g., avoid `*ngIf` and `*ngFor` in Angular, use modern control flow instead).
- **Consistency**: Follow existing naming conventions and project structure.
-- **Clean Code**: Remove all unused imports, variables, and commented-out code blocks. Every test case MUST have at least one explicit assertion (e.g., `expect`, `assert`, `assertEquals`). Avoid empty catch blocks; at least log the exception or add a comment explaining why it's ignored.
-- **Method Complexity**: Keep methods small and focused (Single Responsibility Principle). Avoid "Brain Methods" with high cyclomatic complexity (e.g., > 10).
+- **Clean Code**: Remove all unused imports (including for TypeScript/Angular), variables, fields, parameters, and commented-out code blocks. Every test case MUST have at least one explicit assertion (e.g., `expect`, `assert`, `assertEquals`). Avoid empty catch blocks; at least log the exception or add a comment explaining why it's ignored. NEVER append empty trailing lines to files; all files MUST end with a single newline character.
+- **Complexity Guardrails**:
+ - **Cognitive Complexity**: Max 15 per method (Sonar java:S3776).
+ - **Nesting Depth**: Max 3 levels of indentation (if, for, while, try). Use early returns (guard clauses) to reduce nesting.
+ - **Method Length**: Aim for < 30 lines. Methods exceeding 50 lines MUST be refactored into smaller, focused private methods.
+ - **Parameters**: Max 7 parameters (Sonar java:S107). Use DTOs or Value Objects for more.
+ - **Boolean Expressions**: Avoid complex conditions with more than 3 operators (`&&`, `||`). Break them down into descriptive boolean variables or helper methods.
+ - **Switch/If-Else Chains**: Long chains of `if-else if` or `switch` (e.g., > 5 cases) should be refactored using a Map-based lookup, Strategy pattern, or Polymorphism.
+- **Modern Java Standards (Java 21)**:
+ - Use `Stream.toList()` instead of `.collect(Collectors.toList())` for unmodifiable lists.
+ - Use Pattern Matching for `instanceof` (e.g., `if (obj instanceof String s)`).
+ - Use Text Blocks (`"""..."""`) for multiline strings (e.g., SQL, JSON, HTML, or large text reports) instead of string concatenation.
+ - Use `.formatted()` for string interpolation in text blocks or literals.
+ - **Logging**: Use SLF4J `Logger` for all logging. NEVER use `System.out.println`, `System.err.println`, or `Throwable.printStackTrace()`. Use placeholders (e.g., `logger.info("User {} logged in", userId)`) instead of string concatenation.
+ - **Exceptions**: Avoid throwing generic exceptions like `RuntimeException`, `Exception`, or `Throwable`. Use specific, descriptive exceptions. NEVER ignore caught exceptions; log them or explain why they are ignored.
+ - **Modifiers**: Follow the standard Java modifier order: `public protected private abstract default static final transient volatile synchronized native strictfp`.
+- **Clean Logic**:
+ - Avoid nested ternary operators.
+ - Merge nested `if` statements into a single `if` with a combined condition (`&&`) when they have no `else` and no other code in between.
+ - Prefer method references (e.g., `Objects::nonNull`) over lambda expressions (e.g., `x -> x != null`) when the lambda only calls an existing method.
+ - Simplify boolean returns (e.g., `return condition;` instead of `if (condition) return true; else return false;`).
+ - Avoid using Java restricted identifiers (e.g., `record`, `yield`, `var`, `sealed`, `permits`) as variable or parameter names.
+ - Remove redundant type casts.
+ - Iterate over `entrySet()` instead of `keySet()` when both keys and values are required.
+ - **Constants**: Avoid duplicating string literals; use `private static final` constants (UPPER_SNAKE_CASE) for repeated values. Avoid "magic numbers"; use descriptive constants instead.
+ - **Hardcoded Values**: Avoid hardcoding URIs, file paths, or configuration values. Use `@Value` or `@ConfigurationProperties` to inject them.
+ - Remove unused private methods, fields, and parameters.
+- **Modern Frontend Standards**:
+ - Use `readonly` for TypeScript properties that are only set in the constructor or never reassigned.
+ - Use `@use` instead of `@import` in Sass/CSS files.
+ - Avoid deprecated APIs; always prefer the modern equivalent (e.g., modern Angular control flow).
- **Communication**: If there are doubts about the implementation or if better ways are identified, stop the implementation and ask the user before continuing.
- **RBAC Synchronization**: Frontend and backend Role-Based Access Control (RBAC) MUST always be in sync. Every protected UI route MUST have a corresponding protected REST endpoint with the same security policy (e.g., `ROLE_ADMIN`). This ensures "Defense in Depth" and prevents security bypasses via direct API access. This is a CRITICAL security requirement.
- **Centralized Versioning**:
@@ -14,7 +43,7 @@ This document outlines the best practices and standards for the GoodOne project.
- All modules (Backend, Frontend, Android, Test Client) must share the same version.
- Use `.\scripts\sync-version.ps1` to propagate version changes from the root `pom.xml` to other files (package.json, build.gradle, deployment scripts, and documentation).
- **Build Integrity**: Ensure the project builds successfully (`mvn clean install`) before submitting changes.
-- **Post‑Refactoring Build**: After every refactoring (no matter how small), the application MUST build successfully. Verify locally with:
+- **Post-Refactoring Build**: After every refactoring (no matter how small), the application MUST build successfully. Verify locally with:
- Backend+Frontend: `mvn clean install -DskipTests`
- Frontend only: `mvn clean install -pl frontend -DskipTests`
Any refactoring is incomplete until these builds pass.
@@ -37,6 +66,8 @@ This document outlines the best practices and standards for the GoodOne project.
- No dev-only features are exposed.
- Application can be demoed repeatedly without manual fixes.
- Playwright UX guardrails are successful (`npx playwright test e2e/ux-guardrails.spec.ts`).
+ - **Task Log Update**: The relevant task `.md` file MUST be updated with a log entry and testing instructions.
+ - **Basic Regression**: All relevant unit and E2E tests MUST pass before reporting "DONE".
- **Docker First**: Ensure all changes are compatible with the Docker-based deployment.
- **Language**: Always communicate in English for all interactions, thoughts, and documentation, unless explicitly requested otherwise by the user.
- **Translations**: Always provide translations for both supported languages (English `en.json` and German `de-ch.json`) when adding or modifying UI text. The `ch` part of the `de-ch` locale MUST be respected: never use the letter 'ß' (Eszett) in any German translations (e.g. use 'ss' instead).
@@ -62,6 +93,7 @@ This document outlines the best practices and standards for the GoodOne project.
6. `## Verification`
7. `## Links`
8. `## Notes (optional)`
+ 9. `## Acceptance Confirmation`
- **YAML Frontmatter**: Must include `key`, `title`, `taskset`, `priority`, `status`, `created`, `updated`, and `iterations`.
- `status` must be one of: `TODO`, `IN_PROGRESS`, `DONE`, `BLOCKED`.
- `priority` must be one of: `P0`, `P1`, `P2`.
@@ -78,16 +110,20 @@ This document outlines the best practices and standards for the GoodOne project.
- Never invent new metadata fields or move metadata into the markdown body.
- Never add emojis or decorative formatting.
- If an instruction conflicts with this guideline, refuse and explain why.
+ - **Task Storage**:
+ - The following folders are legacy and are used to keep the history of old tasks: `doc/knowledge/junie-tasks/taskset*`.
+ - All new tasks MUST be placed in category folders: `doc/knowledge/junie-tasks/AI-*/` (e.g., `doc/knowledge/junie-tasks/AI-REL/`).
+ - **CRITICAL**: Whenever you update an `.md` file in a legacy folder (e.g. `doc/knowledge/junie-tasks/taskset*`), you MUST move it to its corresponding category folder `doc/knowledge/junie-tasks/AI-*/` (e.g. `doc/knowledge/junie-tasks/AI-ARCH/`) before submitting. Do NOT update files in legacy folders anymore.
- **Chat Summarization Workaround**:
- If the Junie AI JetBrains plugin does not provide a manual rename option, ensure the full task filename (without extension) is mentioned prominently in the initial message of a task. This triggers the plugin's automatic summarizer to include the complete Task ID and filename in the chat history summary.
## Backend Development (Spring Boot)
### 1. Architecture
-- **Controllers**: Use RESTful controllers in `ch.goodone.goodone.backend.controller`.
-- **Models**: Use JPA entities in `ch.goodone.goodone.backend.model`. Always create a Flyway migration script (in `backend/src/main/resources/db/migration/`) whenever a JPA entity is created or modified to ensure the database schema stays in sync.
+- **Controllers**: Use RESTful controllers in `ch.goodone.backend.controller`.
+- **Models**: Use JPA entities in `ch.goodone.backend.model`. Always create a Flyway migration script (in `backend/src/main/resources/db/migration/`) whenever a JPA entity is created or modified to ensure the database schema stays in sync.
- **Idempotent Migrations**: All Flyway migration scripts MUST be idempotent. Use `CREATE TABLE IF NOT EXISTS`, `ALTER TABLE ... ADD COLUMN IF NOT EXISTS`, and similar constructs. This is critical for Fargate deployments where tasks may restart or run concurrently during rollout.
-- **Repositories**: Use Spring Data JPA repositories in `ch.goodone.goodone.backend.repository`. Avoid using direct SQL statements (e.g., via `JdbcTemplate`) in Java code. Use JPA or Spring Data JPA abstractions for all database operations.
+- **Repositories**: Use Spring Data JPA repositories in `ch.goodone.backend.repository`. Avoid using direct SQL statements (e.g., via `JdbcTemplate`) in Java code. Use JPA or Spring Data JPA abstractions for all database operations.
- **DTOs**: Use DTOs for API requests and responses to avoid leaking internal entity structures. Implement `fromEntity()` static methods in DTOs for centralized mapping.
### 2. Best Practices
@@ -97,15 +133,17 @@ This document outlines the best practices and standards for the GoodOne project.
- **Log Security**: NEVER log user-provided data (e.g., request parameters, headers, paths) directly without sanitization to prevent Log Injection. Use placeholders and only log trusted or sanitized values. NEVER log sensitive information like passwords, session tokens, or PII.
- **Type Safety**: Avoid using generic wildcard types like `ResponseEntity>` or `ResponseEntity` in controllers. Always use specific DTOs or `ResponseEntity` to maintain clear API contracts and avoid Sonar issues.
- **Validation**: Use `@Column` annotations for explicit database mapping. Use unique constraints where appropriate (e.g., login, email).
-- **JSON Handling**: Use `tools.jackson.databind.ObjectMapper` for JSON processing in tests.
+- **JSON Handling**: Always use **`tools.jackson.databind.ObjectMapper`** for all Spring `@Bean` definitions, `@Autowired` injections, and JSON processing in tests (matching Spring Boot 4's default). Continue using **`com.fasterxml.jackson.annotation.*`** for DTOs to maintain compatibility with existing annotations. Refer to [ADR-0067](doc/knowledge/adrs/adr-full-set.md#adr-0067-single-serialization-ownership-jackson-3-only) for details.
- **Auditing**: All major user interactions (login, registration, password changes, etc.) and significant system events must be logged to the `ActionLogService` to ensure a robust audit trail.
- **Date/Time**: Use `LocalDate` for dates. Use `@JsonFormat(pattern = "yyyy-MM-dd")` for DTO date fields.
- **Role-Based Access Control**: Enforce security in `SecurityConfig` and use the `Role` enum.
### 3. Testing
- Use JUnit 5 and MockMvc for controller testing.
+- JUnit 5 test classes and methods should have default (package-private) visibility. Avoid using `public`.
- Always include Spring Security in the test context if the endpoint is protected.
- Keep tests isolated from the database using a `test` profile if needed.
+- **AI Infrastructure for Builds/Tests**: Local builds and tests MUST run with Ollama, not OpenAI. No OpenAI tokens must be consumed during the build process. Ensure all `@SpringBootTest` use the `ollama` profile or mock AI beans accordingly.
## Frontend Development (Angular)
@@ -133,9 +171,12 @@ This document outlines the best practices and standards for the GoodOne project.
## Deployment & Environments
- **Local Dev**: Use `npm start` (Angular) and the Spring Boot application (IntelliJ). The Angular proxy (`proxy.conf.json`) handles routing to the backend on `localhost:8080`.
-- **Docker**: Use `docker compose up --build`. Nginx handles the reverse proxying of `/api` requests to the `backend` container.
+- **Docker (Local)**: Use `docker compose up --build`. Nginx handles the reverse proxying of `/api` requests to the `backend` container.
- **AWS ECS (Fargate)**:
- - **Resource Allocation**: To minimize costs, always use the minimal required resources for demo services. The standard allocation is **256 CPU units and 512 MiB memory**. Do not increase these without explicit justification and user approval.
+ - **Architecture**: Nginx is NOT used for reverse proxying. The AWS Application Load Balancer (ALB) handles routing (e.g., path-based routing for `/api`). Nginx in the frontend container only serves static files.
+ - **ALB Timeout**: Ensure the ALB 'Idle timeout' is set to at least **300 seconds** (default is 60s) to support long-running AI calls like ADR Drift detection.
+ - **Resource Allocation**: To minimize costs, always use the minimal required resources for demo services. The standard allocation is **256 CPU units and 512 MiB memory**. Do not increase these without explicit justification and user approval. For AI-heavy features with large prompts, **512 CPU units and 2048 MiB memory** may be required to avoid timeouts and high GC pressure.
+ - **Model Selection**: For latency-sensitive AI features like ADR Drift detection, use `gpt-4o-mini` to achieve response times < 10s and reduce token costs.
- **Single Task Enforcement**: Ensure only one task runs per service by setting `desired-count` to 1 and `maximum-percent` to 100 during deployments to avoid overlapping costs.
- **Task Definitions**: Always update the local task definition files in `deploy/aws/` and `task-def.json` at the root when making live changes to ensure consistency across deployments.
diff --git a/README.md b/README.md
index 522300f04..c4d79df2d 100644
--- a/README.md
+++ b/README.md
@@ -28,7 +28,7 @@ Instead of only generating code, the platform analyzes:
• engineering risks
Live demo
-https://goodone.ch
+https://GoodOne.ch
---
@@ -221,13 +221,13 @@ Documentation is located in the `doc` directory.
Key entry points:
Architecture
-doc/architecture/index.md
+doc/knowledge/architecture/index.md
User Guide
-doc/user-guide/user-guide.md
+doc/operations-guide.md
Admin Guide
-doc/admin-guide/admin-guide.md
+doc/operations-guide.md
Deployment
doc/infrastructure/Deployment.md
diff --git a/backend/data/dependency-check/publishedSuppressions.xml b/backend/data/dependency-check/publishedSuppressions.xml
deleted file mode 100644
index 0c9354f78..000000000
--- a/backend/data/dependency-check/publishedSuppressions.xml
+++ /dev/null
@@ -1,2056 +0,0 @@
-
-
-
-
- ^pkg:maven/org\.vaadin\.addon/easyuploads@.*$
- cpe:/a:vaadin:vaadin
-
-
-
- ^pkg:maven/ch\.qos\.logback/logback-classic@.*$
- cpe:/a:qos:slf4j
-
-
-
- ^pkg:maven/org\.eclipse\.microprofile\.config/microprofile-config-api@.*$
- cpe:/a:payara:payara
-
-
-
- ^pkg:maven/org\.apache\.james/apache-mime4j@.*$
- cpe:/a:apache:james
-
-
-
- ^pkg:maven/org\.postgresql/r2dbc-postgresql@.*$
- cpe:/a:postgresql:postgresql
-
-
-
- ^pkg:maven/org\.mockito/mockito-junit-jupiter@.*$
- cpe:/a:junit:junit4
-
-
-
- ^pkg:maven/org\.robolectric/junit@.*$
- cpe:/a:junit:junit4
-
-
-
- ^pkg:maven/com\.openhtmltopdf/openhtmltopdf-jsoup-dom-converter@.*$
- cpe:/a:jsoup:jsoup
-
-
-
- ^pkg:maven/com\.vladsch\.flexmark/flexmark-ext-xwiki-macros@.*$
- cpe:/a:xwiki:xwiki
-
-
-
- ^pkg:maven/com\.vladsch\.flexmark/flexmark-ext-macros@.*$
- cpe:/a:processing:processing
-
-
-
- ^pkg:maven/org\.jfrog\.artifactory\.client/artifactory-java-client-api@.*$
- cpe:/a:jfrog:artifactory
-
-
-
- ^pkg:maven/org\.jetbrains\.kotlin/kotlin-annotation-processing-gradle@.*$
- cpe:/a:processing:processing
-
-
-
- ^pkg:maven/org\.testcontainers/mysql@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/org\.mariadb/r2dbc-mariadb@.*$
- cpe:/a:mariadb:mariadb
-
-
-
- ^pkg:maven/org\.testcontainers/mariadb@.*$
- cpe:/a:mariadb:mariadb
-
-
-
- ^pkg:maven/org\.apache\.camel/camel-activemq@.*$
- cpe:/a:apache:activemq
-
-
-
- ^pkg:maven/org\.jruby\.rack/jruby-rack@.*$
- cpe:/a:jruby:jruby
-
-
-
- ^pkg:maven/org\.jruby/dirgra@.*$
- cpe:/a:jruby:jruby
-
-
-
- ^pkg:maven/org\.apache\.datasketches/datasketches-java@.*$
- cpe:/a:sketch:sketch
-
-
-
- ^pkg:maven/org\.locationtech\.spatial4j/spatial4j@.*$
- cpe:/a:pro_search:pro_search
-
-
-
- ^pkg:maven/com\.ko-sys\.av/airac@.*$
- cpe:/a:keybase:keybase
-
-
-
- ^pkg:maven/software\.aws\.rds/aws-mysql-jdbc@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/net\.openhft/chronicle-wire@.*$
- cpe:/a:wire:wire
-
-
-
- ^pkg:maven/com\.zendesk/mysql-binlog-connector-java@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/io\.debezium/debezium-connector-mysql@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/org\.apache\.hbase/hbase-zookeeper@.*$
- cpe:/a:apache:zookeeper
-
-
-
- ^pkg:maven/org\.ejbca\.cvc/cert-cvc@.*$
- cpe:/a:primekey:ejbca
-
-
-
- ^pkg:maven/org\.apache\.twill/twill-zookeeper@.*$
- cpe:/a:apache:zookeeper
-
-
-
- ^pkg:maven/org\.pf4j/pf4j@.*$
- cpe:/a:sonatype:nexus
-
-
-
- ^pkg:maven/org\.apache\.iceberg/iceberg-hive-metastore@.*$
- cpe:/a:apache:hive
-
-
-
- ^pkg:maven/org\.apache\.hbase/hbase-hadoop-compat@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:maven/org\.apache\.flink/flink-rpc-akka-loader@.*$
- cpe:/a:akka:akka
-
-
-
- ^pkg:maven/org\.apache\.flink/flink-hadoop-fs@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:maven/com\.azure\.resourcemanager/azure-resourcemanager-appplatform@.*$
- cpe:/a:microsoft:platform_sdk
-
-
-
- ^pkg:maven/org\.clojure/data\.priority-map@.*$
- cpe:/a:priority-software:priority
-
-
-
- ^pkg:maven/com\.amazonaws/aws-java-sdk-prometheus@.*$
- cpe:/a:prometheus:prometheus
-
-
-
- ^pkg:maven/org\.hibernate/hibernate-commons-annotations@.*$
- cpe:/a:hibernate:hibernate_orm
-
-
-
- ^pkg:maven/software\.aws\.rds/aws-mysql-jdbc@.*$
- cpe:/a:mariadb:mariadb
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/org\.jfrog\.artifactory\.client/artifactory-java-client-httpClient@.*$
- cpe:/a:jfrog:artifactory
-
-
-
- ^pkg:maven/io\.opentracing\.contrib/opentracing-apache-httpclient@.*$
- cpe:/a:apache:httpclient
-
-
-
- ^pkg:maven/org\.codehaus\.jackson/jackson-xc@.*$
- cpe:/a:fasterxml:jackson-databind
-
-
-
- 5b8f86fea035328fc9e8c660773037a3401ce25f
- .*
-
-
-
- ^pkg:maven/org\.wildfly\.wildfly-http-client/wildfly-http-ejb-client@.*$
- cpe:/a:redhat:jboss-ejb-client
-
-
-
- ^pkg:maven/org\.jgroups\.kubernetes/jgroups-kubernetes@.*$
- cpe:/a:redhat:jgroups
-
-
-
- ^pkg:maven/org\.apache\.james/queue-activemq-guice@.*$
- cpe:/a:apache:activemq
-
-
-
- ^pkg:maven/io\.projectreactor\.rabbitmq/reactor-rabbitmq@.*$
- cpe:/a:vmware:rabbitmq
-
-
-
- ^pkg:maven/org\.apache\.james/james-server-queue-activemq@.*$
- cpe:/a:apache:activemq
-
-
-
- ^pkg:maven/org\.apache\.james/apache-jsieve-core@.*$
- cpe:/a:apache:james
-
-
-
- ^.*$
- CVE-2021-4277
-
-
-
- ^pkg:maven/com\.google\.crypto\.tink/apps-webpush@.*$
- cpe:/a:google:google_apps
-
-
-
- ^pkg:maven/org\.apache\.hadoop\.thirdparty/hadoop-shaded-guava@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:maven/com\.datastax\.oss/native-protocol@.*$
- cpe:/a:apache:cassandra
-
-
-
- ^pkg:maven/org\.openrewrite\.recipe/rewrite-jhipster@.*$
- cpe:/a:jhipster:jhipster
-
-
-
- ^pkg:maven/jakarta\.resource/jakarta\.resource-api@.*$
- cpe:/a:payara:payara
-
-
-
- ^pkg:maven/org\.eclipse\.microprofile\.jwt/microprofile-jwt-auth-api@.*$
- cpe:/a:payara:payara
-
-
-
- ^pkg:maven/org\.apache\.hadoop\.thirdparty/hadoop-shaded-protobuf_3_7@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:maven/org\.codehaus\.woodstox/stax2-api@.*$
- cpe:/a:fasterxml:woodstox
-
-
-
- ^pkg:maven/com\.oracle\.database\.nls/orai18n@.*$
- cpe:/a:oracle:database
-
-
-
- ^pkg:maven/com\.oracle\.database\.nls/orai18n@.*$
- cpe:/a:oracle:oracle_database
-
-
-
- ^pkg:maven/org\.apache\.iceberg/iceberg-orc@.*$
- cpe:/a:apache:orc
-
-
-
- ^pkg:maven/org\.apache\.iceberg/iceberg-flink-1\.15@.*$
- cpe:/a:apache:flink
-
-
-
- ^pkg:maven/com\.googlecode\.javaewah/JavaEWAH@.*$
- cpe:/a:google:google_search
-
-
-
- ^pkg:maven/org\.apache\.flink/flink-s3-fs-hadoop@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:maven/com\.microsoft\.azure/azure-cosmosdb-direct@.*$
- cpe:/a:microsoft:platform_sdk
-
-
-
- ^pkg:maven/org\.apache\.spark/spark-token-provider-kafka-0-10_2\.12@.*$
- cpe:/a:apache:kafka
-
-
-
- ^pkg:maven/com\.github\.luben/zstd-jni@.*$
- cpe:/a:freebsd:freebsd
-
-
-
- ^pkg:maven/io\.kamon/kamon-prometheus_2\.13@.*$
- cpe:/a:prometheus:prometheus
-
-
-
- ^pkg:maven/com\.github\.dasniko/testcontainers-keycloak@.*$
- cpe:/a:keycloak:keycloak
-
-
-
- ^pkg:maven/org\.apache\.kerby/zookeeper-backend@.*$
- cpe:/a:apache:zookeeper
-
-
-
- ^pkg:maven/javax\.resource/connector@.*$
- cpe:/a:sun:j2ee
-
-
-
- ^pkg:maven/org\.springframework\.cloud/spring-cloud-sleuth-autoconfigure@.*$
- cpe:/a:vmware:spring_cloud_config
-
-
-
- ^pkg:maven/org\.jfrog\.artifactory\.client/artifactory-java-client-services@.*$
- cpe:/a:jfrog:artifactory
-
-
-
- ^pkg:maven/org\.springframework\.integration/spring-integration-ftp@.*$
- cpe:/a:vmware:spring_integration
-
-
-
- ^pkg:maven/org\.jboss\.resteasy\.microprofile/microprofile-config@.*$
- cpe:/a:redhat:resteasy
-
-
-
- ^pkg:maven/org\.apache\.ignite/ignite-log4j2@.*$
- cpe:/a:apache:log4j
-
-
-
- ^pkg:maven/org\.apache\.directory\.api/api-ldap-net-mina@.*$
- cpe:/a:apache:mina
-
-
-
- ^pkg:maven/io\.quarkiverse\.openapi\.generator/quarkus-openapi-generator@.*$
- cpe:/a:openapi-generator:openapi_generator
-
-
-
- ^pkg:nuget/FluentFTP@.*$
- cpe:/a:ftp:ftp
-
-
-
- ^pkg:nuget/KubernetesClient@.*$
- cpe:/a:kubernetes:kubernetes
-
-
-
- ^pkg:maven/org\.apache\.sling/org\.apache\.sling\.commons\.johnzon@.*$
- cpe:/a:apache:sling_commons_json
-
-
-
- ^pkg:nuget/AspNetCoreRateLimit\.Redis@.*$
- cpe:/a:asp-project:asp-project
-
-
-
- ^pkg:maven/org\.jruby/jzlib@.*$
- cpe:/a:jruby:jruby
-
-
-
- ^pkg:maven/org\.jboss\.resteasy\.microprofile/.*$
- cpe:/a:redhat:resteasy
-
-
-
- ^pkg:maven/org\.jboss\.resteasy\.microprofile/microprofile-rest-client@.*$
- cpe:/a:redhat:resteasy
-
-
-
- ^pkg:maven/org\.apache\.sling/org\.apache\.sling\.commons\.osgi@.*$
- cpe:/a:apache:sling
-
-
-
- ^pkg:nuget/Minio\.AspNetCore@.*$
- cpe:/a:minio:minio
-
-
-
- ^pkg:maven/org\.apache\.thrift/libfb303@.*$
- cpe:/a:apache:thrift
-
-
-
- ^pkg:maven/org\.apache\.cxf/cxf-rt-bindings-soap@.*$
- cpe:/a:apache:soap
-
-
-
- ^pkg:maven/com\.itextpdf\.licensing/licensing-base@.*$
- cpe:/a:itextpdf:itext
-
-
-
- ^pkg:maven/com\.itextpdf\.licensing/licensing-remote@.*$
- cpe:/a:itextpdf:itext
-
-
-
- ^pkg:maven/io\.github\.detekt\.sarif4k/sarif4k-jvm@.*$
- cpe:/a:detekt:detekt
-
-
-
- ^pkg:maven/com\.lightbend\.akka\.grpc/.*$
- cpe:/a:akka:akka
- cpe:/a:lightbend:akka
-
-
-
- ^pkg:maven/com\.lightbend\.akka/akka-persistence-r2dbc.*$
- cpe:/a:akka:akka
- cpe:/a:lightbend:akka
-
-
-
- ^pkg:maven/com\.lightbend\.akka/akka-projection-.*$
- cpe:/a:akka:akka
- cpe:/a:lightbend:akka
-
-
-
- ^pkg:maven/org\.apache\.jackrabbit/oak-.*$
- cpe:/a:apache:jackrabbit
-
-
-
- ^pkg:maven/org\.apache\.jackrabbit/oak-core@.*$
- cpe:/a:apache:jackrabbit
-
-
-
- ^pkg:maven/com\.vaadin/vaadin-swing-kit-flow@.*$
- cpe:/a:vaadin:flow
-
-
-
- ^pkg:maven/org\.apache\.sling/org\.apache\.sling\.commons\.johnzon@.*$
- cpe:/a:apache:sling
-
-
-
- ^pkg:maven/org\.apache\.geronimo\.specs/geronimo-saaj_1\.3_spec@.*$
- cpe:/a:apache:soap
-
-
-
- ^pkg:maven/org\.ops4j\.pax\.logging/pax-logging-log4j2@.*$
- cpe:/a:apache:log4j
-
-
-
- ^pkg:maven/software\.amazon\.awssdk\.crt/aws-crt@.*$
- cpe:/a:amazon:aws-sdk-java
-
-
-
- ^pkg:maven/com\.adobe\.cq/core\.wcm\.components\.core@.*$
- cpe:/a:adobe:download_manager
-
-
-
- ^pkg:maven/com\.adobe\.cq/core\.wcm\.components\.core@.*$
- cpe:/a:adobe:experience_manager
-
-
-
- ^pkg:maven/com\.adobe\.cq/core\.wcm\.components\.core@.*$
- cpe:/a:adobe:experience_manager_forms
-
-
-
- ^pkg:maven/com\.adobe\.cq/core\.wcm\.components\.core@.*$
- cpe:/a:adobe:form_client
-
-
-
- ^pkg:maven/com\.adobe\.cq/core\.wcm\.components\.core@.*$
- cpe:/a:list_site_pro:list_site_pro
-
-
-
- ^pkg:maven/org\.springframework\.plugin/spring-plugin-core@.*$
- cpe:/a:vmware:spring
-
-
-
- ^pkg:maven/org\.springframework(?!\.kafka).*$
- CVE-2023-34040
-
-
-
- ^pkg:maven/org\.logback-extensions/logback-ext-spring@.*$
- cpe:/a:qos:logback
-
-
-
- ^pkg:npm/mysql@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/net\.rossillo\.mvc\.cache/spring-mvc-cache-control@.*$
- cpe:/a:spring:spring
-
-
-
- ^pkg:maven/ch\.qos\.logback\.contrib/logback-json-core@.*$
- cpe:/a:json-c:json-c
-
-
-
- ^pkg:maven/ch\.qos\.logback\.contrib/logback-json-classic@.*$
- cpe:/a:json-c:json-c
-
-
-
- ^pkg:maven/io\.asyncer/r2dbc-mysql@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/io\.netty\.incubator/netty-incubator-codec-native-quic@.*$
- cpe:/a:chromium:chromium
-
-
-
- ^pkg:maven/xalan/xalan@.*$
- cpe:/a:apache:commons_bcel
-
-
-
- ^pkg:nuget/CommandLineParser@.*$
- cpe:/a:line:line
-
-
-
- ^pkg:maven/org\.flywaydb/flyway-database-postgresql@.*$
- cpe:/a:postgresql:postgresql
-
-
-
- ^pkg:maven/net\.lbruun\.springboot/preliquibase-spring-boot-starter@.*$
- cpe:/a:liquibase:liquibase
-
-
-
- ^pkg:maven/rubygems/.*@.*$
- cpe:/a:rubygems:rubygems
-
-
-
- ^pkg:maven/org\.apache\.parquet/parquet-avro@.*$
- cpe:/a:apache:avro
-
-
-
- ^pkg:maven/org\.apache\.camel/camel-reactive-executor-tomcat@.*$
- cpe:/a:apache_tomcat:apache_tomcat
-
-
-
- ^pkg:maven/info\.picocli/picocli@.*$
- cpe:/a:line:line
-
-
-
- ^pkg:maven/io\.r2dbc/r2dbc-mssql@.*$
- cpe:/a:microsoft:sql_server
-
-
-
- ^pkg:maven/org\.thymeleaf\.extras/thymeleaf-extras-java8time@.*$
- cpe:/a:thymeleaf:thymeleaf
-
-
-
- ^pkg:maven/org\.keycloak/keycloak-model-infinispan@.*$
- cpe:/a:infinispan:infinispan
-
-
-
- ^pkg:maven/org\.jgroups\.azure/jgroups-azure@.*$
- cpe:/a:redhat:jgroups
-
-
-
- ^pkg:maven/com\.bornium/oauth2-openid@.*$
- cpe:/a:openid:openid
-
-
-
- ^pkg:maven/org\.hsqldb/hsqldb@.*$
- cpe:/a:hyper:hyper
-
-
-
- ^pkg:maven/org\.jboss\.activemq\.artemis\.integration/artemis-wildfly-integration@.*$
- cpe:/a:redhat:wildfly
-
-
-
- ^pkg:npm/bare-os@.*$
- cpe:/a:bareos:bareos
-
-
-
- ^pkg:maven/org\.apache\.camel\.quarkus/camel-quarkus-core@.*$
- cpe:/a:apache:camel
-
-
-
- ^pkg:maven/org\.apache\.rat/apache-rat@.*$
- cpe:/a:line:line
-
-
-
- ^pkg:nuget/MagicFileEncoding@.*$
- cpe:/a:file:file
-
-
-
- ^pkg:nuget/MongoDB\.Bson@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:maven/io\.opentelemetry\.contrib/opentelemetry-prometheus-client-bridge@.*$
- cpe:/a:prometheus:prometheus
-
-
-
- ^pkg:maven/org\.springframework\.batch\.extensions/spring-batch-excel@.*$
- cpe:/a:pivotal_software:spring_batch
-
-
-
- ^pkg:maven/org\.glassfish(?!\.main).*$
- cpe:/a:eclipse:glassfish
-
-
-
- ^pkg:maven/org\.apache\.shiro\.crypto/shiro.*@2.0.0$
- CVE-2023-34478
- CVE-2023-46749
- CVE-2023-46750
-
-
-
- ^pkg:maven/org\.apache\.shiro/shiro.*@2.0.0$
- CVE-2023-34478
- CVE-2023-46749
- CVE-2023-46750
-
-
-
-^pkg:(?!maven/org\.clojure/clojure@).*$
-cpe:/a:clojure:clojure
-
-
-
- ^pkg:maven/io\.pivotal\.cfenv/java-cfenv@.*$
- cpe:/a:vmware:spring_framework
-
-
-
- ^pkg:maven/io\.pivotal\.cfenv/java-cfenv-jdbc@.*$
- cpe:/a:vmware:spring_framework
-
-
-
- ^pkg:maven/io\.pivotal\.cfenv/java-cfenv-boot@.*$
- cpe:/a:vmware:spring_framework
-
-
-
- ^pkg:maven/org\.togglz/togglz-mongodb@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:nuget/dbup-postgresql@.*$
- cpe:/a:postgresql:postgresql
-
-
-
- ^pkg:maven/org\.eclipse\.jetty\.toolchain/.*@.*$
- cpe:/a:jetty:jetty
- cpe:/a:eclipse:jetty
-
-
-
-^pkg:generic/Mono.Cecil@.*$
-cpe:/a:cecil:cecil
-
-
-
- ^pkg:maven/com\.google\.http-client/google-http-client-protobuf@.*$
- cpe:/a:google:protobuf-java
-
-
-
- ^pkg:maven/io\.zipkin\.contrib\.brave-propagation-w3c/brave-propagation-tracecontext@.*$
- cpe:/a:brave:brave
-
-
-
- ^pkg:maven/io\.micrometer/micrometer-tracing-bridge-brave@.*$
- cpe:/a:brave:brave
-
-
-
-^pkg:maven/org\.junit\..*/junit-.*@.*$
-cpe:/a:1e:platform
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-jarmode-tools@.*$
- cpe:/a:vmware:tools
-
-
-
- ^pkg:maven/org\.apache\.sandesha2/sandesha2.*$
- cpe:/a:apache:axis2:
- cpe:/a:apache:axis:
-
-
-
-^pkg:maven/org\.apache\.axis2.*$
-cpe:/a:apache:axis:
-
-
-
- ^pkg:maven/org\.eclipse\.jetty/jetty-openid@.*$
- cpe:/a:openid:openid
-
-
-
- ^pkg:maven/org\.springframework\.security/spring-security-oauth2-resource-server@.*$
- cpe:/a:vmware:server
-
-
-
- ^pkg:maven/io\.pivotal\.cfenv/java-cfenv-boot@.*$
- cpe:/a:vmware:spring_boot
-
-
-
- ^pkg:maven/com\.yahoo\.datasketches/sketches-core@.*$
- cpe:/a:sketch:sketch
-
-
-
- ^pkg:maven/com\.azure/azure-core-http-netty@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/com\.azure/azure-core@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/org\.tukaani/xz@.*$
- cpe:/a:tukaani:xz
-
-
-
- ^pkg:maven/org\.bouncycastle/bc(pg)?-fips@.*$
- cpe:/a:bouncycastle:legion-of-the-bouncy-castle
-
-
-
- ^pkg:maven/org\.bouncycastle/bc(pg)?-fips@.*$
- cpe:/a:bouncycastle:bouncy_castle_for_java
-
-
-
- ^pkg:maven/commons-discovery/commons-discovery@.*$
- cpe:/a:spirit-project:spirit
-
-
-
- ^pkg:maven/org\.jmdns/jmdns@.*$
- cpe:/a:openhab:openhab
-
-
-
- ^pkg:maven/com\.azure/azure-identity@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/com\.apollographql\.federation/federation-graphql-java-support@.*$
- cpe:/a:apollo(_project)?:apollo.*
-
-
-
- ^pkg:maven/fi\.solita\.clamav/clamav-client@.*$
- cpe:/a:clamav:clamav
-
-
-
- ^pkg:maven/jakarta\.json/jakarta\.json-api@.*$
- cpe:/a:eclipse:glassfish
-
-
-
- ^pkg:maven/org\.glassfish\.jaxb/jaxb-runtime@.*$
- cpe:/a:eclipse:glassfish
-
-
-
- ^pkg:maven/org\.apache\.ftpserver/ftplet-api@.*$
- cpe:/a:apache:apache_http_server
-
-
-
- ^pkg:maven/org\.apache\.ftpserver/ftpserver-core@.*$
- cpe:/a:apache:apache_http_server
-
-
-
-^pkg:maven/org\.apache\.ftpserver/ftplet-api@.*$
-^cpe:/a:apache:mina:.*
-
-
-
-^pkg:maven/io\.prometheus/prometheus-.*$
-cpe:/a:prometheus:prometheus
-
-
-
- ^(mysql:mysql-connector-java|com\.mysql:mysql-connector-j|org\.drizzle\.jdbc:drizzle-jdbc):.*$
- cpe:/a:mysql:mysql:
- cpe:/a:oracle:mysql:
-
-
-
-
- ^pkg:nuget/IronPython@.*$
- cpe:/a:python:python
-
-
-
- ^pkg:(?!maven/com\.graphql-java/graphql-java@).*$
- cpe:/a:graphql-java:graphql-java:
-
-
-
- ^pkg:maven/com\.azure/azure-json@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/org\.agrona/agrona@.*$
- cpe:/a:protonmail:protonmail
-
-
-
- ^pkg:maven/org\.mortbay\.jasper/apache-el@.*$
- cpe:/a:eclipse:jetty
-
-
-
- ^pkg:maven/com\.maciejwalkowiak\.spring/wiremock-spring-boot@.*$
- cpe:/a:wiremock:wiremock
- cpe:/a:wire:wire
-
-
-
- ^pkg:maven/com\.azure/azure-core-amqp@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/io\.etcd/jetcd-.*@.*$
- cpe:/a:redhat:etcd
- cpe:/a:etcd:etcd
-
-
-
- ^pkg:maven/com\.azure/azure-core-management@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/com\.amazonaws/aws-java-sdk-opensearch@.*$
- cpe:/a:amazon:opensearch
-
-
-
- ^pkg:maven/org\.webjars\.npm/url-parse@.*$
- cpe:/a:parse-url(_project)?:parse-url.*
-
-
-
- ^pkg:maven/com\.datomic/memcache-asg-java-client@.*$
- cpe:/a:memcache(_project)?:memcache.*
-
-
-
- ^pkg:maven/com\.amazonaws/aws-java-sdk-marketplacedeployment@.*$
- cpe:/a:amazon:aws_deployment_framework
-
-
-
- ^pkg:maven/cd\.go\.plugin\.base/gocd-plugin-base@.*$
- cpe:/a:thoughtworks:gocd
-
-
-
- .*/node-windows/bin/sudowin/sudo.exe
- cpe:/a:sudo:sudo
- cpe:/a:sudo(_project)?:sudo.*
-
-
-
-
- ^pkg:maven/com\.datomic/memcache-asg-java-client@.*$
- cpe:/a:memcached:memcached
-
-
-
- ^pkg:maven/fish\.payara\.security\.connectors/security-connectors-api@.*$
- cpe:/a:payara:payara
-
-
-
- ^pkg:maven/com\.microsoft\.azure/msal4j-persistence-extension@.*$
- cpe:/a:microsoft:authentication_library
-
-
-
- ^pkg:maven/com\.google\.cloud\.tools/jib-build-plan@.*$
- cpe:/a:jib(_project)?:jib.*
-
-
-
- ^pkg:maven/com\.azure/azure-identity-extensions@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/com\.google\.cloud\.opentelemetry/detector-resources-support@.*$
- cpe:/a:opentelemetry:opentelemetry
-
-
-
- ^pkg:maven/(?!io\.grpc/).*$
- cpe:/a:grpc:grpc
-
-
-
-^pkg:maven\/.*$
-cpe:/a:sms:sms
-
-
-
- ^pkg:maven/com\.sap\.cloud\.db\.jdbc/ngdbc@.*$
- cpe:/a:sap:hana
-
-
-
- ^pkg:maven/com\.google\.cloud\.opentelemetry/shared-resourcemapping@.*$
- cpe:/a:opentelemetry:opentelemetry
-
-
-
- ^pkg:maven/com\.google\.cloud\.opentelemetry/exporter-metrics@.*$
- cpe:/a:opentelemetry:opentelemetry
-
-
-
- ^pkg:maven/dev\.zio/zio-akka-cluster_2\.13@.*$
- cpe:/a:akka:akka
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-starter-data-rest@.*$
- cpe:/a:vmware:spring_data_rest
-
-
-
- ^pkg:nuget/Npgsql\.EntityFrameworkCore\.PostgreSQL@.*$
- cpe:/a:postgresql:postgresql
-
-
-
- ^pkg:nuget/System\.Threading\.Tasks\.Extensions@.*$
- cpe:/a:tasks:tasks
-
-
-
- ^pkg:maven/com\.splunk\.logging/splunk-library-javalogging@.*$
- cpe:/a:splunk:splunk
-
-
-
- ^pkg:nuget/System\.ServiceModel\.NetTcp@.*$
- cpe:/a:tcp:tcp
-
-
-
- ^pkg:maven/org\.springframework\.ai/spring-ai-spring-boot-autoconfigure@.*$
- cpe:/a:vmware:spring_boot
-
-
-
- ^pkg:nuget/Serilog\.Sinks\.Graylog@.*$
- cpe:/a:graylog:graylog
-
-
-
- ^pkg:maven/com\.hazelcast\.marketing/hazelcast-license-extractor@.*$
- cpe:/a:hazelcast:hazelcast
-
-
-
- ^pkg:maven/com\.google\.devtools\.ksp/symbol-processing@.*$
- cpe:/a:processing:processing
-
-
-
- ^pkg:maven/com\.google\.devtools\.ksp/symbol-processing-cmdline@.*$
- cpe:/a:processing:processing
-
-
-
- ^pkg:maven/com\.google\.devtools\.ksp/symbol-processing-api@.*$
- cpe:/a:processing:processing
-
-
-
-^pkg:maven/com\.azure\.resourcemanager/.*$
-cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/org\.apache\.sling/org\.apache\.sling\.javax\.activation@.*$
- cpe:/a:apache:sling
-
-
-
- ^pkg:maven/org\.eclipse\.platform/org\.eclipse\.osgi@.*$
- cpe:/a:eclipse:platform
-
-
-
- ^pkg:maven/org\.eclipse\.platform/org\.eclipse\.osgi@.*$
- cpe:/a:eclipse:equinox
-
-
-
- ^pkg:maven/io\.nlopez\.compose\.rules/detekt@.*$
- cpe:/a:detekt:detekt
-
-
-
- ^pkg:maven/io\.quarkiverse\.wiremock/quarkus-wiremock@.*$
- cpe:/a:wiremock:wiremock
-
-
-
- ^pkg:maven/com\.github\.jpmsilva\.jsystemd/jsystemd-core@.*$
- cpe:/a:systemd(_project)?:systemd.*
-
-
-
- ^pkg:maven/com\.azure/azure-ai-openai@.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:nuget/Microsoft\.AspNetCore\.Authentication\.OpenIdConnect@.*$
- cpe:/a:openid:openid_connect
-
-
-
- ^pkg:composer/phpunit/php-invoker@.*$
- cpe:/a:phpunit(_project)?:phpunit.*
-
-
-
- ^pkg:composer/phpunit/php-text-template@.*$
- cpe:/a:phpunit(_project)?:phpunit.*
-
-
-
- ^pkg:composer/spatie/laravel-.*$
- cpe:/a:laravel:laravel
-
-
-
-
- ^pkg:maven/org\.opensearch\.plugin/transport-netty4-client@.*$
- cpe:/a:netty:netty
-
-
-
- ^pkg:npm/opener@.*$
- cpe:/a:opener(_project)?:opener.*
-
-
-
- ^pkg:maven/com\.oracle\.database\.nls/orai18n@.*$
- cpe:/a:oracle:text
-
-
-
- ^pkg:nuget/Microsoft\.AspNet\.TelemetryCorrelation@.*$
- cpe:/a:microsoft:asp.net
-
-
-
- ^pkg:nuget/Akka\.Cluster\.Hosting@.*$
- cpe:/a:akka:akka
-
-
-
- ^pkg:maven/org\.eclipse\.core/org\.eclipse\.core\.jobs@.*$
- cpe:/a:jobs-plugin(_project)?:jobs-plugin.*
-
-
-
- ^pkg:maven/org\.eclipse\.core/org\.eclipse\.core\.commands@.*$
- cpe:/a:eclipse:equinox
-
-
-
- ^pkg:maven/edu\.washington\.cs\.knowitall/opennlp-chunk-models@.*$
- cpe:/a:apache:opennlp
-
-
-
- ^pkg:maven/com\.microsoft\.azure/azure-eventhubs.*$
- cpe:/a:microsoft:azure_cli
-
-
-
- ^pkg:maven/org\.nokogiri/nekodtd@.*$
- cpe:/a:nokogiri:nokogiri
-
-
-
- ^pkg:maven/io\.micrometer/micrometer-registry-prometheus-simpleclient@.*$
- cpe:/a:prometheus:prometheus
-
-
-
- ^pkg:maven/opensymphony/oscache@.*$
- cpe:/a:tag(_project)?:tag.*
-
-
-
- ^pkg:maven/org\.apache\.directory\.api/api-i18n@.*$
- cpe:/a:i18n(_project)?:i18n.*
-
-
-
- ^pkg:maven/io\.github\.x-stream/mxparser@.*$
- cpe:/a:x-stream:xstream
-
-
-
- ^pkg:maven/org\.apache\.xmlgraphics/batik-i18n@.*$
- cpe:/a:apache:xml_graphics_batik
-
-
-
- ^pkg:maven/org\.openrewrite\.recipe/rewrite-jenkins@.*$
- cpe:/a:jenkins:github
-
-
-
- ^pkg:maven/io\.grpc/grpc-netty@.*$
- cpe:/a:netty:netty
-
-
-
- ^pkg:maven/io\.grpc/grpc-netty-shaded@.*$
- cpe:/a:netty:netty
-
-
-
- ^pkg:maven/edu\.washington\.cs\.knowitall/opennlp-postag-models@.*$
- cpe:/a:apache:opennlp
-
-
-
- ^pkg:maven/io\.projectreactor\.netty/reactor-netty.*@.*$
- cpe:/a:netty:netty
-
-
-
- ^pkg:maven/org\.ccil\.cowan\.tagsoup/tagsoup@.*$
- cpe:/a:tag(_project)?:tag.*
-
-
-
- ^pkg:maven/org\.apache\.groovy/groovy-json@.*$
- cpe:/a:apache:groovy
-
-
-
-
- ^pkg:maven/javax\.resource/connector-api@.*$
- cpe:/a:sun:j2ee
-
-
-
- ^pkg:maven/io\.quarkus/quarkus-hibernate-validator@.*$
- cpe:/a:hibernate:hibernate-validator
-
-
-
- ^pkg:maven/org\.mortbay\.jasper/apache-jsp@.*$
- cpe:/a:apache:tomcat
-
-
-
- ^pkg:maven/com\.almworks\.sqlite4java/sqlite4java@.*$
- cpe:/a:sqlite:sqlite
-
-
-
- ^pkg:maven/io\.quarkiverse\.wiremock/quarkus-wiremock@.*$
- cpe:/a:wire:wire
-
-
-
- ^pkg:maven/org\.jooq.*/jooq-meta-extensions-liquibase@.*$
- cpe:/a:liquibase:liquibase
-
-
-
- ^pkg:maven/org\.springframework\.ai/spring-ai-mongodb-atlas-store@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:maven/org\.xmlunit/xmlunit-core@.*$
- cpe:/a:ada:ada
-
-
-
- ^pkg:maven/io\.debezium/mysql-binlog-connector-java@.*$
- cpe:/a:mysql:mysql
-
-
-
- ^pkg:maven/edu\.washington\.cs\.knowitall/opennlp-tokenize-models@.*$
- cpe:/a:apache:opennlp
-
-
-
- ^pkg:nuget/Microsoft\.AspNetCore\.Authentication\.OpenIdConnect@.*$
- cpe:/a:openid:openid
-
-
-
- ^pkg:maven/co\.elastic\.apm/.*
- cpe:/a:elastic:elastic_agent
- CVE-2019-7617
-
-
-
- ^pkg:maven/org\.eclipse\.core/org\.eclipse\.core\.expressions@.*$
- cpe:/a:eclipse:org.eclipse.core.runtime
-
-
-
- ^pkg:maven/org\.eclipse\.platform/org\.eclipse\.equinox\.supplement@.*$
- cpe:/a:eclipse:platform
-
-
-
- ^pkg:maven/com\.hazelcast/hazelcast-eureka-two@.*$
- cpe:/a:hazelcast:hazelcast
-
-
-
- ^pkg:maven/com\.pinterest\.ktlint/ktlint-cli-reporter-checkstyle@.*$
- cpe:/a:checkstyle:checkstyle
-
-
-
- ^pkg:pypi/(?!sentry@).*$
- cpe:/a:sentry:sentry:
-
-
-
- ^pkg:maven/org\.spdx/spdx-java-model-2_X@.*$
- cpe:/a:x.org:x.org
-
-
-
- ^pkg:maven/org\.eclipse\.angus/angus-activation@.*$
- cpe:/a:eclipse:angus_mail
-
-
-
- ^pkg:maven/com\.azure\.resourcemanager/azure-resourcemanager-msi@.*$
- cpe:/a:microsoft:azure_identity_sdk
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-starter-liquibase@.*$
- cpe:/a:liquibase:liquibase
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-liquibase@.*$
- cpe:/a:liquibase:liquibase
-
-
-
- ^pkg:maven/io\.micronaut\.jsonschema/micronaut-json-schema-utils@.*$
- cpe:/a:utils(_project)?:utils.*
- cpe:/a:cron-utils(_project)?:cron-utils.*
-
-
-
- ^pkg:maven/jakarta\.enterprise/jakarta\.enterprise\.lang-model@.*$
- cpe:/a:model(_project)?:model.*
-
-
-
- ^pkg:maven/org\.jetbrains\.exposed/exposed-kotlin-datetime@.*$
- cpe:/a:jetbrains:kotlin
-
-
-
- ^pkg:maven/io\.opentelemetry/opentelemetry-exporter-prometheus@.*$
- cpe:/a:prometheus:prometheus
-
-
-
- ^pkg:maven/com\.solace/solace-messaging-client@.*$
- cpe:/a:solace:pubsub\+
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-mongodb@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-starter-mongodb@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-data-mongodb@.*$
- cpe:/a:mongodb:mongodb
-
-
-
- ^pkg:maven/org\.apache\.hadoop\.thirdparty/hadoop-shaded-protobuf_3_25@.*$
- cpe:/a:apache:hadoop
-
-
-
- ^pkg:(?!maven/nu\.validator/validator@|npm/vnu-jar).*
- cpe:/a:validator:validator
-
-
-
- ^pkg:maven/net\.java\.dev\.jna/jna-jpms@.*$
- cpe:/a:oracle:java_se
-
-
-
- ^pkg:maven/org\.apache\.felix/org\.apache\.felix\.framework@.*$
- cpe:/a:sun:sun_ftp
-
-
-
- ^pkg:nuget/DotNumerics@.*$
- cpe:/a:lapack_project:lapack
-
-
-
- ^pkg:nuget/DotNumerics@.*$
- cpe:/a:singular:singular
-
-
-
- ^pkg:maven/org\.testcontainers/testcontainers-postgresql@.*$
- cpe:/a:postgresql:postgresql
-
-
-
- ^pkg:maven/org\.apache\.cxf\.karaf/cxf-karaf-commands@.*$
- cpe:/a:apache:karaf
-
-
-
- ^pkg:maven/org\.springframework\.boot/spring-boot-batch@.*$
- cpe:/a:pivotal_software:spring_batch
-
-
diff --git a/backend/data/dependency-check/publishedSuppressions.xml.properties b/backend/data/dependency-check/publishedSuppressions.xml.properties
deleted file mode 100644
index 446008860..000000000
--- a/backend/data/dependency-check/publishedSuppressions.xml.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-#Mon Mar 09 22:21:15 CET 2026
-LAST_UPDATED=1773091275
diff --git a/backend/doc/knowledge/reports/coverage.md b/backend/doc/knowledge/reports/coverage.md
new file mode 100644
index 000000000..9d141e9c1
--- /dev/null
+++ b/backend/doc/knowledge/reports/coverage.md
@@ -0,0 +1,14 @@
+# AI Knowledge Coverage Report
+
+Generated at: 2026-03-26T14:45:27.949164400
+
+## Summary
+- Total indexed files: 3
+- Used files: 2
+- Unused (stale) files: 1
+- Knowledge coverage: 66.67%
+
+## Stale (Unused) Files
+| Path | Last Indexed |
+| :--- | :--- |
+| doc/obsolete.md | 2026-03-26T14:45:27.940784700 |
diff --git a/backend/pom.xml b/backend/pom.xml
index cbd2d297f..eaadec83c 100644
--- a/backend/pom.xml
+++ b/backend/pom.xml
@@ -5,7 +5,7 @@
ch.goodone
goodone-parent
- 1.1.1-SNAPSHOT
+ 2.1.0
../pom.xml
goodone-backend
@@ -33,6 +33,23 @@
target/site/jacoco/jacoco.xml
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+ ${jackson.version}
+
+
+ tools.jackson.core
+ jackson-databind
+
+
+ tools.jackson.core
+ jackson-core
+
+
+ org.springframework.boot
+ spring-boot-starter-jackson
+
org.springframework.boot
spring-boot-h2console
@@ -85,6 +102,12 @@
org.springframework.boot
spring-boot-starter-test
test
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+
+
org.springframework.security
@@ -106,6 +129,16 @@
swagger-request-validator-mockmvc
2.44.1
test
+
+
+ com.github.java-json-tools
+ json-schema-validator
+
+
+ com.github.java-json-tools
+ jackson-coreutils
+
+
javax.xml.bind
@@ -124,10 +157,30 @@
org.springdoc
springdoc-openapi-starter-webmvc-ui
2.8.5
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
org.flywaydb
flyway-core
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
org.flywaydb
@@ -150,12 +203,12 @@
io.jsonwebtoken
jjwt-api
- 0.13.0
+ 0.12.6
io.jsonwebtoken
jjwt-impl
- 0.13.0
+ 0.12.6
runtime
@@ -163,6 +216,16 @@
jjwt-jackson
0.12.6
runtime
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+
+
org.projectlombok
@@ -191,6 +254,17 @@
pgvector
0.1.6
+
+ com.networknt
+ json-schema-validator
+ 1.5.1
+
+
+ com.fasterxml.jackson.core
+ *
+
+
+
org.awaitility
awaitility
@@ -221,7 +295,11 @@
org.apache.maven.plugins
maven-surefire-plugin
- @{argLine} -javaagent:${org.mockito:mockito-core:jar} -XX:+EnableDynamicAgentLoading
+ @{argLine} -XX:+EnableDynamicAgentLoading
+
+
+
+
@@ -367,8 +445,22 @@
-
-
+
+ verify-retrieval
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+ **/RetrievalCoverageIntegrationTest.java
+ **/ProviderConsistencyTest.java
+
+
+
+
+
+
+
-
-
diff --git a/backend/src/main/java/ch/goodone/backend/ClassPathChecker.java b/backend/src/main/java/ch/goodone/backend/ClassPathChecker.java
new file mode 100644
index 000000000..30a192246
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ClassPathChecker.java
@@ -0,0 +1,35 @@
+package ch.goodone.backend;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URL;
+import java.util.Enumeration;
+
+public class ClassPathChecker {
+ private static final Logger logger = LoggerFactory.getLogger(ClassPathChecker.class);
+
+ public static void main(String[] args) {
+ try {
+ String className = "com.fasterxml.jackson.annotation.JsonFormat";
+ String path = className.replace('.', '/') + ".class";
+ Enumeration resources = ClassLoader.getSystemClassLoader().getResources(path);
+ logger.info("Searching for {}", className);
+ while (resources.hasMoreElements()) {
+ logger.info("Found at: {}", resources.nextElement());
+ }
+
+ Class> clazz = Class.forName("com.fasterxml.jackson.annotation.JsonFormat$Shape");
+ logger.info("Loaded from: {}", clazz.getProtectionDomain().getCodeSource().getLocation());
+ Object[] constants = clazz.getEnumConstants();
+ logger.info("Constants count: {}", (constants == null ? "null" : constants.length));
+ if (constants != null) {
+ for (Object f : constants) {
+ logger.info("Constant: {}", f);
+ }
+ }
+ } catch (Exception e) {
+ logger.error("Error during classpath check", e);
+ }
+ }
+}
diff --git a/backend/src/main/java/ch/goodone/backend/DataInitializer.java b/backend/src/main/java/ch/goodone/backend/DataInitializer.java
index 8d21dd956..8e9308134 100644
--- a/backend/src/main/java/ch/goodone/backend/DataInitializer.java
+++ b/backend/src/main/java/ch/goodone/backend/DataInitializer.java
@@ -14,3 +14,4 @@ CommandLineRunner initData(DataInitializerService dataInitializerService) {
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/GoodoneBackendApplication.java b/backend/src/main/java/ch/goodone/backend/GoodoneBackendApplication.java
index 11f2be74a..387099aed 100644
--- a/backend/src/main/java/ch/goodone/backend/GoodoneBackendApplication.java
+++ b/backend/src/main/java/ch/goodone/backend/GoodoneBackendApplication.java
@@ -10,9 +10,16 @@
org.springframework.ai.model.openai.autoconfigure.OpenAiChatAutoConfiguration.class,
org.springframework.ai.model.openai.autoconfigure.OpenAiEmbeddingAutoConfiguration.class,
org.springframework.ai.model.openai.autoconfigure.OpenAiImageAutoConfiguration.class,
- org.springframework.ai.model.openai.autoconfigure.OpenAiModerationAutoConfiguration.class
+ org.springframework.ai.model.openai.autoconfigure.OpenAiModerationAutoConfiguration.class,
+ org.springframework.ai.model.ollama.autoconfigure.OllamaChatAutoConfiguration.class,
+ org.springframework.ai.model.ollama.autoconfigure.OllamaEmbeddingAutoConfiguration.class
+}, excludeName = {
+ "org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration",
+ "org.springframework.ai.autoconfigure.jackson.JacksonAutoConfiguration",
+ "org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration"
})
@ConfigurationPropertiesScan
+@org.springframework.scheduling.annotation.EnableScheduling
public class GoodoneBackendApplication {
public static void main(String[] args) {
@@ -24,3 +31,4 @@ public static org.springframework.context.ConfigurableApplicationContext run(Str
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/AiProperties.java b/backend/src/main/java/ch/goodone/backend/ai/AiProperties.java
index 3f327ea5d..94a4cbd0f 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/AiProperties.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/AiProperties.java
@@ -1,6 +1,7 @@
package ch.goodone.backend.ai;
import lombok.Data;
+import lombok.EqualsAndHashCode;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@@ -15,7 +16,68 @@ public class AiProperties {
private CapabilityConfig architecture;
private CapabilityConfig retrospective;
private CapabilityConfig embedding;
+ private EvaluationConfig evaluation;
+ private RoutingConfig routing = new RoutingConfig();
private Map pricing;
+ private OpenAiConfig openai;
+ private OllamaConfig ollama;
+ private LocalFastPathConfig localFastPath = new LocalFastPathConfig();
+
+ public CapabilityConfig getConfigForFeature(String featureName) {
+ if (featureName == null) {
+ return architecture;
+ }
+ return switch (featureName) {
+ case "architecture", "architecture-explain", "copilot" -> getArchitecture();
+ case "quick-add", "quick-add-parse" -> getQuickAdd();
+ case "retrospective", "retrospective-cluster", "adr-drift" -> getRetrospective();
+ case "evaluation" -> getEvaluation();
+ default -> getArchitecture();
+ };
+ }
+
+ @Data
+ public static class LocalFastPathConfig {
+ private boolean enabled = false;
+ private java.util.List targetCapabilities = new java.util.ArrayList<>();
+ private String fastChatModel;
+ private Integer fastNumPredict;
+ private Integer fastTimeoutSeconds = 300;
+ }
+
+ @Data
+ @EqualsAndHashCode(callSuper = true)
+ public static class EvaluationConfig extends CapabilityConfig {
+ private boolean traceEnabled = false;
+ }
+
+ @Data
+ public static class RoutingConfig {
+ private String defaultProvider = "openai";
+ private Map featureRoutes = new java.util.HashMap<>();
+ }
+
+ @Data
+ public static class OpenAiConfig {
+ private String apiKey;
+ private String chatModel = "gpt-4o";
+ private String embeddingModel = "text-embedding-3-small";
+ private String baseUrl = "https://api.openai.com/v1";
+ private Double temperature;
+ private Integer seed;
+ private Integer timeoutSeconds = 60;
+ }
+
+ @Data
+ public static class OllamaConfig {
+ private String chatModel = "llama3.2";
+ private String embeddingModel = "nomic-embed-text";
+ private String baseUrl = "http://localhost:11434";
+ private Integer numPredict;
+ private Integer timeoutSeconds = 600;
+ private Double temperature;
+ private Integer seed;
+ }
@Data
public static class CapabilityConfig {
@@ -31,3 +93,4 @@ public static class ModelPrice {
private Double outputPricePer1k;
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/AiProviderService.java b/backend/src/main/java/ch/goodone/backend/ai/AiProviderService.java
index 4b0be851d..8fed6d559 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/AiProviderService.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/AiProviderService.java
@@ -1,9 +1,11 @@
package ch.goodone.backend.ai;
+import ch.goodone.backend.ai.exception.AiException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.embedding.EmbeddingModel;
+import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Service;
@@ -26,29 +28,56 @@ public EmbeddingModel getEmbeddingModel() {
private EmbeddingModel getEmbeddingModel(AiProperties.CapabilityConfig config) {
validateConfig(config);
String provider = config.getProvider().toLowerCase();
+
+ if (provider.equals("routing")) {
+ return context.getBean(AiRoutingService.class).resolveEmbeddingDelegate();
+ }
+
String beanName = provider.equals("openai") ? "openAiEmbeddingModel" : provider + "EmbeddingModel";
- log.debug("Resolving EmbeddingModel bean: {}", beanName);
- return context.getBean(beanName, EmbeddingModel.class);
+ try {
+ return context.getBean(beanName, EmbeddingModel.class);
+ } catch (NoSuchBeanDefinitionException e) {
+ log.error("AI Embedding Provider bean '{}' not found in context.", beanName);
+ throw new AiException("AI Embedding provider '" + provider + "' is not active. Check active profiles.");
+ }
}
public ChatModel getQuickAddChatModel() {
- return getChatModel(aiProperties.getQuickAdd());
+ return getChatModel(aiProperties.getQuickAdd(), "quick-add");
}
public ChatModel getArchitectureChatModel() {
- return getChatModel(aiProperties.getArchitecture());
+ return getChatModel(aiProperties.getArchitecture(), "architecture");
}
public ChatModel getRetrospectiveChatModel() {
- return getChatModel(aiProperties.getRetrospective() != null ? aiProperties.getRetrospective() : aiProperties.getArchitecture());
+ return getChatModel(aiProperties.getRetrospective() != null ? aiProperties.getRetrospective() : aiProperties.getArchitecture(), "retrospective");
}
- private ChatModel getChatModel(AiProperties.CapabilityConfig config) {
+ public ChatModel getEvaluationChatModel() {
+ return getChatModel(aiProperties.getEvaluation(), "evaluation");
+ }
+
+ public ChatModel getChatModelForFeature(String featureName) {
+ return getChatModel(aiProperties.getConfigForFeature(featureName), featureName);
+ }
+
+ private ChatModel getChatModel(AiProperties.CapabilityConfig config, String featureName) {
validateConfig(config);
String provider = config.getProvider().toLowerCase();
+
+ if (provider.equals("routing")) {
+ log.debug("Using RoutingChatModel for feature: {}", featureName);
+ return new RoutingChatModel(featureName, context.getBean(AiRoutingService.class), aiProperties);
+ }
+
String beanName = provider.equals("openai") ? "openAiChatModel" : provider + "ChatModel";
- log.debug("Resolving ChatModel bean: {}", beanName);
- return context.getBean(beanName, ChatModel.class);
+ try {
+ return context.getBean(beanName, ChatModel.class);
+ } catch (NoSuchBeanDefinitionException e) {
+ log.error("AI Chat Provider bean '{}' not found in context for feature: {}.", beanName, featureName);
+ throw new AiException("AI provider '" + provider + "' is not active for feature '" + featureName + "'. Check active profiles.");
+ }
}
private void validateConfig(AiProperties.CapabilityConfig config) {
@@ -57,3 +86,4 @@ private void validateConfig(AiProperties.CapabilityConfig config) {
}
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/AiRoutingService.java b/backend/src/main/java/ch/goodone/backend/ai/AiRoutingService.java
new file mode 100644
index 000000000..da548cf91
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/AiRoutingService.java
@@ -0,0 +1,84 @@
+package ch.goodone.backend.ai;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.ai.embedding.EmbeddingModel;
+import org.springframework.context.ApplicationContext;
+import org.springframework.beans.factory.NoSuchBeanDefinitionException;
+import org.springframework.stereotype.Service;
+import ch.goodone.backend.ai.exception.AiException;
+
+
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class AiRoutingService {
+
+ private final ApplicationContext context;
+ private final AiProperties aiProperties;
+ private final ch.goodone.backend.service.SystemSettingService systemSettingService;
+
+ private static final String ROUTING = "routing";
+
+ public String resolveProvider(String featureName) {
+ AiProperties.RoutingConfig config = aiProperties.getRouting();
+ String provider = config.getFeatureRoutes().get(featureName);
+
+ if (provider == null || provider.isBlank() || provider.equalsIgnoreCase(ROUTING)) {
+ provider = systemSettingService.getAiDefaultProvider();
+ if (provider == null || provider.isBlank() || provider.equalsIgnoreCase(ROUTING)) {
+ provider = config.getDefaultProvider();
+ }
+ }
+
+ if (provider == null || provider.isBlank() || provider.equalsIgnoreCase(ROUTING)) {
+ provider = "openai"; // Ultimate fallback
+ }
+ return provider;
+ }
+
+ public ChatModel resolveDelegate(String featureName) {
+ String provider = resolveProvider(featureName);
+
+ String beanName;
+ String providerLower = provider.toLowerCase();
+ if (providerLower.equals("openai")) {
+ beanName = "openAiChatModel";
+ } else if (providerLower.equals("ollama-fast")) {
+ beanName = "ollamaFastChatModel";
+ } else {
+ beanName = providerLower + "ChatModel";
+ }
+
+ try {
+ return context.getBean(beanName, ChatModel.class);
+ } catch (NoSuchBeanDefinitionException e) {
+ log.error("AI Provider bean '{}' not found in context. Ensure the corresponding profile (openai, ollama, or mock) is active.", beanName);
+ throw new AiException("AI provider '" + provider + "' is not active or configured correctly. Please check system profiles.");
+ }
+ }
+
+ public EmbeddingModel resolveEmbeddingDelegate() {
+ String provider = aiProperties.getEmbedding() != null ? aiProperties.getEmbedding().getProvider() : "openai";
+ if (provider == null || provider.isBlank() || provider.equalsIgnoreCase("routing")) {
+ provider = "openai"; // Embedding usually doesn't need complex routing but we can add it later
+ }
+
+ String beanName;
+ String providerLower = provider.toLowerCase();
+ if (providerLower.equals("openai")) {
+ beanName = "openAiEmbeddingModel";
+ } else {
+ beanName = providerLower + "EmbeddingModel";
+ }
+
+ try {
+ return context.getBean(beanName, EmbeddingModel.class);
+ } catch (NoSuchBeanDefinitionException e) {
+ log.error("AI Embedding Provider bean '{}' not found in context. Check active profiles.", beanName);
+ throw new AiException("AI Embedding provider '" + provider + "' is not active. Please check system profiles.");
+ }
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/MockAiConfiguration.java b/backend/src/main/java/ch/goodone/backend/ai/MockAiConfiguration.java
index 976765ecc..25113a48c 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/MockAiConfiguration.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/MockAiConfiguration.java
@@ -78,3 +78,4 @@ public EmbeddingResponse call(org.springframework.ai.embedding.EmbeddingRequest
};
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/OllamaManualConfig.java b/backend/src/main/java/ch/goodone/backend/ai/OllamaManualConfig.java
new file mode 100644
index 000000000..8ed50d39e
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/OllamaManualConfig.java
@@ -0,0 +1,378 @@
+package ch.goodone.backend.ai;
+
+import ch.goodone.backend.ai.exception.AiProviderException;
+import ch.goodone.backend.ai.performance.OllamaPerformanceService;
+import tools.jackson.databind.ObjectMapper;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.ai.chat.messages.AssistantMessage;
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.ai.chat.model.ChatResponse;
+import org.springframework.ai.chat.model.Generation;
+import org.springframework.ai.chat.prompt.Prompt;
+import org.springframework.ai.embedding.Embedding;
+import org.springframework.ai.embedding.EmbeddingModel;
+import org.springframework.ai.embedding.EmbeddingRequest;
+import org.springframework.ai.embedding.EmbeddingResponse;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.SimpleClientHttpRequestFactory;
+import org.springframework.web.client.RestClient;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Manual implementation of Ollama Chat and Embedding models to workaround
+ * binary compatibility issues between Spring AI 1.0.0 and Spring Boot 4.x.
+ * Uses Ollama's OpenAI-compatible API endpoints.
+ */
+@Configuration
+@Profile("ollama")
+@Slf4j
+public class OllamaManualConfig {
+ private static final String MODEL_FIELD = "model";
+ private static final String ROLE_FIELD = "role";
+ private static final String CONTENT_FIELD = "content";
+
+ private final AiProperties aiProperties;
+ private final RestClient normalRestClient;
+ private final RestClient fastRestClient;
+ private final ObjectMapper objectMapper;
+ private final OllamaPerformanceService performanceService;
+
+ public OllamaManualConfig(AiProperties aiProperties, ObjectMapper objectMapper, OllamaPerformanceService performanceService) {
+ this.aiProperties = aiProperties;
+ this.objectMapper = objectMapper;
+ this.performanceService = performanceService;
+
+ String baseUrl = resolveBaseUrl(aiProperties);
+ log.info("Initializing Ollama models with base URL: {}", baseUrl);
+
+ this.normalRestClient = buildRestClient(baseUrl, aiProperties.getOllama() != null ? aiProperties.getOllama().getTimeoutSeconds() : 60);
+ this.fastRestClient = buildRestClient(baseUrl, aiProperties.getLocalFastPath() != null ? aiProperties.getLocalFastPath().getFastTimeoutSeconds() : 120);
+ }
+
+ private String resolveBaseUrl(AiProperties aiProperties) {
+ if (aiProperties.getOllama() != null && aiProperties.getOllama().getBaseUrl() != null) {
+ String baseUrl = aiProperties.getOllama().getBaseUrl();
+ return sanitizeBaseUrl(baseUrl);
+ }
+
+ return resolveFallbackBaseUrl();
+ }
+
+ private String sanitizeBaseUrl(String baseUrl) {
+ if (baseUrl.endsWith("/v1")) {
+ return baseUrl.substring(0, baseUrl.length() - 3);
+ }
+ if (baseUrl.endsWith("/v1/")) {
+ return baseUrl.substring(0, baseUrl.length() - 4);
+ }
+ return baseUrl;
+ }
+
+ private String resolveFallbackBaseUrl() {
+ boolean inDocker = java.nio.file.Files.exists(java.nio.file.Paths.get("/.dockerenv"));
+ String envHost = System.getenv("OLLAMA_HOST");
+ if (envHost != null) {
+ return envHost.startsWith("http") ? envHost : "http://" + envHost + ":11434";
+ } else if (inDocker) {
+ return "http://host.docker.internal:11434";
+ }
+ return "http://localhost:11434";
+ }
+
+ private RestClient buildRestClient(String baseUrl, Integer timeoutSeconds) {
+ return RestClient.builder()
+ .baseUrl(baseUrl)
+ .requestFactory(createRequestFactory(timeoutSeconds))
+ .build();
+ }
+
+ private SimpleClientHttpRequestFactory createRequestFactory(Integer timeoutSeconds) {
+ SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
+ int timeoutMillis = (timeoutSeconds != null ? timeoutSeconds : 120) * 1000;
+ factory.setConnectTimeout(timeoutMillis);
+ factory.setReadTimeout(timeoutMillis);
+ return factory;
+ }
+
+ @Bean
+ public ChatModel ollamaChatModel() {
+ log.info("Creating manual OllamaChatModel bean");
+ return createChatModel(false);
+ }
+
+ @Bean
+ public ChatModel ollamaFastChatModel() {
+ log.info("Creating manual OllamaFastChatModel bean");
+ return createChatModel(true);
+ }
+
+ private ChatModel createChatModel(boolean isFastPath) {
+ RestClient client = isFastPath ? fastRestClient : normalRestClient;
+ return new ChatModel() {
+ @Override
+ public ChatResponse call(Prompt prompt) {
+ return executeOllamaChat(prompt, isFastPath, client);
+ }
+ };
+ }
+
+ private ChatResponse executeOllamaChat(Prompt prompt, boolean isFastPath, RestClient client) {
+ AiProperties.OllamaConfig config = aiProperties.getOllama();
+ String chatModel = getEffectiveChatModel(prompt, isFastPath, config);
+ Integer numPredict = getEffectiveNumPredict(isFastPath, config);
+
+ List<Map<String, String>> messages = prompt.getInstructions().stream()
+ .map(m -> Map.of(
+ ROLE_FIELD, m.getMessageType().name().toLowerCase(),
+ CONTENT_FIELD, m.getText()
+ ))
+ .toList();
+
+ boolean expectsJson = isJsonExpected(messages);
+ Map<String, Object> requestBody = createChatRequestBody(chatModel, messages, expectsJson, numPredict, prompt, config);
+
+ long startTime = System.currentTimeMillis();
+ final String finalChatModel = chatModel;
+ final List<Map<String, String>> finalMessages = messages;
+
+ try {
+ return performanceService.executeWithConcurrencyControl("ollama-chat-" + finalChatModel, () -> {
+ try {
+ byte[] responseBytes = executeChatApiCall(client, requestBody);
+ return processChatResponse(responseBytes, finalChatModel, finalMessages, startTime);
+ } catch (Exception e) {
+ throw new AiProviderException("Failed to execute Ollama chat", e);
+ }
+ });
+ } catch (Exception e) {
+ log.error("Error calling Ollama API: {}", e.getMessage());
+ throw new AiProviderException("Ollama API call failed: " + e.getMessage(), e);
+ }
+ }
+
+ private String getEffectiveChatModel(Prompt prompt, boolean isFastPath, AiProperties.OllamaConfig config) {
+ String chatModel = (config != null) ? config.getChatModel() : "llama3.2";
+ if (isFastPath && aiProperties.getLocalFastPath().isEnabled() &&
+ org.springframework.util.StringUtils.hasText(aiProperties.getLocalFastPath().getFastChatModel())) {
+ chatModel = aiProperties.getLocalFastPath().getFastChatModel();
+ }
+ if (prompt.getOptions() != null && org.springframework.util.StringUtils.hasText(prompt.getOptions().getModel())) {
+ chatModel = prompt.getOptions().getModel();
+ }
+ return chatModel;
+ }
+
+ private Integer getEffectiveNumPredict(boolean isFastPath, AiProperties.OllamaConfig config) {
+ Integer numPredict = (config != null) ? config.getNumPredict() : null;
+ if (isFastPath && aiProperties.getLocalFastPath().isEnabled() &&
+ aiProperties.getLocalFastPath().getFastNumPredict() != null) {
+ numPredict = aiProperties.getLocalFastPath().getFastNumPredict();
+ }
+ return numPredict;
+ }
+
+ private boolean isJsonExpected(List<Map<String, String>> messages) {
+ return messages.stream().anyMatch(m -> {
+ String content = m.get(CONTENT_FIELD);
+ return content != null && (content.toLowerCase().contains("json") || content.toLowerCase().contains("schema"));
+ });
+ }
+
+ private Map<String, Object> createChatRequestBody(String chatModel, List<Map<String, String>> messages,
+ boolean expectsJson, Integer numPredict, Prompt prompt, AiProperties.OllamaConfig config) {
+ Map<String, Object> requestBody = new java.util.HashMap<>(Map.of(
+ MODEL_FIELD, chatModel,
+ "messages", messages,
+ "stream", false
+ ));
+
+ if (expectsJson) {
+ requestBody.put("format", "json");
+ }
+
+ Map<String, Object> options = buildOptions(numPredict, expectsJson, prompt, config);
+ if (!options.isEmpty()) {
+ requestBody.put("options", options);
+ }
+ return requestBody;
+ }
+
+ private Map<String, Object> buildOptions(Integer numPredict, boolean expectsJson, Prompt prompt, AiProperties.OllamaConfig config) {
+ Map<String, Object> options = new java.util.HashMap<>();
+ if (numPredict != null) {
+ options.put("num_predict", numPredict);
+ }
+
+ Double temperature = resolveTemperature(expectsJson, prompt, config);
+ if (temperature != null) {
+ options.put("temperature", temperature);
+ }
+
+ Integer seed = (config != null) ? config.getSeed() : null;
+ if (seed != null) {
+ options.put("seed", seed);
+ }
+
+ return options;
+ }
+
+ private Double resolveTemperature(boolean expectsJson, Prompt prompt, AiProperties.OllamaConfig config) {
+ Double temperature = (config != null) ? config.getTemperature() : null;
+ if (prompt.getOptions() != null && prompt.getOptions().getTemperature() != null) {
+ temperature = prompt.getOptions().getTemperature();
+ }
+
+ if (expectsJson && (temperature == null || temperature > 0.0)) {
+ return 0.0;
+ }
+ return temperature;
+ }
+
+ private byte[] executeChatApiCall(RestClient client, Map<String, Object> requestBody) {
+ return client.post()
+ .uri("/api/chat")
+ .contentType(MediaType.APPLICATION_JSON)
+ .accept(MediaType.APPLICATION_JSON, MediaType.ALL)
+ .body(requestBody)
+ .exchange((req, res) -> {
+ if (res.getStatusCode().isError()) {
+ throw new AiProviderException("Ollama returned " + res.getStatusCode());
+ }
+ return res.getBody().readAllBytes();
+ });
+ }
+
+ private ChatResponse processChatResponse(byte[] responseBytes, String chatModel,
+ List<Map<String, String>> messages, long startTime) throws Exception {
+ if (responseBytes == null || responseBytes.length == 0) {
+ log.error("Ollama returned an empty response");
+ throw new AiProviderException("Ollama returned an empty response");
+ }
+
+ @SuppressWarnings("unchecked")
+ Map<String, Object> response = objectMapper.readValue(responseBytes, Map.class);
+
+ @SuppressWarnings("unchecked")
+ Map<String, Object> message = (Map<String, Object>) response.get("message");
+ if (message == null) {
+ log.error("Ollama response missing 'message' field: {}", response);
+ throw new AiProviderException("Ollama response missing 'message' field");
+ }
+
+ String content = (String) message.get(CONTENT_FIELD);
+ if (content == null) {
+ log.error("Ollama response message missing 'content' field: {}", message);
+ throw new AiProviderException("Ollama response message missing 'content' field");
+ }
+
+ long duration = System.currentTimeMillis() - startTime;
+ log.debug("Ollama chat: model={}, messages={}, responseLen={}, duration={}ms",
+ chatModel, messages.size(), content.length(), duration);
+ Generation generation = new Generation(new AssistantMessage(content));
+ return new ChatResponse(List.of(generation));
+ }
+
+ @Bean
+ public EmbeddingModel ollamaEmbeddingModel() {
+ log.info("Creating manual OllamaEmbeddingModel bean");
+ return new ManualOllamaEmbeddingModel(aiProperties, normalRestClient, performanceService, objectMapper);
+ }
+
+ private static class ManualOllamaEmbeddingModel implements EmbeddingModel {
+ private final AiProperties aiProperties;
+ private final RestClient restClient;
+ private final OllamaPerformanceService performanceService;
+ private final ObjectMapper objectMapper;
+
+ public ManualOllamaEmbeddingModel(AiProperties aiProperties, RestClient restClient,
+ OllamaPerformanceService performanceService, ObjectMapper objectMapper) {
+ this.aiProperties = aiProperties;
+ this.restClient = restClient;
+ this.performanceService = performanceService;
+ this.objectMapper = objectMapper;
+ }
+
+ @Override
+ public float[] embed(String text) {
+ EmbeddingResponse response = call(new EmbeddingRequest(List.of(text), null));
+ if (response.getResults().isEmpty()) {
+ return new float[0];
+ }
+ return response.getResults().get(0).getOutput();
+ }
+
+ @Override
+ public float[] embed(org.springframework.ai.document.Document document) {
+ return embed(document.getFormattedContent());
+ }
+
+ @Override
+ public EmbeddingResponse call(EmbeddingRequest request) {
+ String embeddingModel = resolveEmbeddingModel(request);
+ Map<String, Object> body = Map.of(
+ MODEL_FIELD, embeddingModel,
+ "prompt", request.getInstructions().get(0)
+ );
+
+ try {
+ return performanceService.executeWithConcurrencyControl("ollama-embed-" + embeddingModel,
+ () -> doCall(body));
+ } catch (Exception e) {
+ log.error("Error calling Ollama Embedding API: {}", e.getMessage());
+ throw new AiProviderException("Ollama Embedding API call failed: " + e.getMessage(), e);
+ }
+ }
+
+ private EmbeddingResponse doCall(Map<String, Object> body) {
+ byte[] responseBytes = executeEmbeddingApiCall(body);
+ return processEmbeddingResponse(responseBytes);
+ }
+
+ private String resolveEmbeddingModel(EmbeddingRequest request) {
+ AiProperties.OllamaConfig config = aiProperties.getOllama();
+ String embeddingModel = (config != null) ? config.getEmbeddingModel() : "nomic-embed-text";
+ if (request.getOptions() != null && org.springframework.util.StringUtils.hasText(request.getOptions().getModel())) {
+ embeddingModel = request.getOptions().getModel();
+ }
+ return embeddingModel;
+ }
+
+ private byte[] executeEmbeddingApiCall(Map<String, Object> body) {
+ return restClient.post()
+ .uri("/api/embeddings")
+ .contentType(MediaType.APPLICATION_JSON)
+ .accept(MediaType.APPLICATION_JSON, MediaType.ALL)
+ .body(body)
+ .exchange((req, res) -> {
+ if (res.getStatusCode().isError()) {
+ throw new AiProviderException("Ollama returned " + res.getStatusCode());
+ }
+ return res.getBody().readAllBytes();
+ });
+ }
+
+ private EmbeddingResponse processEmbeddingResponse(byte[] responseBytes) {
+ if (responseBytes == null || responseBytes.length == 0) {
+ throw new AiProviderException("Ollama embedding returned an empty response");
+ }
+ @SuppressWarnings("unchecked")
+ Map<String, Object> response = objectMapper.readValue(responseBytes, Map.class);
+ @SuppressWarnings("unchecked")
+ List<Double> list = (List<Double>) response.get("embedding");
+ if (list == null) {
+ throw new AiProviderException("Ollama embedding response missing 'embedding' field");
+ }
+ float[] vector = new float[list.size()];
+ for (int i = 0; i < list.size(); i++) {
+ vector[i] = list.get(i).floatValue();
+ }
+ return new EmbeddingResponse(List.of(new Embedding(vector, 0)));
+ }
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/OpenAiManualConfig.java b/backend/src/main/java/ch/goodone/backend/ai/OpenAiManualConfig.java
index 90d16798b..a535a785e 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/OpenAiManualConfig.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/OpenAiManualConfig.java
@@ -10,12 +10,15 @@
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.embedding.EmbeddingRequest;
import org.springframework.ai.embedding.EmbeddingResponse;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
import org.springframework.http.MediaType;
+import org.springframework.http.client.JdkClientHttpRequestFactory;
import org.springframework.web.client.RestClient;
+import java.net.http.HttpClient;
+import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -25,6 +28,7 @@
* binary compatibility issues between Spring AI 1.0.0 and Spring Boot 4.x.
*/
@Configuration
+@Profile("openai")
@Slf4j
public class OpenAiManualConfig {
private static final String MODEL_FIELD = "model";
@@ -33,118 +37,223 @@ public class OpenAiManualConfig {
private static final String AUTH_HEADER = "Authorization";
private static final String BEARER_PREFIX = "Bearer ";
- @Value("${spring.ai.openai.api-key}")
- private String apiKey;
+ private final AiProperties aiProperties;
+ private final RestClient restClient;
- @Value("${spring.ai.openai.chat.options.model:gpt-4o}")
- private String chatModel;
-
- @Value("${spring.ai.openai.embedding.options.model:text-embedding-3-small}")
- private String embeddingModel;
-
- private final RestClient restClient = RestClient.builder()
- .baseUrl("https://api.openai.com/v1")
- .build();
+ public OpenAiManualConfig(AiProperties aiProperties) {
+ this.aiProperties = aiProperties;
+ String baseUrl = "https://api.openai.com/v1";
+ int timeoutSeconds = 60;
+
+ if (aiProperties.getOpenai() != null) {
+ if (aiProperties.getOpenai().getBaseUrl() != null) {
+ baseUrl = aiProperties.getOpenai().getBaseUrl();
+ }
+ if (aiProperties.getOpenai().getTimeoutSeconds() != null) {
+ timeoutSeconds = aiProperties.getOpenai().getTimeoutSeconds();
+ }
+ }
+
+ // Use HTTP/1.1 to avoid RST_STREAM errors on Fargate and with some HTTP proxies
+ HttpClient httpClient = HttpClient.newBuilder()
+ .version(HttpClient.Version.HTTP_1_1)
+ .connectTimeout(Duration.ofSeconds(10))
+ .build();
+
+ JdkClientHttpRequestFactory requestFactory = new JdkClientHttpRequestFactory(httpClient);
+ requestFactory.setReadTimeout(Duration.ofSeconds(timeoutSeconds));
+
+ this.restClient = RestClient.builder()
+ .baseUrl(baseUrl)
+ .requestFactory(requestFactory)
+ .build();
+ }
@Bean
public ChatModel openAiChatModel() {
log.info("Creating manual OpenAiChatModel bean (workaround for Spring Boot 4.x compatibility)");
- return new ChatModel() {
- @Override
- public ChatResponse call(Prompt prompt) {
- if (!org.springframework.util.StringUtils.hasText(apiKey) || "dummy".equals(apiKey)) {
- log.error("OpenAI API key is missing or dummy. AI features will fail. Please set OPENAI_API_KEY environment variable.");
- throw new IllegalStateException("OpenAI API key is missing or dummy. Please set OPENAI_API_KEY environment variable.");
- }
-
- String systemMessage = prompt.getInstructions().stream()
- .filter(m -> m.getMessageType().name().equals("SYSTEM"))
- .map(org.springframework.ai.chat.messages.Message::getText)
- .collect(Collectors.joining("\n"));
-
- String userMessage = prompt.getInstructions().stream()
- .filter(m -> m.getMessageType().name().equals("USER"))
- .map(org.springframework.ai.chat.messages.Message::getText)
- .collect(Collectors.joining("\n"));
-
- Map request = Map.of(
- MODEL_FIELD, chatModel,
- "messages", List.of(
- Map.of(ROLE_FIELD, "system", CONTENT_FIELD, systemMessage),
- Map.of(ROLE_FIELD, "user", CONTENT_FIELD, userMessage)
- )
- );
-
- Map response = restClient.post()
- .uri("/chat/completions")
- .header(AUTH_HEADER, BEARER_PREFIX + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .body(request)
- .retrieve()
- .body(Map.class);
-
- List<Map<String, Object>> choices = (List<Map<String, Object>>) response.get("choices");
- String content = (String) ((Map) choices.get(0).get("message")).get(CONTENT_FIELD);
-
- Generation generation = new Generation(new AssistantMessage(content));
- return new ChatResponse(List.of(generation));
+ return new ManualOpenAiChatModel(aiProperties, restClient);
+ }
+
+ private class ManualOpenAiChatModel implements ChatModel {
+ private final AiProperties aiProperties;
+ private final RestClient restClient;
+
+ public ManualOpenAiChatModel(AiProperties aiProperties, RestClient restClient) {
+ this.aiProperties = aiProperties;
+ this.restClient = restClient;
+ }
+
+ @Override
+ public ChatResponse call(Prompt prompt) {
+ AiProperties.OpenAiConfig config = aiProperties.getOpenai();
+ String apiKey = (config != null) ? config.getApiKey() : null;
+ String chatModel = resolveModel(prompt, (config != null) ? config.getChatModel() : "gpt-4o");
+ validateApiKey(apiKey);
+
+ String systemMessage = extractMessage(prompt, "SYSTEM");
+ String userMessage = extractMessage(prompt, "USER");
+
+ Map<String, Object> request = buildChatRequest(chatModel, systemMessage, userMessage, prompt, config);
+ Map<String, Object> response = executeChatCall(apiKey, request);
+ String content = extractChatContent(response);
+ return new ChatResponse(List.of(new Generation(new AssistantMessage(content))));
+ }
+
+ @SuppressWarnings("unchecked")
+ private Map<String, Object> executeChatCall(String apiKey, Map<String, Object> request) {
+ return restClient.post()
+ .uri("/chat/completions")
+ .header(AUTH_HEADER, BEARER_PREFIX + apiKey)
+ .contentType(MediaType.APPLICATION_JSON)
+ .body(request)
+ .retrieve()
+ .body(Map.class);
+ }
+
+ @SuppressWarnings("unchecked")
+ private String extractChatContent(Map<String, Object> response) {
+ List<Map<String, Object>> choices = (List<Map<String, Object>>) response.get("choices");
+ return (String) ((Map<String, Object>) choices.get(0).get("message")).get(CONTENT_FIELD);
+ }
+
+ private String resolveModel(Prompt prompt, String defaultModel) {
+ if (prompt.getOptions() != null && org.springframework.util.StringUtils.hasText(prompt.getOptions().getModel())) {
+ return prompt.getOptions().getModel();
}
- };
+ return defaultModel;
+ }
+
+ private void validateApiKey(String apiKey) {
+ if (!org.springframework.util.StringUtils.hasText(apiKey) || "dummy".equalsIgnoreCase(apiKey)) {
+ log.error("OpenAI API key is missing or dummy. AI features will fail. Please set app.ai.openai.api-key.");
+ throw new IllegalStateException("OpenAI API key is missing or dummy. Please set app.ai.openai.api-key.");
+ }
+ }
+
+ private String extractMessage(Prompt prompt, String type) {
+ return prompt.getInstructions().stream()
+ .filter(m -> m.getMessageType().name().equals(type))
+ .map(org.springframework.ai.chat.messages.Message::getText)
+ .collect(Collectors.joining("\n"));
+ }
+
+ private Map<String, Object> buildChatRequest(String chatModel, String systemMessage, String userMessage, Prompt prompt, AiProperties.OpenAiConfig config) {
+ Map<String, Object> request = new java.util.HashMap<>(Map.of(
+ MODEL_FIELD, chatModel,
+ "messages", List.of(
+ Map.of(ROLE_FIELD, "system", CONTENT_FIELD, systemMessage),
+ Map.of(ROLE_FIELD, "user", CONTENT_FIELD, userMessage)
+ )
+ ));
+
+ Double temperature = (config != null) ? config.getTemperature() : null;
+ Integer seed = (config != null) ? config.getSeed() : null;
+ if (prompt.getOptions() != null && prompt.getOptions().getTemperature() != null) {
+ temperature = prompt.getOptions().getTemperature();
+ }
+
+ boolean expectsJson = (systemMessage.toLowerCase().contains("json") || userMessage.toLowerCase().contains("json")
+ || systemMessage.toLowerCase().contains("schema") || userMessage.toLowerCase().contains("schema"));
+ if (expectsJson && (temperature == null || temperature > 0.0)) {
+ temperature = 0.0;
+ }
+
+ if (temperature != null) {
+ request.put("temperature", temperature);
+ }
+ if (seed != null) {
+ request.put("seed", seed);
+ }
+ return request;
+ }
}
+
@Bean
public EmbeddingModel openAiEmbeddingModel() {
log.info("Creating manual OpenAiEmbeddingModel bean (workaround for Spring Boot 4.x compatibility)");
- return new EmbeddingModel() {
- @Override
- public float[] embed(String text) {
- EmbeddingResponse response = call(new EmbeddingRequest(List.of(text), null));
- if (response.getResults().isEmpty()) {
- log.warn("Embedding response is empty, likely due to missing API key or dummy provider. Returning zero vector.");
- return new float[0];
- }
- return response.getResults().get(0).getOutput();
+ return new ManualOpenAiEmbeddingModel(aiProperties, restClient);
+ }
+
+ private class ManualOpenAiEmbeddingModel implements EmbeddingModel {
+ private final AiProperties aiProperties;
+ private final RestClient restClient;
+
+ public ManualOpenAiEmbeddingModel(AiProperties aiProperties, RestClient restClient) {
+ this.aiProperties = aiProperties;
+ this.restClient = restClient;
+ }
+
+ @Override
+ public float[] embed(String text) {
+ EmbeddingResponse response = call(new EmbeddingRequest(List.of(text), null));
+ if (response.getResults().isEmpty()) {
+ log.warn("Embedding response is empty, likely due to missing API key or dummy provider. Returning zero vector.");
+ return new float[0];
}
+ return response.getResults().get(0).getOutput();
+ }
+
+ @Override
+ public float[] embed(org.springframework.ai.document.Document document) {
+ return embed(document.getFormattedContent());
+ }
- @Override
- public float[] embed(org.springframework.ai.document.Document document) {
- return embed(document.getFormattedContent());
+ @Override
+ public EmbeddingResponse call(EmbeddingRequest request) {
+ AiProperties.OpenAiConfig config = aiProperties.getOpenai();
+ String apiKey = config != null ? config.getApiKey() : null;
+ String embeddingModel = resolveEmbeddingModel(request, config);
+
+ if (!org.springframework.util.StringUtils.hasText(apiKey) || "dummy".equalsIgnoreCase(apiKey)) {
+ log.warn("OpenAI API key is missing or dummy. Returning empty embeddings. Please set app.ai.openai.api-key.");
+ return new EmbeddingResponse(List.of());
}
- @Override
- public EmbeddingResponse call(EmbeddingRequest request) {
- if (!org.springframework.util.StringUtils.hasText(apiKey) || "dummy".equals(apiKey)) {
- log.warn("OpenAI API key is missing or dummy. Returning empty embeddings. Please set OPENAI_API_KEY.");
- return new EmbeddingResponse(List.of());
- }
-
- Map body = Map.of(
- MODEL_FIELD, embeddingModel,
- "input", request.getInstructions()
- );
-
- Map response = restClient.post()
- .uri("/embeddings")
- .header(AUTH_HEADER, BEARER_PREFIX + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .body(body)
- .retrieve()
- .body(Map.class);
-
- List<Map<String, Object>> data = (List<Map<String, Object>>) response.get("data");
- List embeddings = data.stream()
- .map(d -> {
- List list = (List) d.get("embedding");
- float[] vector = new float[list.size()];
- for (int i = 0; i < list.size(); i++) {
- vector[i] = list.get(i).floatValue();
- }
- return new Embedding(vector, (Integer) d.get("index"));
- })
- .toList();
-
- return new EmbeddingResponse(embeddings);
+ Map<String, Object> body = Map.of(
+ MODEL_FIELD, embeddingModel,
+ "input", request.getInstructions()
+ );
+
+ Map<String, Object> response = executeEmbeddingCall(apiKey, body);
+ List<Embedding> embeddings = processEmbeddingResponse(response);
+ return new EmbeddingResponse(embeddings);
+ }
+
+ private String resolveEmbeddingModel(EmbeddingRequest request, AiProperties.OpenAiConfig config) {
+ String embeddingModel = (config != null) ? config.getEmbeddingModel() : "text-embedding-3-small";
+ if (request.getOptions() != null && org.springframework.util.StringUtils.hasText(request.getOptions().getModel())) {
+ embeddingModel = request.getOptions().getModel();
}
- };
+ return embeddingModel;
+ }
+
+ @SuppressWarnings("unchecked")
+ private Map<String, Object> executeEmbeddingCall(String apiKey, Map<String, Object> body) {
+ return restClient.post()
+ .uri("/embeddings")
+ .header(AUTH_HEADER, BEARER_PREFIX + apiKey)
+ .contentType(MediaType.APPLICATION_JSON)
+ .body(body)
+ .retrieve()
+ .body(Map.class);
+ }
+
+ @SuppressWarnings("unchecked")
+ private List<Embedding> processEmbeddingResponse(Map<String, Object> response) {
+ List<Map<String, Object>> data = (List<Map<String, Object>>) response.get("data");
+ return data.stream()
+ .map(d -> {
+ List<Double> list = (List<Double>) d.get("embedding");
+ float[] vector = new float[list.size()];
+ for (int i = 0; i < list.size(); i++) {
+ vector[i] = list.get(i).floatValue();
+ }
+ return new Embedding(vector, (Integer) d.get("index"));
+ })
+ .toList();
+ }
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/RoutingChatModel.java b/backend/src/main/java/ch/goodone/backend/ai/RoutingChatModel.java
new file mode 100644
index 000000000..a180ec13e
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/RoutingChatModel.java
@@ -0,0 +1,73 @@
+package ch.goodone.backend.ai;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.ai.chat.model.ChatResponse;
+import org.springframework.ai.chat.model.StreamingChatModel;
+import org.springframework.ai.chat.prompt.Prompt;
+import org.springframework.ai.openai.OpenAiChatOptions;
+import reactor.core.publisher.Flux;
+
+/**
+ * A proxy ChatModel that delegates calls to a provider resolved by AiRoutingService.
+ */
+@RequiredArgsConstructor
+@Slf4j
+public class RoutingChatModel implements ChatModel, StreamingChatModel {
+
+ private final String featureName;
+ private final AiRoutingService routingService;
+ private final AiProperties aiProperties;
+
+ @Override
+ public ChatResponse call(Prompt prompt) {
+ log.debug("Routing chat call for feature: {}", featureName);
+ ChatModel delegate = routingService.resolveDelegate(featureName);
+
+ Prompt routedPrompt = ensureModelSet(prompt, featureName);
+ return delegate.call(routedPrompt);
+ }
+
+ @Override
+ public Flux<ChatResponse> stream(Prompt prompt) {
+ log.debug("Routing chat stream for feature: {}", featureName);
+ ChatModel delegate = routingService.resolveDelegate(featureName);
+
+ Prompt routedPrompt = ensureModelSet(prompt, featureName);
+ if (delegate instanceof StreamingChatModel streamingDelegate) {
+ return streamingDelegate.stream(routedPrompt);
+ }
+ throw new UnsupportedOperationException("Delegate does not support streaming");
+ }
+
+ private Prompt ensureModelSet(Prompt prompt, String featureName) {
+ if (prompt.getOptions() != null && org.springframework.util.StringUtils.hasText(prompt.getOptions().getModel())) {
+ return prompt;
+ }
+
+ String model = resolveModelForFeature(featureName);
+ if (model == null) {
+ return prompt;
+ }
+
+ log.debug("Applying routed model {} for feature {}", model, featureName);
+
+ // We use OpenAiChatOptions as a common way to pass the model,
+ // ManualOpenAiChatModel respects it.
+ OpenAiChatOptions options = OpenAiChatOptions.builder()
+ .model(model)
+ .build();
+
+ return new Prompt(prompt.getInstructions(), options);
+ }
+
+ private String resolveModelForFeature(String featureName) {
+ AiProperties.CapabilityConfig config = aiProperties.getConfigForFeature(featureName);
+ if (config != null && config.getModel() != null && !config.getModel().isBlank()) {
+ return config.getModel();
+ }
+ return null;
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftAiService.java b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftAiService.java
index ebf7193bd..b4657c8f8 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftAiService.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftAiService.java
@@ -1,10 +1,11 @@
package ch.goodone.backend.ai.application;
-import ch.goodone.backend.ai.AiProviderService;
import ch.goodone.backend.ai.dto.AdrDriftRequest;
import ch.goodone.backend.ai.dto.AdrDriftResponse;
-import ch.goodone.backend.ai.prompt.StructuredOutputService;
+import ch.goodone.backend.ai.infrastructure.AiPipeline;
+import ch.goodone.backend.model.taxonomy.CopilotContextMode;
import lombok.RequiredArgsConstructor;
+import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
@@ -16,8 +17,7 @@
@RequiredArgsConstructor
public class AdrDriftAiService {
- private final AiProviderService aiProviderService;
- private final StructuredOutputService structuredOutputService;
+ private final AiPipeline aiPipeline;
@Value("classpath:prompts/adr-drift/v1/generate.st")
private Resource generatePromptResource;
@@ -29,11 +29,27 @@ public AdrDriftResponse detect(AdrDriftRequest request, String adrContext, Strin
templateModel.put("adrContext", adrContext);
templateModel.put("taskContext", taskContext);
- return structuredOutputService.call(
- aiProviderService.getRetrospectiveChatModel(),
- generatePromptResource,
- templateModel,
- AdrDriftResponse.class
- );
+ PromptTemplate promptTemplate = new PromptTemplate(generatePromptResource);
+ String userPrompt = promptTemplate.render(templateModel);
+
+ String systemPrompt = "You are a senior architect analyzing ADR drift. Return ONLY schema-valid JSON.";
+
+ String sprintId = null;
+ if (request.getTasksets() != null && !request.getTasksets().isEmpty()) {
+ sprintId = request.getTasksets().get(0);
+ }
+
+ AiPipeline.AiRequest aiRequest = AiPipeline.AiRequest.builder()
+ .query(userPrompt)
+ .mode(CopilotContextMode.ARCHITECTURE_QA)
+ .topK(0) // Context already provided
+ .feature("adr-drift")
+ .sprintId(sprintId)
+ .systemPrompt(systemPrompt)
+ .schemaName("adrDrift")
+ .build();
+
+ return aiPipeline.execute(aiRequest, AdrDriftResponse.class);
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCase.java
index 38bce3cc1..0e1a69835 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCase.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCase.java
@@ -2,7 +2,14 @@
import ch.goodone.backend.ai.dto.AdrDriftRequest;
import ch.goodone.backend.ai.dto.AdrDriftResponse;
+import ch.goodone.backend.model.signal.EngineeringSignal;
+import java.util.List;
public interface AdrDriftUseCase {
 AdrDriftResponse execute(AdrDriftRequest request);
+
+ // NOTE(review): raw List return — presumably List<EngineeringSignal> given the new
+ // EngineeringSignal import; confirm generics were not stripped by the diff tooling.
+ // NOTE(review): this default method performs a full execute() per call (a complete AI
+ // drift detection). Callers needing both the response and the signals should call
+ // execute() once and convert, to avoid a duplicate AI invocation.
+ default List emitSignals(AdrDriftRequest request) {
+ return execute(request).toSignals("adr-drift-engine");
+ }
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCaseImpl.java
index b223a3a9e..967213bb2 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCaseImpl.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AdrDriftUseCaseImpl.java
@@ -2,12 +2,16 @@
import ch.goodone.backend.ai.AiProperties;
import ch.goodone.backend.ai.AiProviderService;
+import ch.goodone.backend.ai.prompt.PromptAssemblyService;
import ch.goodone.backend.ai.dto.AdrDriftRequest;
import ch.goodone.backend.ai.dto.AdrDriftResponse;
import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.application.TaskGroupResolutionService;
+import ch.goodone.backend.ai.AiRoutingService;
import ch.goodone.backend.model.DocChunk;
import ch.goodone.backend.model.DocEmbedding;
import ch.goodone.backend.model.DocSource;
+import ch.goodone.backend.model.converter.VectorConverter;
import ch.goodone.backend.repository.DocChunkRepository;
import ch.goodone.backend.repository.DocEmbeddingRepository;
import ch.goodone.backend.repository.DocSourceRepository;
@@ -24,6 +28,7 @@
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
@@ -31,6 +36,9 @@
public class AdrDriftUseCaseImpl implements AdrDriftUseCase {
private static final String TASKSET_PREFIX = "taskset-";
+ private static final String SPRINT_PREFIX = "sprint-";
+ private static final String PROVIDER_OLLAMA = "ollama";
+ private static final String CONTEXT_SEPARATOR = "\n---\n";
private final DocSourceRepository sourceRepository;
private final DocChunkRepository chunkRepository;
@@ -39,6 +47,13 @@ public class AdrDriftUseCaseImpl implements AdrDriftUseCase {
private final AiObservabilityService observabilityService;
private final AiProperties aiProperties;
private final AiProviderService aiProviderService;
+ private final PromptAssemblyService promptAssemblyService;
+ private final TaskGroupResolutionService taskGroupResolutionService;
+ private final AiRoutingService aiRoutingService;
+ private final VectorConverter vectorConverter = new VectorConverter();
+
+ private static final Pattern ADR_PATTERN = Pattern.compile("#### (ADR-\\d+: .+)");
+ private static final Pattern DECISION_PATTERN = Pattern.compile("\\*\\*Decision:\\*\\* (.+)", Pattern.CASE_INSENSITIVE);
@Override
@Transactional(readOnly = true)
@@ -50,29 +65,80 @@ public AdrDriftResponse execute(AdrDriftRequest request) {
return emptyResponse();
}
- StringBuilder adrContext = new StringBuilder();
Set allSources = new HashSet<>();
- List queries = extractQueriesAndContext(adrSources, adrContext, allSources);
+ List adrChunks = new ArrayList<>();
+ List queries = new ArrayList<>();
+
+ collectAdrContext(adrSources, allSources, adrChunks, queries);
+
+ String providerName = aiRoutingService.resolveProvider("retrospective");
+ int adrLimit = getContextLimit(providerName);
+
+ String adrContext = assembleAdrContext(adrChunks, providerName, adrLimit);
+
+ Set relevantTaskChunks = resolveRelevantTaskChunks(request, queries);
+
+ String taskContext = assembleTaskContext(relevantTaskChunks, providerName, getContextLimit(providerName), allSources, request);
+
+ return callAiForDriftDetection(request, adrContext, taskContext);
+ }
+
+ private void collectAdrContext(List adrSources, Set allSources, List adrChunks, List queries) {
+ for (DocSource adrSource : adrSources) {
+ allSources.add(adrSource.getPath());
+ List chunks = chunkRepository.findBySource(adrSource);
+ adrChunks.addAll(chunks);
+ for (DocChunk chunk : chunks) {
+ String content = chunk.getContent();
+ extractMatches(content, ADR_PATTERN, queries);
+ extractMatches(content, DECISION_PATTERN, queries);
+ }
+ }
+ }
+ private int getContextLimit(String providerName) {
+ return providerName.toLowerCase().contains(PROVIDER_OLLAMA) ? 8000 : 20000;
+ }
+
+ private String assembleAdrContext(List adrChunks, String providerName, int limit) {
+ return promptAssemblyService.assembleContext(adrChunks, "adr-drift-base", limit, chunk -> {
+ String content = sanitizeContent(chunk.getContent(), providerName);
+ return "ADR Source: " + chunk.getSource().getPath() + "\n" + content + CONTEXT_SEPARATOR;
+ });
+ }
+
+ private Set resolveRelevantTaskChunks(AdrDriftRequest request, List queries) {
Set relevantTaskChunks = retrieveRelevantTaskChunks(request, queries);
if (relevantTaskChunks.isEmpty()) {
relevantTaskChunks = fallbackToRecentTasks(request);
}
+ return relevantTaskChunks;
+ }
- StringBuilder taskContext = new StringBuilder();
- for (DocChunk taskChunk : relevantTaskChunks) {
- taskContext.append("Task Source: ").append(taskChunk.getSource().getPath()).append("\n");
- taskContext.append(taskChunk.getContent()).append("\n---\n");
- allSources.add(taskChunk.getSource().getPath());
+ private String assembleTaskContext(Set relevantTaskChunks, String providerName, int limit, Set allSources, AdrDriftRequest request) {
+ String taskContext = promptAssemblyService.assembleContext(new ArrayList<>(relevantTaskChunks), "adr-drift-tasks", limit, chunk -> {
+ allSources.add(chunk.getSource().getPath());
+ String content = sanitizeContent(chunk.getContent(), providerName);
+ return "Task Source: " + chunk.getSource().getPath() + "\n" + content + CONTEXT_SEPARATOR;
+ });
+
+ if (request.getProposedChange() != null && !request.getProposedChange().isEmpty()) {
+ taskContext += "\nPROPOSED CHANGE TO EVALUATE:\n" + request.getProposedChange() + CONTEXT_SEPARATOR;
}
+ return taskContext;
+ }
- return callAiForDriftDetection(request, adrContext.toString(), taskContext.toString(), allSources);
+ private String sanitizeContent(String content, String providerName) {
+ if (providerName.toLowerCase().contains(PROVIDER_OLLAMA) && content.trim().startsWith("{")) {
+ return content.replaceAll("[{}\\[\\]\"]", "");
+ }
+ return content;
}
private List resolveAdrSources(AdrDriftRequest request) {
List adrPaths = request.getAdrDocPaths();
if (adrPaths == null || adrPaths.isEmpty()) {
- adrPaths = List.of("doc/knowledge/adrs/");
+ adrPaths = List.of("knowledge/adrs/");
}
List adrSources = new ArrayList<>();
@@ -89,28 +155,11 @@ private AdrDriftResponse emptyResponse() {
return AdrDriftResponse.builder()
.principles(new ArrayList<>())
.potentialDrifts(new ArrayList<>())
- .confidence(0.1)
+ .confidence(1.0)
.sources(new ArrayList<>())
.build();
}
- private List extractQueriesAndContext(List adrSources, StringBuilder contextBuilder, Set allSources) {
- List queries = new ArrayList<>();
- for (DocSource adrSource : adrSources) {
- List chunks = chunkRepository.findBySource(adrSource);
- for (DocChunk chunk : chunks) {
- String content = chunk.getContent();
- extractMatches(content, Pattern.compile("#### (ADR-\\d+: .+)"), queries);
- extractMatches(content, Pattern.compile("\\*\\*Decision:\\*\\* (.+)", Pattern.CASE_INSENSITIVE), queries);
-
- contextBuilder.append("ADR Source: ").append(adrSource.getPath()).append("\n");
- contextBuilder.append(content).append("\n---\n");
- allSources.add(adrSource.getPath());
- }
- }
- return queries;
- }
-
private void extractMatches(String content, Pattern pattern, List queries) {
Matcher matcher = pattern.matcher(content);
while (matcher.find()) {
@@ -119,22 +168,32 @@ private void extractMatches(String content, Pattern pattern, List querie
}
private Set retrieveRelevantTaskChunks(AdrDriftRequest request, List queries) {
- Set relevantTaskChunks = new HashSet<>();
if (aiProperties.getEmbedding() == null || !aiProperties.getEmbedding().isEnabled()) {
- return relevantTaskChunks;
+ return new HashSet<>();
}
+ // Performance improvement: Limit the number of unique queries to avoid excessive embedding calls
+ List limitedQueries = queries.stream().distinct().limit(10).toList();
+
String embeddingModelName = aiProperties.getEmbedding().getModel();
- for (String query : queries) {
- processQueryForRelevantChunks(request, query, embeddingModelName, relevantTaskChunks);
- }
- return relevantTaskChunks;
+ return limitedQueries.parallelStream()
+ .flatMap(query -> {
+ Set chunks = new HashSet<>();
+ processQueryForRelevantChunks(request, query, embeddingModelName, chunks);
+ return chunks.stream();
+ })
+ .collect(Collectors.toSet());
}
private void processQueryForRelevantChunks(AdrDriftRequest request, String query, String modelName, Set relevantChunks) {
try {
float[] queryVector = aiProviderService.getEmbeddingModel().embed(query);
- List similarEmbeddings = embeddingRepository.findTopKSimilar(Arrays.toString(queryVector), modelName, 5);
+ if (queryVector == null || queryVector.length == 0) {
+ log.warn("Skipping semantic search for query '{}': empty embedding vector", query);
+ return;
+ }
+ String vectorString = vectorConverter.convertToDatabaseColumn(queryVector);
+ List similarEmbeddings = embeddingRepository.findTopKSimilar(vectorString, modelName, 5);
for (DocEmbedding embedding : similarEmbeddings) {
DocChunk chunk = embedding.getChunk();
if (isChunkRelevant(chunk, request)) {
@@ -148,18 +207,31 @@ private void processQueryForRelevantChunks(AdrDriftRequest request, String query
private boolean isChunkRelevant(DocChunk chunk, AdrDriftRequest request) {
DocSource source = chunk.getSource();
- String path = source.getPath();
- return path.contains(TASKSET_PREFIX)
- && isWithinDateRange(source, request.getFromDate(), request.getToDate())
- && isSelectedTaskset(source, request.getTasksets());
+
+ // Resolve task keys for the requested sprints/tasksets if not already in request
+ Set resolvedKeys = new HashSet<>();
+ if (request.getTasksets() != null) {
+ for (String ts : request.getTasksets()) {
+ resolvedKeys.addAll(taskGroupResolutionService.resolveSprintTaskKeys(ts));
+ }
+ }
+ if (request.getTaskKeys() != null) {
+ resolvedKeys.addAll(request.getTaskKeys());
+ }
+
+ return isWithinDateRange(source, request.getFromDate(), request.getToDate())
+ && isSelectedTaskset(source, request.getTasksets(), new ArrayList<>(resolvedKeys));
}
private Set fallbackToRecentTasks(AdrDriftRequest request) {
log.info("Using fallback (recent tasks).");
Set relevantTaskChunks = new HashSet<>();
- List recentTaskSources = sourceRepository.findByPathContaining(TASKSET_PREFIX).stream()
+ List taskSources = sourceRepository.findByPathContaining(TASKSET_PREFIX);
+ taskSources.addAll(sourceRepository.findByPathContaining(SPRINT_PREFIX));
+
+ List recentTaskSources = taskSources.stream()
.filter(s -> isWithinDateRange(s, request.getFromDate(), request.getToDate()))
- .filter(s -> isSelectedTaskset(s, request.getTasksets()))
+ .filter(s -> isSelectedTaskset(s, request.getTasksets(), request.getTaskKeys()))
.limit(20)
.toList();
for (DocSource s : recentTaskSources) {
@@ -168,35 +240,37 @@ private Set fallbackToRecentTasks(AdrDriftRequest request) {
return relevantTaskChunks;
}
- private AdrDriftResponse callAiForDriftDetection(AdrDriftRequest request, String adrContext, String taskContext, Set allSources) {
+ private AdrDriftResponse callAiForDriftDetection(AdrDriftRequest request, String adrContext, String taskContext) {
String provider = aiProperties.getRetrospective() != null
? aiProperties.getRetrospective().getProvider()
: aiProperties.getArchitecture().getProvider();
+
+ if ("routing".equalsIgnoreCase(provider)) {
+ provider = aiRoutingService.resolveProvider("retrospective");
+ }
+
String model = aiProperties.getRetrospective() != null
? aiProperties.getRetrospective().getModel()
: aiProperties.getArchitecture().getModel();
try {
- AdrDriftResponse response = observabilityService.recordCall(
+ return observabilityService.recordCall(
"adr-drift-detect",
provider,
model,
"v1",
"ADR Drift Detection Request",
- () -> aiService.detect(request, adrContext, taskContext)
+ () -> {
+ if (request.getTasksets() != null && !request.getTasksets().isEmpty()) {
+ String sprintId = request.getTasksets().get(0);
+ observabilityService.updateTraceMetadata(m -> m.setSprint(sprintId));
+ }
+ return aiService.detect(request, adrContext, taskContext);
+ }
);
- if (response.getSources() == null || response.getSources().isEmpty()) {
- response.setSources(new ArrayList<>(allSources));
- }
- return response;
} catch (Exception e) {
log.error("AI ADR Drift detection failed: {}", e.getMessage());
- return AdrDriftResponse.builder()
- .principles(new ArrayList<>())
- .potentialDrifts(new ArrayList<>())
- .confidence(0.0)
- .sources(new ArrayList<>(allSources))
- .build();
+ return emptyResponse();
}
}
@@ -206,15 +280,47 @@ private boolean isWithinDateRange(DocSource source, LocalDate from, LocalDate to
(to == null || !lastIndexedDate.isAfter(to));
}
- private boolean isSelectedTaskset(DocSource source, List tasksets) {
+ private boolean isSelectedTaskset(DocSource source, List tasksets, List taskKeys) {
+ boolean hasTasksets = tasksets != null && !tasksets.isEmpty();
+ boolean hasTaskKeys = taskKeys != null && !taskKeys.isEmpty();
+
+ if (!hasTasksets && !hasTaskKeys) {
+ return isDefaultTaskSource(source);
+ }
+
+ String path = source.getPath().replace('\\', '/');
+ return matchesTaskKeys(path, taskKeys) || matchesTasksets(path, tasksets);
+ }
+
+ private boolean isDefaultTaskSource(DocSource source) {
+ String path = source.getPath();
+ return path.contains(TASKSET_PREFIX) || path.contains(SPRINT_PREFIX);
+ }
+
+ private boolean matchesTaskKeys(String path, List taskKeys) {
+ if (taskKeys == null || taskKeys.isEmpty()) {
+ return false;
+ }
+ for (String key : taskKeys) {
+ if (path.contains("/" + key + "/") || path.contains("/" + key + " ") || path.contains("/" + key + "-") || path.endsWith("/" + key + ".md")) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private boolean matchesTasksets(String path, List tasksets) {
if (tasksets == null || tasksets.isEmpty()) {
- return true;
+ return false;
}
for (String ts : tasksets) {
- if (source.getPath().contains(TASKSET_PREFIX + ts) || source.getPath().contains("taskset/" + ts)) {
+ if (path.contains("/" + TASKSET_PREFIX + ts + "/") || path.contains("/" + TASKSET_PREFIX + ts + "-")
+ || path.contains("/" + SPRINT_PREFIX + ts + "/") || path.contains("/" + SPRINT_PREFIX + ts + "-")
+ || path.contains(TASKSET_PREFIX + ts + "/") || path.contains(SPRINT_PREFIX + ts + "/")) {
return true;
}
}
return false;
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AiApplicationService.java b/backend/src/main/java/ch/goodone/backend/ai/application/AiApplicationService.java
index e459735b5..1d6e92809 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/AiApplicationService.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AiApplicationService.java
@@ -1,29 +1,87 @@
package ch.goodone.backend.ai.application;
+import ch.goodone.backend.ai.dto.SprintRiskResponse;
+import ch.goodone.backend.ai.dto.ReleaseReadinessResponse;
+import ch.goodone.backend.ai.dto.ImpactAnalysisRequest;
+import ch.goodone.backend.ai.dto.ImpactAnalysisResponse;
+import ch.goodone.backend.ai.dto.DecisionProposalRequest;
+import ch.goodone.backend.ai.dto.DecisionProposalResponse;
+import ch.goodone.backend.ai.dto.BacklogAnalysisResponse;
+import ch.goodone.backend.ai.dto.EngineeringArtifact;
+import ch.goodone.backend.ai.dto.AdrMetadata;
import ch.goodone.backend.ai.dto.ArchitectureExplainRequest;
-import ch.goodone.backend.ai.dto.ArchitectureExplainResult;
+import ch.goodone.backend.ai.dto.CopilotResponse;
import ch.goodone.backend.ai.dto.AdrDriftRequest;
import ch.goodone.backend.ai.dto.AdrDriftResponse;
import ch.goodone.backend.ai.dto.QuickAddParseRequest;
import ch.goodone.backend.ai.dto.QuickAddParseResult;
import ch.goodone.backend.ai.dto.RiskRadarRequest;
import ch.goodone.backend.ai.dto.RiskRadarResponse;
+import ch.goodone.backend.ai.dto.AiIntelligenceDashboardDto;
+import ch.goodone.backend.ai.dto.DashboardProgressUpdate;
+import ch.goodone.backend.ai.dto.EngineeringChatRequest;
+import ch.goodone.backend.ai.dto.CodeChangeRequest;
+import ch.goodone.backend.ai.dto.OnboardingRequest;
+import ch.goodone.backend.ai.dto.TaskRelationship;
import ch.goodone.backend.ai.exception.AiDisabledException;
-import lombok.RequiredArgsConstructor;
+import ch.goodone.backend.ai.knowledge.AdrIndexService;
+import ch.goodone.backend.ai.knowledge.EngineeringContextService;
+import ch.goodone.backend.ai.routing.CopilotRouterService;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
+import java.util.List;
+
/**
* Application service facade for AI operations.
*/
@Service
-@RequiredArgsConstructor
+@Slf4j
public class AiApplicationService {
private final QuickAddParseUseCase quickAddParseUseCase;
- private final ArchitectureExplainUseCase architectureExplainUseCase;
private final RiskRadarUseCase riskRadarUseCase;
private final AdrDriftUseCase adrDriftUseCase;
+ private final AdrIndexService adrIndexService;
+ private final EngineeringContextService engineeringContextService;
+ private final BacklogAnalyzerUseCase backlogAnalyzerUseCase;
+ private final DecisionAssistantUseCase decisionAssistantUseCase;
+ private final ImpactSimulatorUseCase impactSimulatorUseCase;
+ private final ReleaseIntelligenceUseCase releaseIntelligenceUseCase;
+ private final SprintRiskPredictorUseCase sprintRiskPredictorUseCase;
+ private final AiIntelligenceService aiIntelligenceService;
+ private final TaskRelationshipService taskRelationshipService;
+ private final CopilotRouterService copilotRouterService;
+
+ public AiApplicationService(QuickAddParseUseCase quickAddParseUseCase,
+ RiskRadarUseCase riskRadarUseCase,
+ AdrDriftUseCase adrDriftUseCase,
+ AdrIndexService adrIndexService,
+ EngineeringContextService engineeringContextService,
+ BacklogAnalyzerUseCase backlogAnalyzerUseCase,
+ DecisionAssistantUseCase decisionAssistantUseCase,
+ ImpactSimulatorUseCase impactSimulatorUseCase,
+ ReleaseIntelligenceUseCase releaseIntelligenceUseCase,
+ SprintRiskPredictorUseCase sprintRiskPredictorUseCase,
+ AiIntelligenceService aiIntelligenceService,
+ TaskRelationshipService taskRelationshipService,
+ CopilotRouterService copilotRouterService) {
+ this.quickAddParseUseCase = quickAddParseUseCase;
+ this.riskRadarUseCase = riskRadarUseCase;
+ this.adrDriftUseCase = adrDriftUseCase;
+ this.adrIndexService = adrIndexService;
+ this.engineeringContextService = engineeringContextService;
+ this.backlogAnalyzerUseCase = backlogAnalyzerUseCase;
+ this.decisionAssistantUseCase = decisionAssistantUseCase;
+ this.impactSimulatorUseCase = impactSimulatorUseCase;
+ this.releaseIntelligenceUseCase = releaseIntelligenceUseCase;
+ this.sprintRiskPredictorUseCase = sprintRiskPredictorUseCase;
+ this.aiIntelligenceService = aiIntelligenceService;
+ this.taskRelationshipService = taskRelationshipService;
+ this.copilotRouterService = copilotRouterService;
+ }
@Value("${app.ai.enabled:false}")
private boolean aiEnabled;
@@ -47,11 +105,12 @@ public QuickAddParseResult parseQuickAdd(QuickAddParseRequest request, String lo
* Explains the project architecture.
*
* @param request The explanation request.
+ * @param login The login of the user.
* @return The explanation result.
*/
- public ArchitectureExplainResult explainArchitecture(ArchitectureExplainRequest request) {
+ public CopilotResponse explainArchitecture(ArchitectureExplainRequest request, String login) {
checkAiEnabled();
- return architectureExplainUseCase.execute(request);
+ return (CopilotResponse) copilotRouterService.route(CopilotCapability.ARCHITECTURE_QA, request, login);
}
/**
@@ -76,9 +135,151 @@ public AdrDriftResponse detectAdrDrift(AdrDriftRequest request) {
return adrDriftUseCase.execute(request);
}
+ /**
+ * Retrieve all Architecture Decision Records.
+ *
+ * @param query Optional search query.
+ * @return List of ADR metadata.
+ */
+ public List getAdrs(String query) {
+ // We don't checkAiEnabled() here as documentation is always useful
+ return adrIndexService.search(query);
+ }
+
+ /**
+ * Retrieve engineering context artifacts.
+ *
+ * @param query Optional search query.
+ * @return List of engineering artifacts.
+ */
+ public List getEngineeringContext(String query) {
+ return engineeringContextService.search(query);
+ }
+
+ /**
+ * Analyze the task backlog.
+ *
+ * @return Backlog analysis report.
+ */
+ public BacklogAnalysisResponse analyzeBacklog() {
+ return backlogAnalyzerUseCase.execute();
+ }
+
+ /**
+ * Proposes a decision based on project context.
+ *
+ * @param request The decision request.
+ * @return Decision proposal.
+ */
+ public DecisionProposalResponse proposeDecision(DecisionProposalRequest request) {
+ checkAiEnabled();
+ return decisionAssistantUseCase.execute(request);
+ }
+
+ /**
+ * Simulates the impact of a proposed change.
+ *
+ * @param request The impact request.
+ * @return Impact analysis.
+ */
+ public ImpactAnalysisResponse simulateImpact(ImpactAnalysisRequest request) {
+ checkAiEnabled();
+ return impactSimulatorUseCase.execute(request);
+ }
+
+ /**
+ * Summarizes release readiness and risks.
+ *
+ * @return Release readiness report.
+ */
+ public ReleaseReadinessResponse getReleaseReadiness() {
+ return releaseIntelligenceUseCase.execute();
+ }
+
+ /**
+ * Predicts risk for a given sprint.
+ *
+ * @param sprint The sprint identifier.
+ * @return Sprint risk report.
+ */
+ public SprintRiskResponse getSprintRisk(String sprint) {
+ return sprintRiskPredictorUseCase.execute(sprint);
+ }
+
+ /**
+ * Retrieve the comprehensive AI Intelligence Dashboard for a sprint.
+ */
+ public AiIntelligenceDashboardDto getIntelligenceDashboard(String sprint) {
+ checkAiEnabled();
+ return aiIntelligenceService.getDashboard(sprint);
+ }
+
+ /**
+ * Discover all available sprints.
+ */
+ public List getAvailableSprints() {
+ return aiIntelligenceService.getAvailableSprints();
+ }
+
+ public List getTaskGroups() {
+ return aiIntelligenceService.getTaskGroups();
+ }
+
+ /**
+ * Stream the AI Intelligence Dashboard progress and result.
+ */
+ public void streamIntelligenceDashboard(String sprint, java.util.function.Consumer progressConsumer) {
+ checkAiEnabled();
+ aiIntelligenceService.streamDashboard(sprint, progressConsumer);
+ }
+
+ /**
+ * Conversational engineering help grounded in project knowledge.
+ *
+ * @param request The engineering chat request.
+ * @param login The login of the user.
+ * @return The chat result.
+ */
+ public CopilotResponse askEngineeringChat(EngineeringChatRequest request, String login) {
+ checkAiEnabled();
+ return (CopilotResponse) copilotRouterService.route(CopilotCapability.ENGINEERING_CHAT, request, login);
+ }
+
+ /**
+ * Explains code changes in a diff.
+ *
+ * @param request The code change request.
+ * @param login The login of the user.
+ * @return The explanation result.
+ */
+ public CopilotResponse explainCodeChange(CodeChangeRequest request, String login) {
+ checkAiEnabled();
+ return (CopilotResponse) copilotRouterService.route(CopilotCapability.CODE_EXPLANATION, request, login);
+ }
+
+ /**
+ * Provides onboarding guidance for new developers.
+ *
+ * @param request The onboarding request.
+ * @param login The login of the user.
+ * @return The onboarding result.
+ */
+ public CopilotResponse getOnboardingHelp(OnboardingRequest request, String login) {
+ checkAiEnabled();
+ return (CopilotResponse) copilotRouterService.route(CopilotCapability.ONBOARDING, request, login);
+ }
+
+ /**
+ * Detects relationships between tasks in a taskset.
+ */
+ public List detectTaskRelationships(String taskset) {
+ return taskRelationshipService.analyzeTaskset(taskset);
+ }
+
private void checkAiEnabled() {
if (!aiEnabled) {
throw new AiDisabledException(aiDisabledMessage);
}
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AiDashboardExplanationService.java b/backend/src/main/java/ch/goodone/backend/ai/application/AiDashboardExplanationService.java
new file mode 100644
index 000000000..cdf3ff2f3
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AiDashboardExplanationService.java
@@ -0,0 +1,127 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.AiProviderService;
+import ch.goodone.backend.ai.dto.AiIntelligenceDashboardDto;
+import ch.goodone.backend.ai.observability.AiCallParams;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.ai.chat.messages.SystemMessage;
+import org.springframework.ai.chat.messages.UserMessage;
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.ai.chat.model.ChatResponse;
+import org.springframework.ai.chat.prompt.Prompt;
+import org.springframework.ai.openai.OpenAiChatOptions;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+
+/**
+ * Generates short natural-language explanations for AI dashboard metrics
+ * (health score, sprint progress, backlog leakage) by delegating to the
+ * configured evaluation ("judge") chat model through {@link AiObservabilityService}.
+ * Null inputs yield null; any call failure degrades to the literal
+ * "Summary unavailable." rather than propagating.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class AiDashboardExplanationService {
+
+ private final AiProviderService aiProviderService;
+ private final AiProperties aiProperties;
+ private final AiObservabilityService observabilityService;
+
+ /**
+ * Explains the current engineering health score in one sentence.
+ *
+ * @param score health score; multiplied by 100 for the prompt, so presumably in [0,1] — TODO confirm scale
+ * @param warnings warning labels joined into the prompt
+ * (NOTE(review): raw List — presumably List<String>; confirm generics were not stripped by the diff.
+ * NOTE(review): String.join throws NullPointerException if warnings is null — verify callers)
+ * @param sprintId sprint identifier attached to trace metadata; may be null
+ * @return one-sentence summary, or null when score is null
+ */
+ public String explainHealthScore(Double score, List warnings, String sprintId) {
+ if (score == null) {
+ return null;
+ }
+
+ String prompt = String.format("""
+ Explain why the engineering health score is %.1f%%.
+ Warnings detected: %s
+ Provide a one-sentence, concise executive summary.
+ """, score * 100, String.join(", ", warnings));
+
+ return callJudge(prompt, "health-explanation", sprintId);
+ }
+
+ /**
+ * Summarizes the current sprint pace in one sentence.
+ *
+ * @param summary sprint progress metrics; null yields null
+ * @param sprintId sprint identifier attached to trace metadata; may be null
+ * @return one-sentence pace summary, or null when summary is null
+ */
+ public String explainSprintProgress(AiIntelligenceDashboardDto.SprintProgressSummary summary, String sprintId) {
+ if (summary == null) {
+ return null;
+ }
+
+ String prompt = String.format("""
+ Explain the sprint progress: %.1f%% completed, %d days remaining, velocity %.1f.
+ Status: %s
+ Provide a one-sentence, concise summary of the current pace.
+ """, summary.getCompletedPercentage(), summary.getRemainingDays(), summary.getVelocity(), summary.getStatus());
+
+ return callJudge(prompt, "progress-explanation", sprintId);
+ }
+
+ /**
+ * Summarizes the impact of detected backlog leakage in one sentence.
+ *
+ * @param summary leakage metrics; null yields null
+ * @param sprintId sprint identifier attached to trace metadata; may be null
+ * @return one-sentence leakage summary, or null when summary is null
+ */
+ public String explainBacklogLeakage(AiIntelligenceDashboardDto.BacklogLeakageSummary summary, String sprintId) {
+ if (summary == null) {
+ return null;
+ }
+
+ String prompt = String.format("""
+ Explain backlog leakage: %d items detected.
+ Categories: %s
+ High risk items: %s
+ Provide a one-sentence summary of the leakage impact.
+ """, summary.getDetectedCount(), summary.getCategories(), summary.getHighRiskItems());
+
+ return callJudge(prompt, "leakage-explanation", sprintId);
+ }
+
+ /**
+ * Runs a single deterministic (temperature 0) "judge" call through the
+ * observability wrapper and returns the model's trimmed text, or
+ * "Summary unavailable." when the response is empty or the call throws.
+ */
+ private String callJudge(String promptText, String operation, String sprintId) {
+ ChatModel model = aiProviderService.getEvaluationChatModel();
+ String modelName = aiProperties.getEvaluation().getModel();
+ String provider = aiProperties.getEvaluation().getProvider();
+
+ try {
+ AiCallParams params = AiCallParams.builder()
+ .operation(operation)
+ .provider(provider)
+ .model(modelName)
+ .promptVersion("v1")
+ .input(promptText)
+ .call(() -> {
+ String systemPrompt = "You are an executive engineering assistant. Be concise (max 20 words).";
+ // NOTE(review): OpenAiChatOptions is built regardless of the configured evaluation
+ // provider — confirm non-OpenAI providers accept or safely ignore these options.
+ Prompt prompt = new Prompt(List.of(
+ new SystemMessage(systemPrompt),
+ new UserMessage(promptText)
+ ), OpenAiChatOptions.builder()
+ .temperature(0.0)
+ .model(modelName)
+ .build());
+
+ // Record the exact prompts (and sprint, when known) on the active trace
+ // before the model call so a failed call still carries its inputs.
+ observabilityService.updateTraceMetadata(m -> {
+ m.setSystemPrompt(systemPrompt);
+ m.setUserPrompt(promptText);
+ m.setFullPrompt(systemPrompt + "\n\n" + promptText);
+ if (sprintId != null) {
+ m.setSprint(sprintId);
+ }
+ });
+
+ ChatResponse response = model.call(prompt);
+ // NOTE(review): getOutput()/getText() may return null even when getResult() is
+ // non-null, in which case trim() would NPE (caught below, but masking the cause) —
+ // consider a null-safe extraction here.
+ String output = (response != null && response.getResult() != null)
+ ? response.getResult().getOutput().getText().trim()
+ : "Summary unavailable.";
+
+ if (response != null && response.getMetadata() != null) {
+ observabilityService.reportUsage(response.getMetadata().getUsage(), output);
+ }
+
+ observabilityService.updateTraceMetadata(m -> {
+ m.setRawResponse(output);
+ m.setFinalResponse(output);
+ });
+
+ return output;
+ })
+ .build();
+
+ return observabilityService.recordCall(params);
+ } catch (Exception e) {
+ // NOTE(review): only e.getMessage() is logged — pass e as the last log.error argument
+ // to preserve the stack trace for diagnosis.
+ log.error("Failed to generate dashboard explanation: {}", e.getMessage());
+ return "Summary unavailable.";
+ }
+ }
+}
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AiIntelligenceService.java b/backend/src/main/java/ch/goodone/backend/ai/application/AiIntelligenceService.java
new file mode 100644
index 000000000..7211ce1d7
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AiIntelligenceService.java
@@ -0,0 +1,650 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.application.provider.AiDashboardProviders;
+import ch.goodone.backend.ai.dto.AiIntelligenceDashboardDto;
+import ch.goodone.backend.ai.dto.DashboardProgressUpdate;
+import ch.goodone.backend.ai.dto.RiskRadarRequest;
+import ch.goodone.backend.ai.dto.RiskRadarResponse;
+import ch.goodone.backend.ai.dto.AdrDriftRequest;
+import ch.goodone.backend.ai.dto.AdrDriftResponse;
+import ch.goodone.backend.ai.dto.DeliveryForecast;
+import ch.goodone.backend.ai.dto.SprintRiskResponse;
+import ch.goodone.backend.ai.dto.TaskRelationship;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.model.signal.EngineeringSignal;
+import ch.goodone.backend.model.taxonomy.EngineeringSignalSeverity;
+import ch.goodone.backend.model.taxonomy.OutlookStatus;
+import ch.goodone.backend.model.Task;
+import ch.goodone.backend.model.TaskStatus;
+import ch.goodone.backend.service.ArchitectureRecommendationService;
+import ch.goodone.backend.dto.TaskDTO;
+import ch.goodone.backend.repository.TaskRepository;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class AiIntelligenceService {
+
+ private final AiUseCaseFacade useCases;
+ private final AiDashboardProviders providers;
+ private final TaskRepository taskRepository;
+ private final SprintResolutionService sprintResolutionService;
+ private final EngineeringIntelligenceAggregationService aggregationService;
+ private final ArchitectureRecommendationService recommendationService;
+ private final AiObservabilityService observabilityService;
+
+ private final Map<String, AiIntelligenceDashboardDto> dashboardCache = new java.util.concurrent.ConcurrentHashMap<>();
+
+ private final ExecutorService dashboardExecutor = Executors.newFixedThreadPool(12);
+
+ private static final Map<String, String> EPIC_TITLES = Map.ofEntries(
+ Map.entry("AI-INFRA", "Platform Infrastructure"),
+ Map.entry("AI-OPS", "AI Operations"),
+ Map.entry("AI-OBS", "AI Observability"),
+ Map.entry("AI-BE", "AI Backend Platform"),
+ Map.entry("AI-INT", "Intelligence Integration"),
+ Map.entry("AI-EVAL", "AI Evaluation"),
+ Map.entry("AI-GOV", "AI Governance"),
+ Map.entry("AI-AI", "Engineering Intelligence"),
+ Map.entry("AI-ARCH", "Architecture Intelligence"),
+ Map.entry("AI-DEC", "Decision Intelligence"),
+ Map.entry("AI-IMP", "Impact Analysis"),
+ Map.entry("AI-REL", "Release Intelligence"),
+ Map.entry("AI-SPR", "Sprint Intelligence"),
+ Map.entry("AI-COP", "Developer Copilot"),
+ Map.entry("AI-UI", "AI Interface"),
+ Map.entry("AI-UX", "User Experience")
+ );
+
+ private static final Map<String, String> EPIC_DESCRIPTIONS = Map.ofEntries(
+ Map.entry("AI-INFRA", "Runtime configuration, model providers, and deployment infrastructure."),
+ Map.entry("AI-OPS", "AI operations and reliability tasks including operational stabilization."),
+ Map.entry("AI-OBS", "Dashboards, metrics, cost monitoring, and operational visibility."),
+ Map.entry("AI-BE", "Backend platform tasks related to AI services and persistence."),
+ Map.entry("AI-INT", "Canonical models, service extraction, and taxonomy harmonization."),
+ Map.entry("AI-EVAL", "Benchmark datasets, retrieval tests, and AI trace tools."),
+ Map.entry("AI-GOV", "Task contract standards and CI enforcement of engineering policies."),
+ Map.entry("AI-AI", "Engineering knowledge and features that improve project context understanding."),
+ Map.entry("AI-ARCH", "ADR analysis, architecture explanation, and drift detection."),
+ Map.entry("AI-DEC", "Engineering decision intelligence tasks."),
+ Map.entry("AI-IMP", "Impact analysis for engineering change simulations."),
+ Map.entry("AI-REL", "Release intelligence tasks including readiness analysis."),
+ Map.entry("AI-SPR", "Sprint risk prediction and delivery forecasting."),
+ Map.entry("AI-COP", "Interactive AI engineering assistance for developer workflows."),
+ Map.entry("AI-UI", "User interface component and layout tasks for AI features."),
+ Map.entry("AI-UX", "User experience and UI improvements for AI features.")
+ );
+
+ // Timeout constants for subtasks
+ private static final long FAST_DATA_TIMEOUT_SEC = 60;
+ private static final long MEDIUM_AI_TIMEOUT_SEC = 150;
+ private static final long HEAVY_AI_TIMEOUT_SEC = 600;
+
+ public AiIntelligenceDashboardDto getDashboard(String sprintId) {
+ String trimmedId = (sprintId != null) ? sprintId.trim() : null;
+
+ if (trimmedId != null && dashboardCache.containsKey(trimmedId)) {
+ log.info("Returning cached AI Intelligence Dashboard for sprint: {}", trimmedId);
+ return dashboardCache.get(trimmedId);
+ }
+
+ log.info("Aggregating AI Intelligence Dashboard for sprint: {} in parallel", trimmedId);
+
+ // Resolve authoritative task keys first to ensure consistent scoping across all sections
+ Set<String> authoritativeKeys = sprintResolutionService.resolveSprintTaskKeys(trimmedId);
+ List<String> taskKeysList = new ArrayList<>(authoritativeKeys);
+
+ // Load authoritative tasks (authoritative source)
+ List<Task> authoritativeTasks = sprintResolutionService.resolveSprintTasks(trimmedId);
+
+ var sprintRiskFuture = runSubtask(trimmedId, "sprint-risk",
+ () -> useCases.sprintRiskPredictor.execute(trimmedId, authoritativeTasks),
+ 0, null, FAST_DATA_TIMEOUT_SEC, null);
+
+ var forecastFuture = runSubtask(trimmedId, "forecast",
+ () -> useCases.deliveryForecaster.forecast(trimmedId, authoritativeTasks),
+ 0, null, FAST_DATA_TIMEOUT_SEC, null);
+
+ var riskRadarFuture = runSubtask(trimmedId, "risk-radar",
+ () -> useCases.riskRadarUseCase.execute(RiskRadarRequest.builder()
+ .tasksets(List.of(trimmedId))
+ .taskKeys(taskKeysList)
+ .build()),
+ 0, null, MEDIUM_AI_TIMEOUT_SEC, null);
+
+ var driftFuture = runSubtask(trimmedId, "adr-drift",
+ () -> useCases.adrDriftUseCase.execute(AdrDriftRequest.builder()
+ .tasksets(List.of(trimmedId))
+ .taskKeys(taskKeysList)
+ .build()),
+ 0, null, HEAVY_AI_TIMEOUT_SEC, null);
+
+ var tasksFuture = CompletableFuture.completedFuture(authoritativeTasks);
+
+ var relationshipsFuture = runSubtask(trimmedId, "task-relationships",
+ () -> useCases.taskRelationshipService.analyzeTaskset(trimmedId),
+ 0, null, HEAVY_AI_TIMEOUT_SEC, null);
+
+ var signalsFuture = CompletableFuture.allOf(sprintRiskFuture, forecastFuture, riskRadarFuture, driftFuture, relationshipsFuture)
+ .handle((v, ex) -> {
+ List<EngineeringSignal> aggregated = new java.util.ArrayList<>();
+ addSignalsIfDone(riskRadarFuture, "risk-radar-engine", aggregated);
+ addSignalsIfDone(forecastFuture, "forecast-engine", aggregated);
+ addSignalsIfDone(driftFuture, "drift-engine", aggregated);
+ if (relationshipsFuture.isDone() && !relationshipsFuture.isCompletedExceptionally()) {
+ var rels = relationshipsFuture.join();
+ if (rels != null) {
+ aggregated.addAll(rels.stream().map(r -> r.toSignal("task-relationship-engine")).toList());
+ }
+ }
+ return aggregated;
+ });
+
+ var regressionFuture = runSubtask(trimmedId, "ai-regression",
+ providers.regressionTrendProvider::provide,
+ 0, null, FAST_DATA_TIMEOUT_SEC, null);
+
+ var leakageFuture = runSubtask(trimmedId, "backlog-leakage",
+ providers.backlogLeakageProvider::provide,
+ 0, null, FAST_DATA_TIMEOUT_SEC, null);
+
+ try {
+ CompletableFuture.allOf(sprintRiskFuture, forecastFuture, riskRadarFuture, driftFuture,
+ tasksFuture, relationshipsFuture, signalsFuture, regressionFuture, leakageFuture)
+ .orTimeout(HEAVY_AI_TIMEOUT_SEC + 5, TimeUnit.SECONDS)
+ .join();
+ } catch (Exception e) {
+ log.warn("Dashboard aggregation partially timed out: {}", e.getMessage());
+ }
+
+ DashboardFutures futures = new DashboardFutures(
+ sprintRiskFuture, forecastFuture, riskRadarFuture, driftFuture,
+ tasksFuture, relationshipsFuture, signalsFuture, regressionFuture, leakageFuture
+ );
+ var dashboard = assembleDashboard(trimmedId, futures);
+ String outcome = dashboard.isPartial() ? "partial" : "success";
+ observabilityService.recordDashboardOutcome("intelligence", outcome);
+
+ if (trimmedId != null && !dashboard.isPartial()) {
+ dashboardCache.put(trimmedId, dashboard);
+ }
+
+ return dashboard;
+ }
+
+ public List<String> getAvailableSprints() {
+ return sprintResolutionService.discoverAvailableSprints();
+ }
+
+ public List<String> getTaskGroups() {
+ return sprintResolutionService.discoverTaskGroups();
+ }
+
+ private void addSignalsIfDone(CompletableFuture<?> future, String engine, List<EngineeringSignal> aggregated) {
+ if (future.isDone() && !future.isCompletedExceptionally()) {
+ var result = future.getNow(null);
+ if (result instanceof RiskRadarResponse r) {
+ aggregated.addAll(r.toSignals(engine));
+ } else if (result instanceof DeliveryForecast d) {
+ aggregated.addAll(d.toSignals(engine));
+ } else if (result instanceof AdrDriftResponse a) {
+ aggregated.addAll(a.toSignals(engine));
+ }
+ }
+ }
+
+ /**
+ * Streams the dashboard progress and data via a consumer.
+ */
+ public void streamDashboard(String sprintId, Consumer progressConsumer) {
+ String trimmedId = (sprintId != null) ? sprintId.trim() : null;
+
+ if (trimmedId != null && dashboardCache.containsKey(trimmedId)) {
+ log.info("Streaming cached AI Intelligence Dashboard for sprint: {}", trimmedId);
+ progressConsumer.accept(DashboardProgressUpdate.builder()
+ .progress(100)
+ .status("Loading cached analysis...")
+ .dashboard(dashboardCache.get(trimmedId))
+ .completed(true)
+ .build());
+ return;
+ }
+
+ log.info("Streaming AI Intelligence Dashboard for sprint: {}", trimmedId);
+
+ progressConsumer.accept(DashboardProgressUpdate.builder().progress(5).status("Initializing analysis...").build());
+
+ // Resolve authoritative task keys first to ensure consistent scoping across all sections
+ Set<String> authoritativeKeys = sprintResolutionService.resolveSprintTaskKeys(trimmedId);
+ List<String> taskKeysList = new ArrayList<>(authoritativeKeys);
+
+ // 1. FAST PATH: Roadmap data (authoritative source)
+ var tasksFuture = CompletableFuture.supplyAsync(() -> {
+ try {
+ log.debug("Loading authoritative tasks for stream path (sprint: {})", trimmedId);
+ List<Task> combined = sprintResolutionService.resolveSprintTasks(trimmedId);
+
+ var dashboard = AiIntelligenceDashboardDto.builder()
+ .sprintId(trimmedId)
+ .epics(calculateEpics(combined, null, null))
+ .partial(true)
+ .build();
+
+ progressConsumer.accept(DashboardProgressUpdate.builder()
+ .progress(15)
+ .status("Roadmap data loaded")
+ .dashboard(dashboard)
+ .build());
+ return combined;
+ } catch (Exception e) {
+ log.error("Roadmap data loading failed: {}", e.getMessage());
+ return List.of();
+ }
+ }, dashboardExecutor);
+
+ // 2. AI SUBTASKS with individual timeouts and fallbacks
+ // We wait for the authoritative task list before starting some subtasks to ensure consistent scoping
+ var sprintRiskFuture = tasksFuture.thenCompose(tasks ->
+ runSubtask(trimmedId, "sprint-risk",
+ () -> useCases.sprintRiskPredictor.execute(trimmedId, tasks),
+ 30, "Sprint risk assessment complete", FAST_DATA_TIMEOUT_SEC, progressConsumer));
+
+ var forecastFuture = tasksFuture.thenCompose(tasks ->
+ runSubtask(trimmedId, "forecast",
+ () -> useCases.deliveryForecaster.forecast(trimmedId, tasks),
+ 45, "Delivery forecast updated", FAST_DATA_TIMEOUT_SEC, progressConsumer));
+
+ var riskRadarFuture = runSubtask(trimmedId, "risk-radar",
+ () -> useCases.riskRadarUseCase.execute(RiskRadarRequest.builder()
+ .tasksets(List.of(trimmedId))
+ .taskKeys(taskKeysList)
+ .build()),
+ 60, "Risk patterns analyzed", HEAVY_AI_TIMEOUT_SEC, progressConsumer);
+
+ var driftFuture = runSubtask(trimmedId, "adr-drift",
+ () -> useCases.adrDriftUseCase.execute(AdrDriftRequest.builder()
+ .tasksets(List.of(trimmedId))
+ .taskKeys(taskKeysList)
+ .build()),
+ 75, "Architecture drift detection complete", HEAVY_AI_TIMEOUT_SEC, progressConsumer);
+
+ var relationshipsFuture = runSubtask(trimmedId, "task-relationships",
+ () -> useCases.taskRelationshipService.analyzeTaskset(trimmedId),
+ 85, "Task relationships analyzed", HEAVY_AI_TIMEOUT_SEC, progressConsumer);
+
+ var signalsFuture = CompletableFuture.allOf(sprintRiskFuture, forecastFuture, riskRadarFuture, driftFuture, relationshipsFuture)
+ .handle((v, ex) -> {
+ List<EngineeringSignal> aggregated = new java.util.ArrayList<>();
+ addSignalsIfDone(riskRadarFuture, "risk-radar-engine", aggregated);
+ addSignalsIfDone(forecastFuture, "forecast-engine", aggregated);
+ addSignalsIfDone(driftFuture, "drift-engine", aggregated);
+ if (relationshipsFuture.isDone() && !relationshipsFuture.isCompletedExceptionally()) {
+ var rels = relationshipsFuture.join();
+ if (rels != null) {
+ aggregated.addAll(rels.stream().map(r -> r.toSignal("task-relationship-engine")).toList());
+ }
+ }
+ progressConsumer.accept(DashboardProgressUpdate.builder().progress(95).status("Signal aggregation complete").build());
+ return aggregated;
+ });
+
+ var regressionFuture = runSubtask(trimmedId, "ai-regression",
+ providers.regressionTrendProvider::provide,
+ 90, "Analyzing AI regressions...", FAST_DATA_TIMEOUT_SEC, progressConsumer);
+
+ var leakageFuture = runSubtask(trimmedId, "backlog-leakage",
+ providers.backlogLeakageProvider::provide,
+ 92, "Checking backlog leakage...", FAST_DATA_TIMEOUT_SEC, progressConsumer);
+
+ // Wait for all to finish OR for global timeout
+ try {
+ CompletableFuture.allOf(tasksFuture, sprintRiskFuture, forecastFuture, riskRadarFuture,
+ driftFuture, relationshipsFuture, signalsFuture, regressionFuture, leakageFuture)
+ .orTimeout(HEAVY_AI_TIMEOUT_SEC + 5, TimeUnit.SECONDS)
+ .join();
+ } catch (Exception e) {
+ log.warn("Dashboard stream partially timed out in stream: {}", e.getMessage());
+ }
+
+ // Send final results
+ DashboardFutures futures = new DashboardFutures(
+ sprintRiskFuture, forecastFuture, riskRadarFuture, driftFuture,
+ tasksFuture, relationshipsFuture, signalsFuture, regressionFuture, leakageFuture
+ );
+ var dashboard = assembleDashboard(trimmedId, futures);
+
+ if (trimmedId != null && !dashboard.isPartial()) {
+ dashboardCache.put(trimmedId, dashboard);
+ }
+
+ String outcome = dashboard.isPartial() ? "partial" : "success";
+ observabilityService.recordDashboardOutcome("intelligence", outcome);
+ progressConsumer.accept(DashboardProgressUpdate.builder()
+ .progress(100)
+ .status("Analysis complete")
+ .dashboard(dashboard)
+ .completed(true)
+ .timedOutSections(dashboard.getTimedOutSections())
+ .build());
+ }
+
+ private <T> CompletableFuture<T> runSubtask(String sprintId, String name, Supplier<T> task, int progress, String status, long timeoutSec, Consumer<DashboardProgressUpdate> progressConsumer) {
+ long startTime = System.currentTimeMillis();
+ return CompletableFuture.supplyAsync(() -> {
+ try {
+ T result = task.get();
+ long duration = System.currentTimeMillis() - startTime;
+ observabilityService.recordDashboardSubtask(name, null, null, duration, true, false, false);
+ if (progressConsumer != null) {
+ progressConsumer.accept(DashboardProgressUpdate.builder().progress(progress).status(status).build());
+ }
+ return result;
+ } catch (Exception e) {
+ log.error("AI Intelligence subtask '{}' failed for sprint {}: {}", name, sprintId, e.getMessage(), e);
+ throw (e instanceof RuntimeException re ? re : new RuntimeException(e));
+ }
+ }, dashboardExecutor)
+ .orTimeout(timeoutSec, TimeUnit.SECONDS)
+ .whenComplete((result, ex) -> {
+ if (ex != null) {
+ long duration = System.currentTimeMillis() - startTime;
+ boolean isTimeout = ex instanceof java.util.concurrent.TimeoutException || ex.getCause() instanceof java.util.concurrent.TimeoutException;
+ observabilityService.recordDashboardSubtask(name, null, null, duration, false, isTimeout, isTimeout);
+ }
+ });
+ }
+
+ private record DashboardFutures(
+ CompletableFuture<SprintRiskResponse> sprintRiskFuture,
+ CompletableFuture<DeliveryForecast> forecastFuture,
+ CompletableFuture<RiskRadarResponse> riskRadarFuture,
+ CompletableFuture<AdrDriftResponse> driftFuture,
+ CompletableFuture<List<Task>> tasksFuture,
+ CompletableFuture<List<TaskRelationship>> relationshipsFuture,
+ CompletableFuture<List<EngineeringSignal>> signalsFuture,
+ CompletableFuture regressionFuture,
+ CompletableFuture leakageFuture
+ ) {}
+
+ private AiIntelligenceDashboardDto assembleDashboard(String sprintId, DashboardFutures futures) {
+
+ DashboardAssemblyContext ctx = new DashboardAssemblyContext(sprintId);
+
+ var sprintRisk = getWithTimeoutHandling(
+ futures.sprintRiskFuture,
+ SprintRiskResponse.builder().sprint(sprintId).build(),
+ "Sprint Risk Assessment",
+ "Sprint risk assessment failed.",
+ ctx.warnings,
+ ctx.timedOutSections);
+
+ var forecast = getWithTimeoutHandling(
+ futures.forecastFuture,
+ DeliveryForecast.builder().status(OutlookStatus.AT_RISK).build(),
+ "Delivery Forecast",
+ "Delivery forecast failed.",
+ ctx.warnings,
+ ctx.timedOutSections);
+
+ var riskRadar = getWithTimeoutHandling(
+ futures.riskRadarFuture,
+ RiskRadarResponse.builder().build(),
+ "Risk Radar",
+ "Risk patterns analysis failed.",
+ ctx.warnings,
+ ctx.timedOutSections);
+
+ var driftResponse = getWithTimeoutHandling(
+ futures.driftFuture,
+ AdrDriftResponse.builder().build(),
+ "Architecture Drift Detection",
+ "Architecture drift detection failed.",
+ ctx.warnings,
+ ctx.timedOutSections);
+
+ var tasks = getSafe(futures.tasksFuture, List.of());
+ if (tasks.isEmpty()) {
+ ctx.warnings.add("No task data found or loaded.");
+ }
+
+ var relationships = getSafe(futures.relationshipsFuture, List.of());
+ if (relationships.isEmpty() && isTimedOut(futures.relationshipsFuture)) {
+ ctx.timedOutSections.add("Task Relationships");
+ }
+
+ var regression = getSafe(futures.regressionFuture, null);
+ if (regression == null && isTimedOut(futures.regressionFuture)) {
+ ctx.timedOutSections.add("AI Regression Trends");
+ }
+
+ var leakage = getSafe(futures.leakageFuture, null);
+ if (leakage == null && isTimedOut(futures.leakageFuture)) {
+ ctx.timedOutSections.add("Backlog Leakage Detection");
+ }
+
+ var sprintProgress = providers.sprintProgressProvider.provide(tasks, forecast);
+ var signals = getSafe(futures.signalsFuture, List.of());
+ var epics = calculateEpics(tasks, forecast, riskRadar);
+ double healthScore = aggregationService.calculateOverallHealth(signals);
+
+ OutlookStatus healthStatus = providers.healthPredictorService.predictHealth(sprintProgress, leakage, healthScore);
+ if (sprintProgress != null) {
+ sprintProgress.setStatus(healthStatus);
+ sprintProgress.setExplanation(providers.explanationService.explainSprintProgress(sprintProgress, sprintId));
+ }
+ if (leakage != null) {
+ leakage.setExplanation(providers.explanationService.explainBacklogLeakage(leakage, sprintId));
+ }
+
+ var recommendations = recommendationService.generateRecommendations(sprintId);
+
+ return AiIntelligenceDashboardDto.builder()
+ .sprintId(sprintId)
+ .currentRisk(sprintRisk)
+ .deliveryForecast(forecast)
+ .topRisks(riskRadar.getHighRisks())
+ .architectureDrifts(driftResponse.getPotentialDrifts())
+ .aiRegression(regression)
+ .backlogLeakage(leakage)
+ .sprintProgress(sprintProgress)
+ .epics(epics)
+ .taskRelationships(mapToRelationshipDtos(relationships))
+ .suggestions(mapToSuggestionDtos(recommendations))
+ .healthScore(healthScore)
+ .healthExplanation(providers.explanationService.explainHealthScore(healthScore, ctx.warnings, sprintId))
+ .warnings(ctx.warnings)
+ .timedOutSections(ctx.timedOutSections)
+ .partial(!ctx.timedOutSections.isEmpty())
+ .build();
+ }
+
+ private static class DashboardAssemblyContext {
+ final List<String> warnings = new java.util.ArrayList<>();
+ final List<String> timedOutSections = new java.util.ArrayList<>();
+ final String sprintId;
+
+ DashboardAssemblyContext(String sprintId) {
+ this.sprintId = sprintId;
+ }
+ }
+
+ private List<AiIntelligenceDashboardDto.TaskRelationshipDto> mapToRelationshipDtos(List<TaskRelationship> relationships) {
+ return relationships.stream()
+ .map(r -> AiIntelligenceDashboardDto.TaskRelationshipDto.builder()
+ .sourceTitle(r.getSourceTaskId())
+ .targetTitle(r.getTargetTaskId())
+ .type(r.getRelationshipType())
+ .build())
+ .toList();
+ }
+
+ private List mapToSuggestionDtos(List recommendations) {
+ return recommendations.stream()
+ .map(r -> AiIntelligenceDashboardDto.AiSuggestionDto.builder()
+ .id(r.getId())
+ .title(r.getTitle())
+ .description(r.getDescription())
+ .severity(r.getSeverity())
+ .impact(r.getImpact())
+ .category(r.getCategory())
+ .recommendedAction(r.getRecommendedAction())
+ .build())
+ .toList();
+ }
+
+ private boolean isTimedOut(CompletableFuture<?> future) {
+ if (future == null || !future.isCompletedExceptionally()) {
+ return false;
+ }
+ try {
+ future.join();
+ return false;
+ } catch (Exception e) {
+ // Check if it's a timeout
+ Throwable cause = e.getCause();
+ while (cause != null) {
+ if (cause instanceof java.util.concurrent.TimeoutException) {
+ return true;
+ }
+ cause = cause.getCause();
+ }
+ return e instanceof java.util.concurrent.TimeoutException;
+ }
+ }
+
+ private <T> T getSafe(CompletableFuture<T> future, T defaultValue) {
+ if (future == null) {
+ return defaultValue;
+ }
+ try {
+ return future.getNow(defaultValue);
+ } catch (Exception e) {
+ log.warn("Failed to get value from future, using default. Error: {}", e.getMessage());
+ return defaultValue;
+ }
+ }
+
+ private <T> T getWithTimeoutHandling(CompletableFuture<T> future, T defaultValue, String sectionLabel, String failureMessage,
+ List<String> warnings, List<String> timedOutSections) {
+ T value = getSafe(future, null);
+ if (value != null) {
+ return value;
+ }
+ if (isTimedOut(future)) {
+ timedOutSections.add(sectionLabel);
+ } else if (failureMessage != null) {
+ warnings.add(failureMessage);
+ }
+ return defaultValue;
+ }
+
+
+ private List<AiIntelligenceDashboardDto.EpicProgressDto> calculateEpics(
+ List<Task> tasks,
+ DeliveryForecast forecast,
+ RiskRadarResponse riskRadar) {
+ if (tasks.isEmpty()) {
+ return List.of();
+ }
+
+ // Improved grouping by ID prefix
+ final String GROUP_GENERAL = "General";
+ Map<String, List<Task>> groups = tasks.stream()
+ .collect(Collectors.groupingBy(t -> {
+ String title = t.getTitle();
+ if (title.contains("-")) {
+ int firstDash = title.indexOf("-");
+ int secondDash = title.indexOf("-", firstDash + 1);
+ return (secondDash != -1) ? title.substring(0, secondDash) : title.substring(0, firstDash);
+ }
+ return GROUP_GENERAL;
+ }));
+
+ log.debug("Calculating status for {} epics", groups.size());
+
+ return groups.entrySet().stream()
+ .map(e -> epicDtoFromGroup(e, forecast, riskRadar))
+ .sorted((e1, e2) -> {
+ if (e1.getId().equals(GROUP_GENERAL)) {
+ return 1;
+ }
+ if (e2.getId().equals(GROUP_GENERAL)) {
+ return -1;
+ }
+ return e1.getId().compareTo(e2.getId());
+ })
+ .toList();
+ }
+
+ private AiIntelligenceDashboardDto.EpicProgressDto epicDtoFromGroup(Map.Entry<String, List<Task>> e,
+ DeliveryForecast forecast,
+ RiskRadarResponse riskRadar) {
+ long total = e.getValue().size();
+ long completed = e.getValue().stream()
+ .filter(t -> t.getStatus() != null && TaskStatus.DONE.equals(t.getStatus()))
+ .count();
+
+ String epicId = e.getKey();
+ String title = EPIC_TITLES.getOrDefault(epicId, epicId + " Roadmap");
+ String description = EPIC_DESCRIPTIONS.getOrDefault(epicId, "General project tasks.");
+
+ double progress = total > 0 ? (double) completed / total : 0.0;
+
+ OutlookStatus status = OutlookStatus.ON_TRACK;
+ if (completed == total && total > 0) {
+ status = OutlookStatus.STABLE;
+ } else if (forecast != null && (forecast.getStatus() == OutlookStatus.DELAYED || forecast.getStatus() == OutlookStatus.AT_RISK)) {
+ status = forecast.getStatus();
+ } else if (riskRadar != null && hasHighRisksForEpic(riskRadar, epicId)) {
+ status = OutlookStatus.AT_RISK;
+ } else if (progress < 0.3 && total > 0 && (forecast != null || riskRadar != null)) {
+ status = OutlookStatus.AT_RISK;
+ }
+
+ return AiIntelligenceDashboardDto.EpicProgressDto.builder()
+ .id(epicId)
+ .title(title)
+ .description(description)
+ .totalTasks((int) total)
+ .completedTasks((int) completed)
+ .progress(progress)
+ .status(status)
+ .tasks(e.getValue().stream()
+ .map(TaskDTO::fromEntity)
+ .toList())
+ .build();
+ }
+
+ private boolean hasHighRisksForEpic(RiskRadarResponse riskRadar, String epicId) {
+ if (riskRadar.getHighRisks() == null) {
+ return false;
+ }
+ return riskRadar.getHighRisks().stream()
+ .anyMatch(r -> (r.getPattern() != null && r.getPattern().contains(epicId)) ||
+ (r.getEvidence() != null && r.getEvidence().stream().anyMatch(ev -> ev.contains(epicId))));
+ }
+
+
+ public void invalidateCache(String sprintId) {
+ if (sprintId != null) {
+ log.info("Invalidating dashboard cache for sprint: {}", sprintId);
+ dashboardCache.remove(sprintId.trim());
+ }
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/AiUseCaseFacade.java b/backend/src/main/java/ch/goodone/backend/ai/application/AiUseCaseFacade.java
new file mode 100644
index 000000000..cdfb5db4f
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/AiUseCaseFacade.java
@@ -0,0 +1,17 @@
+package ch.goodone.backend.ai.application;
+
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Component;
+
+/**
+ * Facade grouping AI use cases to reduce parameter count in AiIntelligenceService.
+ */
+@Component
+@RequiredArgsConstructor
+public class AiUseCaseFacade {
+ public final SprintRiskPredictorUseCase sprintRiskPredictor;
+ public final DeliveryForecasterUseCase deliveryForecaster;
+ public final RiskRadarUseCase riskRadarUseCase;
+ public final AdrDriftUseCase adrDriftUseCase;
+ public final TaskRelationshipService taskRelationshipService;
+}
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/ArchitectureExplainUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/ArchitectureExplainUseCase.java
index 9abf61f5f..a87c3ef7b 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/ArchitectureExplainUseCase.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/ArchitectureExplainUseCase.java
@@ -2,37 +2,56 @@
import ch.goodone.backend.ai.AiProperties;
import ch.goodone.backend.ai.AiProviderService;
+import ch.goodone.backend.ai.context.AssembledContext;
+import ch.goodone.backend.ai.context.CopilotContextOrchestrator;
import ch.goodone.backend.ai.dto.ArchitectureExplainRequest;
-import ch.goodone.backend.ai.dto.ArchitectureExplainResult;
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.infrastructure.AiPipeline;
+import ch.goodone.backend.ai.observability.AiCallParams;
import ch.goodone.backend.ai.observability.AiObservabilityService;
-import ch.goodone.backend.ai.prompt.StructuredOutputService;
-import ch.goodone.backend.docs.retrieval.DocRetrievalService;
-import ch.goodone.backend.model.DocChunk;
+import ch.goodone.backend.ai.prompt.DeterministicPromptBuilder;
+import ch.goodone.backend.ai.prompt.PromptBuildResult;
+import ch.goodone.backend.ai.prompt.PromptManifestService;
+import ch.goodone.backend.ai.governance.AiFailureClassifier;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+import ch.goodone.backend.model.taxonomy.CopilotContextMode;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Use case for explaining the project architecture using AI based on a question.
+ * Migrated to AiPipeline for strict structured output enforcement.
*/
@Service
@RequiredArgsConstructor
@Slf4j
-public class ArchitectureExplainUseCase {
+public class ArchitectureExplainUseCase implements CopilotUseCase {
- private final AiProviderService aiProviderService;
- private final StructuredOutputService structuredOutputService;
- private final DocRetrievalService retrievalService;
+ private static final String FEATURE_ARCH_EXPLAIN = "architecture-explain";
+
+ private final AiPipeline aiPipeline;
+ private final CopilotContextOrchestrator contextOrchestrator;
private final AiObservabilityService observabilityService;
private final AiProperties aiProperties;
+ private final PromptManifestService promptManifestService;
+ private final DeterministicPromptBuilder promptBuilder;
+ private final AiFailureClassifier failureClassifier;
- @Value("classpath:prompts/architecture/v1/explain.st")
- private Resource explainPromptResource;
+ /**
+ * Executes the architecture explanation logic.
+ *
+ * @param request The request object.
+ * @return The explanation result.
+ */
+ @Override
+ public CopilotResponse execute(Object request) {
+ return execute((ArchitectureExplainRequest) request);
+ }
/**
* Executes the architecture explanation logic.
@@ -40,63 +59,128 @@ public class ArchitectureExplainUseCase {
* @param request The request containing the question.
* @return The explanation result.
*/
- public ArchitectureExplainResult execute(ArchitectureExplainRequest request) {
+ public CopilotResponse execute(ArchitectureExplainRequest request) {
if (aiProperties.getArchitecture() != null && !aiProperties.getArchitecture().isEnabled()) {
- return new ArchitectureExplainResult(
- "AI Architecture Explanation is currently disabled by configuration.",
- List.of("Disabled"),
- List.of()
- );
+ return CopilotResponse.builder()
+ .answer("AI Architecture Explanation is currently disabled by configuration.")
+ .suggestedActions(List.of("Contact Admin"))
+ .build();
}
int topK = aiProperties.getArchitecture().getTopK();
- List chunks = retrievalService.retrieve(request.getQuestion(), topK);
-
- String context = chunks.isEmpty()
- ? "No additional context available."
- : formatContext(chunks);
+ CopilotContextMode mode = request.getContextMode() != null ? request.getContextMode() : CopilotContextMode.ARCHITECTURE_QA;
+ AssembledContext contextResult = contextOrchestrator.assemble(request.getQuestion(), mode, topK, request.getSprintId());
String provider = aiProperties.getArchitecture().getProvider();
String model = aiProperties.getArchitecture().getModel();
+ String promptVersion = promptManifestService.getPromptInfo(FEATURE_ARCH_EXPLAIN).getVersion();
+
+ // Calculate prompt hash for transparency
+ PromptBuildResult buildResult = promptBuilder.build(
+ FEATURE_ARCH_EXPLAIN, // Using prompt ID as surrogate for system prompt for hashing
+ request.getQuestion(),
+ contextResult.getRetrievedChunks(),
+ mode.name()
+ );
try {
- ArchitectureExplainResult result = observabilityService.recordCall(
- "architecture-explain",
- provider,
- model,
- "v1",
- request.getQuestion(),
- () -> structuredOutputService.call(
- aiProviderService.getArchitectureChatModel(),
- explainPromptResource,
- Map.of(
- "userInput", request.getQuestion(),
- "context", context
- ),
- ArchitectureExplainResult.class
- )
- );
- log.debug("AI Architecture Explain Result: {}", result);
- return result;
+ long startTime = System.currentTimeMillis();
+ AiCallParams params = AiCallParams.builder()
+ .operation(FEATURE_ARCH_EXPLAIN)
+ .provider(provider)
+ .model(model)
+ .promptVersion(promptVersion)
+ .promptHash(buildResult.promptHash())
+ .input(request.getQuestion())
+ .capability(getCapability().name())
+ .contextMode(mode.name())
+ .call(() -> performAiExplanation(request, buildResult, topK, mode, contextResult, provider, model))
+ .build();
+
+ CopilotResponse response = observabilityService.recordCall(params);
+
+ // Set latency and other fields after recording
+ if (response != null) {
+ response.setLatencyMs(System.currentTimeMillis() - startTime);
+ if (response.getMetadata() == null) {
+ response.setMetadata(new HashMap<>());
+ }
+ if (response.getEvidence() != null) {
+ response.getMetadata().put("sources", response.getEvidence());
+ }
+ if (contextResult.hasFailures()) {
+ response.setPartialFailures(contextResult.getPartialFailures());
+ }
+ }
+
+ return response;
} catch (Exception e) {
log.error("AI Architecture Explain failed: {}", e.getMessage());
- return new ArchitectureExplainResult(
- "AI service failed to explain architecture: " + e.getMessage(),
- List.of("Error"),
- List.of()
- );
+ return CopilotResponse.builder()
+ .answer("AI service failed to explain architecture: " + e.getMessage())
+ .build();
}
}
- private String formatContext(List chunks) {
- StringBuilder sb = new StringBuilder();
- for (DocChunk chunk : chunks) {
- sb.append("Source: ").append(chunk.getSource().getPath()).append("\n");
- if (chunk.getHeading() != null) {
- sb.append("Heading: ").append(chunk.getHeading()).append("\n");
+ private CopilotResponse performAiExplanation(ArchitectureExplainRequest request, PromptBuildResult buildResult, int topK, CopilotContextMode mode, AssembledContext contextResult, String provider, String model) {
+ // Populate early trace metadata
+ observabilityService.updateTraceMetadata(m -> {
+ m.setSystemPrompt(buildResult.systemPrompt());
+ m.setUserPrompt(buildResult.userPrompt());
+ m.setFullPrompt(buildResult.fullPrompt());
+ m.setPromptHash(buildResult.promptHash());
+ if (request.getSprintId() != null) {
+ m.setSprint(request.getSprintId());
}
- sb.append("Content: ").append(chunk.getContent()).append("\n\n");
+ });
+
+ CopilotResponse result = aiPipeline.execute(AiPipeline.AiRequest.builder()
+ .query(request.getQuestion())
+ .mode(mode)
+ .topK(topK)
+ .sprintId(request.getSprintId())
+ .feature(FEATURE_ARCH_EXPLAIN)
+ .systemPrompt(buildResult.systemPrompt())
+ .schemaName("copilotAnswer")
+ .build(),
+ CopilotResponse.class);
+
+ if (result != null) {
+ updateResultMetadata(result, buildResult, contextResult, provider, model);
}
- return sb.toString();
+ return result;
+ }
+
+ private void updateResultMetadata(CopilotResponse result, PromptBuildResult buildResult, AssembledContext contextResult, String provider, String model) {
+ String answer = result.getAnswer();
+ AiFailureClassifier.ClassificationResult classification = failureClassifier.classify(answer);
+ double qualityScore = failureClassifier.calculateQualityScore(answer);
+
+ observabilityService.updateTraceMetadata(m -> {
+ m.setRetrievedDocumentPaths(contextResult.getRetrievedChunks());
+ m.setFailureClassification(classification.getFailureMode());
+ m.setQualityScore(qualityScore);
+ });
+
+ Map metadata = result.getMetadata() != null ?
+ new HashMap<>(result.getMetadata()) : new HashMap<>();
+ metadata.put("model", model);
+ metadata.put("provider", provider);
+ metadata.put("promptHash", buildResult.promptHash());
+ result.setMetadata(metadata);
+
+ // Explicit transparency fields (Phase 5)
+ result.setProvider(provider);
+ result.setModel(model);
+ result.setPromptHash(buildResult.promptHash());
+ result.setRetrievedDocumentCount(contextResult.getRetrievedChunks() != null ? contextResult.getRetrievedChunks().size() : 0);
+ result.setRetrievedDocumentPaths(contextResult.getRetrievedChunks());
+ result.setFallbackUsed(false); // Initial call is not a fallback
+ }
+
+ @Override
+ public CopilotCapability getCapability() {
+ return CopilotCapability.ARCHITECTURE_QA;
}
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/BacklogAnalyzerUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/BacklogAnalyzerUseCase.java
new file mode 100644
index 000000000..83e6ac7d2
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/BacklogAnalyzerUseCase.java
@@ -0,0 +1,98 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.BacklogAnalysisResponse;
+import ch.goodone.backend.ai.dto.EngineeringArtifact;
+import ch.goodone.backend.ai.knowledge.EngineeringContextService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Use case for analyzing the backlog tasks and deriving signals.
+ *
+ * Groups TASK-type engineering artifacts by priority, status and ID prefix,
+ * then reports thematic clusters, heuristic gaps and an executive summary.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class BacklogAnalyzerUseCase {
+
+    private static final String UNKNOWN = "Unknown";
+
+    private final EngineeringContextService contextService;
+
+    /**
+     * Analyzes all TASK artifacts known to the engineering context service.
+     *
+     * @return aggregated backlog analysis: counts by priority/status, clusters, gaps and summary.
+     */
+    public BacklogAnalysisResponse execute() {
+        List<EngineeringArtifact> allTasks = contextService.getAll().stream()
+                .filter(a -> a.getType() == EngineeringArtifact.Type.TASK)
+                .toList();
+
+        Map<String, Integer> byPriority = new HashMap<>();
+        Map<String, Integer> byStatus = new HashMap<>();
+        Map<String, List<String>> clustersMap = new HashMap<>();
+
+        for (EngineeringArtifact task : allTasks) {
+            String priority = task.getPriority() != null ? task.getPriority() : UNKNOWN;
+            String status = task.getStatus() != null ? task.getStatus() : UNKNOWN;
+
+            byPriority.merge(priority, 1, Integer::sum);
+            byStatus.merge(status, 1, Integer::sum);
+
+            // Simple clustering by ID prefix (e.g., AI-ARCH)
+            String prefix = extractPrefix(task.getId());
+            clustersMap.computeIfAbsent(prefix, k -> new ArrayList<>()).add(task.getId());
+        }
+
+        List<BacklogAnalysisResponse.BacklogCluster> clusters = clustersMap.entrySet().stream()
+                .map(e -> BacklogAnalysisResponse.BacklogCluster.builder()
+                        .name(e.getKey())
+                        .taskIds(e.getValue())
+                        .summary("Cluster of " + e.getValue().size() + " tasks in " + e.getKey() + " domain.")
+                        .build())
+                .toList();
+
+        List<String> gaps = identifyGaps(clustersMap);
+
+        String summary = String.format("Analyzed %d tasks. Most tasks are in %s status. Identified %d thematic clusters.",
+                allTasks.size(),
+                byStatus.entrySet().stream().max(Map.Entry.comparingByValue()).map(Map.Entry::getKey).orElse(UNKNOWN),
+                clusters.size());
+
+        return BacklogAnalysisResponse.builder()
+                .totalTasks(allTasks.size())
+                .tasksByPriority(byPriority)
+                .tasksByStatus(byStatus)
+                .clusters(clusters)
+                .identifiedGaps(gaps)
+                .executiveSummary(summary)
+                .build();
+    }
+
+    // Derives a cluster key from a task ID by dropping the last "-"-separated segment
+    // (e.g. "AI-ARCH-12" -> "AI-ARCH"); IDs without a dash fall into "Other".
+    private String extractPrefix(String id) {
+        if (id == null) {
+            return "Other";
+        }
+        int lastDash = id.lastIndexOf("-");
+        if (lastDash == -1) {
+            return "Other";
+        }
+        return id.substring(0, lastDash);
+    }
+
+    // Heuristic gap detection: flags missing security/observability/UX cluster prefixes.
+    private List<String> identifyGaps(Map<String, List<String>> clustersMap) {
+        List<String> gaps = new ArrayList<>();
+        if (!clustersMap.containsKey("AI-SEC") && !clustersMap.containsKey("SEC")) {
+            gaps.add("No explicit security tasks found in backlog.");
+        }
+        if (!clustersMap.containsKey("AI-OBS") && !clustersMap.containsKey("OBS")) {
+            gaps.add("Observability backlog seems thin or missing.");
+        }
+        if (!clustersMap.containsKey("AI-UX") && !clustersMap.containsKey("UX")) {
+            gaps.add("No explicit UX tasks found in backlog.");
+        }
+        return gaps;
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCase.java
new file mode 100644
index 000000000..aaa3a41c5
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCase.java
@@ -0,0 +1,15 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.CodeChangeRequest;
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+
+/**
+ * Use case contract for explaining a code change (diff).
+ */
+public interface CodeChangeExplainerUseCase extends CopilotUseCase {
+
+    /**
+     * Explains the supplied code change and returns a structured answer.
+     *
+     * @param request the code change (filename plus diff) to explain.
+     * @return the Copilot response describing the change.
+     */
+    CopilotResponse explain(CodeChangeRequest request);
+
+    @Override
+    default CopilotCapability getCapability() {
+        return CopilotCapability.CODE_EXPLANATION;
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCaseImpl.java
new file mode 100644
index 000000000..7d17ae9b1
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/CodeChangeExplainerUseCaseImpl.java
@@ -0,0 +1,110 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.dto.CodeChangeRequest;
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.governance.AiFailureClassifier;
+import ch.goodone.backend.ai.exception.AiException;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
+import ch.goodone.backend.ai.observability.AiCallParams;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.observability.trace.AiTraceMetadata;
+import ch.goodone.backend.ai.prompt.PromptManifestService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Explains a code diff using the structured AI client.
+ *
+ * Loads the "engineering-explain-diff" prompt template, substitutes the
+ * filename and diff, calls the architecture-configured model, and records
+ * the call (including failure classification) for observability.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class CodeChangeExplainerUseCaseImpl implements CodeChangeExplainerUseCase {
+
+    private final StructuredAiClient structuredAiClient;
+    private final AiObservabilityService observabilityService;
+    private final AiProperties aiProperties;
+    private final PromptManifestService promptManifestService;
+    private final AiFailureClassifier failureClassifier;
+
+    @Override
+    public CopilotResponse execute(Object request) {
+        return explain((CodeChangeRequest) request);
+    }
+
+    @Override
+    public CopilotResponse explain(CodeChangeRequest request) {
+        log.info("Explaining code changes for file: {}", request.getFilename());
+
+        String provider = aiProperties.getArchitecture().getProvider();
+        String model = aiProperties.getArchitecture().getModel();
+        String promptVersion = promptManifestService.getPromptInfo("engineering-explain-diff").getVersion();
+
+        try {
+            AiCallParams params = AiCallParams.builder()
+                    .operation("code-change-explain")
+                    .provider(provider)
+                    .model(model)
+                    .promptVersion(promptVersion)
+                    .input("File: " + request.getFilename())
+                    .capability(getCapability().name())
+                    .call(() -> performExplain(request, provider, model))
+                    .build();
+
+            return observabilityService.recordCall(params);
+        } catch (Exception e) {
+            // Keep the stack trace in the log; callers get a degraded response instead of an exception.
+            log.error("Failed to explain code changes: {}", e.getMessage(), e);
+            return CopilotResponse.builder()
+                    .answer("Failed to analyze diff: " + e.getMessage())
+                    .confidence(0.0)
+                    .build();
+        }
+    }
+
+    // Loads the prompt template, substitutes placeholders and performs the structured AI call.
+    private CopilotResponse performExplain(CodeChangeRequest request, String provider, String model) {
+        String promptTemplate;
+        try {
+            promptTemplate = StreamUtils.copyToString(
+                    promptManifestService.getPrompt("engineering-explain-diff").getInputStream(),
+                    StandardCharsets.UTF_8);
+        } catch (Exception e) {
+            throw new AiException("Failed to load Code Change prompt template", e);
+        }
+
+        // Template placeholders follow the {name} convention used by {diff};
+        // NOTE(review): restored "{filename}" here — the previous literal looked like a corrupted token.
+        String systemPrompt = promptTemplate
+                .replace("{filename}", request.getFilename())
+                .replace("{diff}", request.getDiff());
+
+        CopilotResponse response = structuredAiClient.call(
+                "architecture",
+                systemPrompt,
+                "Explain diff: " + request.getFilename(),
+                "copilotAnswer",
+                CopilotResponse.class
+        );
+
+        if (response != null) {
+            recordTraceMetadata(request, response);
+            // Explicit transparency fields: no retrieval happens for diff explanation.
+            response.setProvider(provider);
+            response.setModel(model);
+            response.setFallbackUsed(false);
+            response.setRetrievedDocumentCount(0);
+        }
+
+        return response;
+    }
+
+    // Classifies the answer quality and reports trace metadata for observability.
+    private void recordTraceMetadata(CodeChangeRequest request, CopilotResponse response) {
+        String answer = response.getAnswer();
+        AiFailureClassifier.ClassificationResult classification = failureClassifier.classify(answer);
+        double qualityScore = failureClassifier.calculateQualityScore(answer);
+
+        AiTraceMetadata metadataCapture = AiTraceMetadata.builder()
+                .userPrompt("Explain diff: " + request.getFilename())
+                .rawResponse(answer)
+                .finalResponse(answer)
+                .feature(getCapability().name())
+                .sprint(request.getSprintId())
+                .failureClassification(classification.getFailureMode())
+                .qualityScore(qualityScore)
+                .build();
+        observabilityService.reportTraceMetadata(metadataCapture);
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/CopilotUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/CopilotUseCase.java
new file mode 100644
index 000000000..5b50f550b
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/CopilotUseCase.java
@@ -0,0 +1,24 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+
+/**
+ * Unified contract for all Copilot-related backend use cases.
+ */
+/**
+ * Unified contract for all Copilot-related backend use cases.
+ *
+ * Implementations typically cast the generic request to their concrete
+ * request type before delegating to a typed overload.
+ */
+public interface CopilotUseCase {
+
+    /**
+     * Executes the specific Copilot capability.
+     *
+     * @param request The request object (specific to the implementation).
+     * @return The Copilot response.
+     */
+    CopilotResponse execute(Object request);
+
+    /**
+     * Provide the capability this use case handles.
+     *
+     * @return The capability.
+     */
+    CopilotCapability getCapability();
+}
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/DecisionAssistantUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/DecisionAssistantUseCase.java
new file mode 100644
index 000000000..2fd512eeb
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/DecisionAssistantUseCase.java
@@ -0,0 +1,113 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.dto.DecisionProposalRequest;
+import ch.goodone.backend.ai.dto.DecisionProposalResponse;
+import ch.goodone.backend.ai.exception.AiException;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
+import ch.goodone.backend.ai.observability.AiCallParams;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.prompt.PromptAssemblyService;
+import ch.goodone.backend.docs.retrieval.DocRetrievalService;
+import ch.goodone.backend.model.DocChunk;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.io.Resource;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Use case for helping propose architecture or engineering decisions.
+ * Migrated to StructuredAiClient for strict structured output enforcement.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class DecisionAssistantUseCase {
+
+    private final StructuredAiClient structuredAiClient;
+    private final DocRetrievalService retrievalService;
+    private final AiObservabilityService observabilityService;
+    private final AiProperties aiProperties;
+    private final PromptAssemblyService promptAssemblyService;
+
+    @Value("classpath:prompts/decision/v1/propose.st")
+    private Resource proposePromptResource;
+
+    /**
+     * Proposes a decision for the requested topic, grounded in retrieved documentation.
+     *
+     * @param request the decision topic plus optional user context and sprint ID.
+     * @return the structured proposal, or a degraded error response if the AI call fails.
+     */
+    public DecisionProposalResponse execute(DecisionProposalRequest request) {
+        int topK = 10;
+        if (aiProperties.getArchitecture() != null) {
+            topK = aiProperties.getArchitecture().getTopK();
+        }
+
+        // Use "architecture-explain" feature name to trigger boosting of ADRs
+        List<DocChunk> chunks = retrievalService.retrieve(request.getTopic(), "architecture-explain", topK);
+        String context = promptAssemblyService.assembleContext(chunks, "decision-propose");
+
+        // Fallback provider/model, used only when architecture config is absent.
+        String provider = "openai";
+        String model = "gpt-4o";
+        if (aiProperties.getArchitecture() != null) {
+            provider = aiProperties.getArchitecture().getProvider();
+            model = aiProperties.getArchitecture().getModel();
+        }
+
+        try {
+            AiCallParams params = AiCallParams.builder()
+                    .operation("decision-propose")
+                    .provider(provider)
+                    .model(model)
+                    .promptVersion("v1")
+                    .input(request.getTopic())
+                    .call(() -> {
+                        String promptTemplate;
+                        try {
+                            promptTemplate = StreamUtils.copyToString(proposePromptResource.getInputStream(), StandardCharsets.UTF_8);
+                        } catch (Exception e) {
+                            throw new AiException("Failed to load Decision prompt template", e);
+                        }
+
+                        String systemPrompt = promptTemplate
+                                .replace("{topic}", request.getTopic())
+                                .replace("{userInputContext}", request.getContext() != null ? request.getContext() : "")
+                                .replace("{context}", context);
+
+                        if (request.getSprintId() != null) {
+                            observabilityService.updateTraceMetadata(m -> m.setSprint(request.getSprintId()));
+                        }
+
+                        DecisionProposalResponse result = structuredAiClient.call(
+                                "architecture",
+                                systemPrompt,
+                                "Propose decision for: " + request.getTopic(),
+                                "decisionProposal",
+                                DecisionProposalResponse.class
+                        );
+
+                        // Add grounded artifact IDs from chunks
+                        var artifactIds = chunks.stream()
+                                .map(DocChunk::getId)
+                                .distinct()
+                                .toList();
+                        result.setGroundedArtifactIds(artifactIds);
+
+                        return result;
+                    })
+                    .build();
+
+            return observabilityService.recordCall(params);
+        } catch (Exception e) {
+            // Log with the stack trace; return a degraded response rather than propagating.
+            log.error("Decision proposal failed: {}", e.getMessage(), e);
+            return DecisionProposalResponse.builder()
+                    .executiveSummary("Decision proposal failed: " + e.getMessage())
+                    .recommendation("Error")
+                    .options(List.of())
+                    .build();
+        }
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCase.java
new file mode 100644
index 000000000..78852091d
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCase.java
@@ -0,0 +1,17 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.DeliveryForecast;
+import ch.goodone.backend.model.Task;
+import ch.goodone.backend.model.signal.EngineeringSignal;
+import java.util.List;
+
+/**
+ * Contract for forecasting delivery of a taskset.
+ */
+public interface DeliveryForecasterUseCase {
+
+    /**
+     * Forecasts delivery for the given taskset from persisted task documents.
+     *
+     * @param tasksetId the taskset identifier.
+     * @return the delivery forecast.
+     */
+    DeliveryForecast forecast(String tasksetId);
+
+    /**
+     * Forecasts delivery, preferring the supplied authoritative tasks over persisted documents.
+     *
+     * @param tasksetId          the taskset identifier.
+     * @param authoritativeTasks tasks used as the source of truth; may be null or empty.
+     * @return the delivery forecast.
+     */
+    DeliveryForecast forecast(String tasksetId, List<Task> authoritativeTasks);
+
+    /**
+     * Emits engineering signals derived from the forecast.
+     *
+     * @param tasksetId the taskset identifier.
+     * @return signals attributed to the "delivery-forecaster" source.
+     */
+    default List<EngineeringSignal> emitSignals(String tasksetId) {
+        return forecast(tasksetId).toSignals("delivery-forecaster");
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCaseImpl.java
new file mode 100644
index 000000000..60f8149bb
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/DeliveryForecasterUseCaseImpl.java
@@ -0,0 +1,176 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.DeliveryForecast;
+import ch.goodone.backend.model.Task;
+import ch.goodone.backend.model.taxonomy.OutlookStatus;
+import ch.goodone.backend.model.DocChunk;
+import ch.goodone.backend.model.DocSource;
+import ch.goodone.backend.repository.DocChunkRepository;
+import ch.goodone.backend.repository.DocSourceRepository;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.time.LocalDate;
+import java.time.temporal.ChronoUnit;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Heuristic delivery forecaster based on historical task throughput.
+ *
+ * Task metadata is taken either from the supplied authoritative tasks or
+ * parsed out of persisted documentation chunks matching the taskset ID.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class DeliveryForecasterUseCaseImpl implements DeliveryForecasterUseCase {
+
+    private final DocSourceRepository sourceRepository;
+    private final DocChunkRepository chunkRepository;
+
+    @Override
+    public DeliveryForecast forecast(String tasksetId) {
+        return forecast(tasksetId, null);
+    }
+
+    @Override
+    public DeliveryForecast forecast(String tasksetId, List<Task> authoritativeTasks) {
+        log.info("Forecasting delivery for taskset: {}", tasksetId);
+        List<TaskMetadata> allTasks = resolveTaskMetadata(tasksetId, authoritativeTasks);
+
+        if (allTasks.isEmpty()) {
+            return DeliveryForecast.builder().status(OutlookStatus.STABLE).build();
+        }
+
+        List<TaskMetadata> doneTasks = filterByStatus(allTasks, "DONE");
+        List<TaskMetadata> openTasks = filterByNotStatus(allTasks, "DONE");
+
+        double velocity = calculateVelocity(doneTasks);
+        // 10% haircut on raw velocity to counter optimism bias (mirrored in CalibrationData).
+        double adjustedVelocity = velocity * 0.9;
+        long remainingTasks = openTasks.size();
+
+        LocalDate completionDate = calculateCompletionDate(adjustedVelocity, remainingTasks);
+        List<DeliveryForecast.ForecastSignal> signals = generateSignals(velocity, remainingTasks);
+        OutlookStatus status = determineStatus(completionDate, allTasks);
+
+        return buildForecast(tasksetId, completionDate, velocity, status, signals, adjustedVelocity);
+    }
+
+    private List<TaskMetadata> filterByStatus(List<TaskMetadata> tasks, String status) {
+        return tasks.stream().filter(t -> status.equalsIgnoreCase(t.getStatus())).toList();
+    }
+
+    private List<TaskMetadata> filterByNotStatus(List<TaskMetadata> tasks, String status) {
+        return tasks.stream().filter(t -> !status.equalsIgnoreCase(t.getStatus())).toList();
+    }
+
+    // Assembles the forecast DTO; confidence and calibration values are fixed heuristics.
+    private DeliveryForecast buildForecast(String tasksetId, LocalDate completionDate, double velocity, OutlookStatus status, List<DeliveryForecast.ForecastSignal> signals, double adjustedVelocity) {
+        return DeliveryForecast.builder()
+                .tasksetId(tasksetId)
+                .estimatedCompletionDate(completionDate)
+                .velocity(velocity)
+                .status(status)
+                .signals(signals)
+                .confidence(0.7)
+                .calibration(DeliveryForecast.CalibrationData.builder()
+                        .optimismBias(0.1)
+                        .historicalAccuracy(0.85)
+                        .adjustedVelocity(adjustedVelocity)
+                        .build())
+                .build();
+    }
+
+    // Prefers the authoritative tasks when supplied; otherwise parses persisted doc sources.
+    private List<TaskMetadata> resolveTaskMetadata(String tasksetId, List<Task> authoritativeTasks) {
+        if (authoritativeTasks != null && !authoritativeTasks.isEmpty()) {
+            return mapAuthoritativeTasks(authoritativeTasks);
+        }
+
+        List<DocSource> sources = sourceRepository.findByPathContaining(tasksetId);
+        return resolveMetadataFromSources(sources);
+    }
+
+    private List<TaskMetadata> mapAuthoritativeTasks(List<Task> tasks) {
+        return tasks.stream()
+                .map(t -> TaskMetadata.builder()
+                        .id(t.getTitle())
+                        .status(t.getStatus() != null ? t.getStatus().name() : "OPEN")
+                        .created(t.getCreatedAt() != null ? t.getCreatedAt().toLocalDate() : LocalDate.now())
+                        // Planned window defaults to two weeks ending at the due date, when one exists.
+                        .plannedFrom(t.getDueDate() != null ? t.getDueDate().minusWeeks(2) : null)
+                        .plannedTo(t.getDueDate())
+                        .build())
+                .toList();
+    }
+
+    // Joins each source's chunks and parses the combined content into task metadata.
+    private List<TaskMetadata> resolveMetadataFromSources(List<DocSource> sources) {
+        return sources.stream()
+                .map(s -> {
+                    List<DocChunk> chunks = chunkRepository.findBySource(s);
+                    String content = chunks.stream().map(DocChunk::getContent).collect(Collectors.joining("\n"));
+                    return TaskMetadata.parse(content);
+                })
+                .toList();
+    }
+
+    // Projects the completion date from weekly velocity; with no velocity data, assumes one task per week.
+    private LocalDate calculateCompletionDate(double adjustedVelocity, long remainingTasks) {
+        LocalDate completionDate = LocalDate.now();
+        if (adjustedVelocity > 0) {
+            return completionDate.plusDays((long) (remainingTasks / (adjustedVelocity / 7.0)));
+        }
+        return completionDate.plusWeeks(remainingTasks); // Default fallback: 1 task per week
+    }
+
+    private List<DeliveryForecast.ForecastSignal> generateSignals(double velocity, long remainingTasks) {
+        List<DeliveryForecast.ForecastSignal> signals = new ArrayList<>();
+        signals.add(DeliveryForecast.ForecastSignal.builder()
+                .description("Historical velocity: " + String.format("%.2f", velocity) + " tasks/week")
+                .impact(velocity > 2 ? "POSITIVE" : "NEUTRAL")
+                .weight(0.5)
+                .baseValue(velocity)
+                .calculatedValue(velocity)
+                .evidenceKey("HISTORICAL_VELOCITY")
+                .build());
+
+        // Flag the compounding risk of a large backlog combined with low throughput.
+        if (remainingTasks > 10 && velocity < 1) {
+            signals.add(DeliveryForecast.ForecastSignal.builder()
+                    .description("Large backlog with low velocity")
+                    .impact("NEGATIVE")
+                    .weight(0.8)
+                    .baseValue((double) remainingTasks)
+                    .calculatedValue(velocity)
+                    .evidenceKey("BACKLOG_RISK")
+                    .build());
+        }
+        return signals;
+    }
+
+    // Tasks completed per week since the earliest DONE task's creation date.
+    private double calculateVelocity(List<TaskMetadata> doneTasks) {
+        if (doneTasks.size() < 2) {
+            return 1.0; // Assume 1 task/week if little data
+        }
+
+        LocalDate minDate = doneTasks.stream().map(TaskMetadata::getCreated).min(LocalDate::compareTo).orElse(LocalDate.now().minusMonths(1));
+        long days = ChronoUnit.DAYS.between(minDate, LocalDate.now());
+        if (days <= 0) {
+            return doneTasks.size();
+        }
+
+        return doneTasks.size() / (days / 7.0);
+    }
+
+    // DELAYED if more than two weeks past the latest planned end, AT_RISK if past it at all.
+    private OutlookStatus determineStatus(LocalDate completionDate, List<TaskMetadata> allTasks) {
+        LocalDate latestPlanned = allTasks.stream()
+                .map(TaskMetadata::getPlannedTo)
+                .filter(d -> d != null)
+                .max(LocalDate::compareTo)
+                .orElse(null);
+
+        if (latestPlanned == null) {
+            return OutlookStatus.ON_TRACK;
+        }
+        if (completionDate.isAfter(latestPlanned.plusWeeks(2))) {
+            return OutlookStatus.DELAYED;
+        }
+        if (completionDate.isAfter(latestPlanned)) {
+            return OutlookStatus.AT_RISK;
+        }
+        return OutlookStatus.ON_TRACK;
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCase.java
new file mode 100644
index 000000000..06613a79c
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCase.java
@@ -0,0 +1,15 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.dto.EngineeringChatRequest;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+
+/**
+ * Use case contract for chat-based engineering support.
+ */
+public interface EngineeringChatUseCase extends CopilotUseCase {
+
+    /**
+     * Answers an engineering chat question.
+     *
+     * @param request the chat request.
+     * @return the Copilot response.
+     */
+    CopilotResponse ask(EngineeringChatRequest request);
+
+    @Override
+    default CopilotCapability getCapability() {
+        return CopilotCapability.ENGINEERING_CHAT;
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCaseImpl.java
new file mode 100644
index 000000000..223b18359
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringChatUseCaseImpl.java
@@ -0,0 +1,272 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.AiProviderService;
+import ch.goodone.backend.ai.context.AssembledContext;
+import ch.goodone.backend.ai.context.CopilotContextOrchestrator;
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.dto.EngineeringChatRequest;
+import ch.goodone.backend.ai.dto.EngineeringChatResponse;
+import ch.goodone.backend.ai.prompt.DeterministicPromptBuilder;
+import ch.goodone.backend.ai.prompt.PromptBuildResult;
+import ch.goodone.backend.ai.prompt.SystemPromptConstants;
+import ch.goodone.backend.ai.governance.AiFailureClassifier;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.observability.trace.AiTraceMetadata;
+import ch.goodone.backend.model.taxonomy.CopilotContextMode;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.ai.chat.messages.AssistantMessage;
+import org.springframework.ai.chat.messages.Message;
+import org.springframework.ai.chat.messages.SystemMessage;
+import org.springframework.ai.chat.messages.UserMessage;
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.ai.chat.prompt.Prompt;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Use case for chat-based engineering support.
+ * Refactored to remove all JSON repair and stabilization logic (AI-BE-52).
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class EngineeringChatUseCaseImpl implements EngineeringChatUseCase {
+
+    private final CopilotContextOrchestrator contextOrchestrator;
+    private final AiProviderService aiProviderService;
+    private final AiObservabilityService observabilityService;
+    private final AiProperties aiProperties;
+    private final DeterministicPromptBuilder promptBuilder;
+    private final AiFailureClassifier failureClassifier;
+
+    @Override
+    public CopilotResponse execute(Object request) {
+        return ask((EngineeringChatRequest) request);
+    }
+
+    /**
+     * Answers an engineering chat query: assembles retrieval context, builds a
+     * deterministic prompt and records the model call through the observability
+     * service. Any failure is logged and mapped to a generic error response.
+     */
+    @Override
+    public CopilotResponse ask(EngineeringChatRequest request) {
+        log.info("Processing engineering chat query: {}", request.getQuery());
+
+        try {
+            CopilotContextMode mode = resolveContextMode(request);
+            AssembledContext contextResult = contextOrchestrator.assemble(request.getQuery(), mode, 10, request.getSprintId());
+
+            PromptBuildResult buildResult = promptBuilder.build(
+                    SystemPromptConstants.KNOWLEDGE_QA,
+                    request.getQuery(),
+                    contextResult.getRetrievedChunks(),
+                    mode.name()
+            );
+
+            return recordChatCall(request, contextResult, buildResult, mode);
+        } catch (Exception e) {
+            // Pass the throwable as the last arg so the stack trace is not lost.
+            log.error("Engineering chat failed: {}", e.getMessage(), e);
+            return createErrorResponse();
+        }
+    }
+
+    /** Falls back to ENGINEERING_CHAT when the request does not pin a context mode. */
+    private CopilotContextMode resolveContextMode(EngineeringChatRequest request) {
+        return request.getContextMode() != null ? request.getContextMode() : CopilotContextMode.ENGINEERING_CHAT;
+    }
+
+    /** Wraps the actual model call in the observability recorder. */
+    private CopilotResponse recordChatCall(EngineeringChatRequest request, AssembledContext contextResult, PromptBuildResult buildResult, CopilotContextMode mode) {
+        List<Message> messages = buildChatMessages(request, buildResult);
+        ChatModel chatModel = aiProviderService.getArchitectureChatModel();
+        String model = aiProperties.getArchitecture().getModel();
+        String provider = aiProperties.getArchitecture().getProvider();
+
+        return observabilityService.recordCall(ch.goodone.backend.ai.observability.AiCallParams.builder()
+                .operation("engineering-chat-ask")
+                .provider(provider)
+                .model(model)
+                .promptVersion("v2")
+                .promptHash(buildResult.promptHash())
+                .input(request.getQuery())
+                .capability(getCapability().name())
+                .contextMode(mode.name())
+                .call(() -> performEngineeringChatCall(new ChatCallParams(request, contextResult, buildResult, messages, chatModel, provider, model, mode)))
+                .build());
+    }
+
+    /** Builds the message list: system prompt, prior history (if any), then the user prompt. */
+    private List<Message> buildChatMessages(EngineeringChatRequest request, PromptBuildResult buildResult) {
+        List<Message> messages = new ArrayList<>();
+        messages.add(new SystemMessage(buildResult.systemPrompt()));
+
+        if (request.getHistory() != null) {
+            for (EngineeringChatRequest.ChatMessage msg : request.getHistory()) {
+                if ("user".equalsIgnoreCase(msg.getRole())) {
+                    messages.add(new UserMessage(msg.getContent()));
+                } else {
+                    messages.add(new AssistantMessage(msg.getContent()));
+                }
+            }
+        }
+
+        messages.add(new UserMessage(buildResult.userPrompt()));
+        return messages;
+    }
+
+    /** Generic degraded response returned when the chat pipeline throws. */
+    private CopilotResponse createErrorResponse() {
+        return CopilotResponse.builder()
+                .answer("I'm sorry, I encountered an error while processing your request.")
+                .evidence(new ArrayList<>())
+                .confidence(0.0)
+                .build();
+    }
+
+    /** Bundles per-call state so helper signatures stay short. */
+    private record ChatCallParams(
+            EngineeringChatRequest request,
+            AssembledContext contextResult,
+            PromptBuildResult buildResult,
+            List<Message> messages,
+            ChatModel chatModel,
+            String provider,
+            String model,
+            CopilotContextMode mode
+    ) {}
+
+    private CopilotResponse performEngineeringChatCall(ChatCallParams params) {
+        observabilityService.updateTraceMetadata(m -> {
+            m.setSystemPrompt(params.buildResult().systemPrompt());
+            m.setUserPrompt(params.buildResult().userPrompt());
+            m.setFullPrompt(params.buildResult().fullPrompt());
+            m.setPromptHash(params.buildResult().promptHash());
+            if (params.request().getSprintId() != null) {
+                m.setSprint(params.request().getSprintId());
+            }
+        });
+
+        Prompt prompt = new Prompt(params.messages());
+        ChatExecutionResult executionResult = executeWithRetries(params.chatModel(), prompt);
+
+        AiFailureClassifier.ClassificationResult classification = failureClassifier.classify(executionResult.rawAnswer());
+        String finalAnswer = executionResult.rawAnswer();
+        if (finalAnswer == null || finalAnswer.trim().isEmpty()) {
+            finalAnswer = "I'm sorry, I couldn't generate a response. Please try again or rephrase your question.";
+        }
+
+        double qualityScore = failureClassifier.calculateQualityScore(finalAnswer);
+        updateTraceMetadata(params.buildResult(), params.request(), params.contextResult(), params.mode(), finalAnswer, classification, qualityScore);
+
+        CopilotResponse copilotResponse = buildCopilotResponse(finalAnswer, params.contextResult(), params.buildResult(), params.provider(), params.model());
+        if (params.contextResult().hasFailures()) {
+            copilotResponse.setPartialFailures(params.contextResult().getPartialFailures());
+        }
+        return copilotResponse;
+    }
+
+    /**
+     * Calls the model up to three times: semantic failures retry immediately,
+     * rate-limit errors retry with exponential backoff; other exceptions propagate.
+     */
+    private ChatExecutionResult executeWithRetries(ChatModel chatModel, Prompt prompt) {
+        int maxRetries = 3;
+        int attempt = 0;
+        long backoffMs = 2000;
+        String rawAnswer = null;
+
+        while (attempt < maxRetries) {
+            try {
+                rawAnswer = executeSingleAttempt(chatModel, prompt);
+                AiFailureClassifier.ClassificationResult classification = failureClassifier.classify(rawAnswer);
+                if (!shouldRetry(classification, attempt, maxRetries)) {
+                    break;
+                }
+                attempt++;
+            } catch (Exception e) {
+                attempt++;
+                if (!shouldRetryException(e, attempt, maxRetries, backoffMs)) {
+                    throw e;
+                }
+                backoffMs *= 2;
+            }
+        }
+        return new ChatExecutionResult(rawAnswer);
+    }
+
+    private boolean shouldRetry(AiFailureClassifier.ClassificationResult classification, int attempt, int maxRetries) {
+        if (classification.isFailed() && classification.isRetryable() && attempt + 1 < maxRetries) {
+            log.warn("Semantic AI failure detected ({}). Retrying (attempt {}/{})...",
+                    classification.getFailureMode(), attempt + 1, maxRetries);
+            return true;
+        }
+        return false;
+    }
+
+    private boolean shouldRetryException(Exception e, int attempt, int maxRetries, long backoffMs) {
+        if (isRateLimit(e) && attempt < maxRetries) {
+            handleRateLimitWait(attempt, maxRetries, backoffMs);
+            return true;
+        }
+        return false;
+    }
+
+    private String executeSingleAttempt(ChatModel chatModel, Prompt prompt) {
+        org.springframework.ai.chat.model.ChatResponse response = chatModel.call(prompt);
+        String rawAnswer = (response != null && response.getResult() != null && response.getResult().getOutput() != null)
+                ? response.getResult().getOutput().getText()
+                : null;
+
+        final String currentAnswer = rawAnswer;
+        observabilityService.updateTraceMetadata(m -> m.setRawResponse(currentAnswer));
+        return rawAnswer;
+    }
+
+    /** Heuristic rate-limit detection. getMessage() may be null, so guard against NPE. */
+    private boolean isRateLimit(Exception e) {
+        String msg = e.getMessage();
+        if (msg == null) {
+            return false;
+        }
+        // Locale.ROOT keeps the match locale-independent (e.g. Turkish dotless i).
+        String lower = msg.toLowerCase(java.util.Locale.ROOT);
+        return lower.contains("429") || lower.contains("rate limit");
+    }
+
+    private void handleRateLimitWait(int attempt, int maxRetries, long backoffMs) {
+        log.warn("Engineering chat rate limit hit (attempt {}/{}). Retrying in {}ms...", attempt, maxRetries, backoffMs);
+        try {
+            Thread.sleep(backoffMs);
+        } catch (InterruptedException ie) {
+            // Restore the interrupt flag before surfacing the failure.
+            Thread.currentThread().interrupt();
+            throw new ch.goodone.backend.ai.exception.AiException("Interrupted while waiting for rate limit backoff", ie);
+        }
+    }
+
+    /** Captures the full trace of the call (prompts, answer, classification) for observability. */
+    private void updateTraceMetadata(PromptBuildResult buildResult, EngineeringChatRequest request, AssembledContext contextResult, CopilotContextMode mode, String finalAnswer, AiFailureClassifier.ClassificationResult classification, double qualityScore) {
+        AiTraceMetadata metadataCapture = AiTraceMetadata.builder()
+                .promptHash(buildResult.promptHash())
+                .systemPrompt(buildResult.systemPrompt())
+                .userPrompt(buildResult.userPrompt())
+                .fullPrompt(buildResult.fullPrompt())
+                .sprint(request.getSprintId())
+                .retrievedDocumentPaths(contextResult.getRetrievedChunks())
+                .rawResponse(finalAnswer)
+                .finalResponse(finalAnswer)
+                .feature(getCapability().name())
+                .section(mode.name())
+                .failureClassification(classification != null ? classification.getFailureMode() : "NONE")
+                .qualityScore(qualityScore)
+                .build();
+        observabilityService.reportTraceMetadata(metadataCapture);
+    }
+
+    /** Assembles the outgoing response, attaching provenance metadata. */
+    private CopilotResponse buildCopilotResponse(String answer, AssembledContext contextResult, PromptBuildResult buildResult, String provider, String model) {
+        java.util.Map<String, Object> metadata = new java.util.HashMap<>();
+        metadata.put("model", model);
+        metadata.put("provider", provider);
+        metadata.put("promptHash", buildResult.promptHash());
+        metadata.put("sources", contextResult.getRetrievedChunks());
+
+        return CopilotResponse.builder()
+                .answer(answer)
+                .evidence(contextResult.getRetrievedChunks())
+                .confidence(0.9)
+                .metadata(metadata)
+                .provider(provider)
+                .model(model)
+                .promptHash(buildResult.promptHash())
+                .retrievedDocumentCount(contextResult.getRetrievedChunks() != null ? contextResult.getRetrievedChunks().size() : 0)
+                .retrievedDocumentPaths(contextResult.getRetrievedChunks())
+                .fallbackUsed(false)
+                .build();
+    }
+
+    /** Wrapper for the raw model answer so the retry loop can evolve without changing call sites. */
+    private record ChatExecutionResult(String rawAnswer) {}
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringIntelligenceAggregationService.java b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringIntelligenceAggregationService.java
new file mode 100644
index 000000000..3c2579e0e
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/EngineeringIntelligenceAggregationService.java
@@ -0,0 +1,151 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.RiskRadarRequest;
+import ch.goodone.backend.ai.dto.AdrDriftRequest;
+import ch.goodone.backend.model.signal.EngineeringSignal;
+import ch.goodone.backend.model.taxonomy.EngineeringSignalSeverity;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+/**
+ * Service responsible for aggregating intelligence signals from various engines.
+ * Ensures that the platform provides a unified view of engineering health and risks.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class EngineeringIntelligenceAggregationService {
+
+ private final RiskRadarUseCase riskRadarUseCase;
+ private final DeliveryForecasterUseCase deliveryForecasterUseCase;
+ private final AdrDriftUseCase adrDriftUseCase;
+ private final SprintRiskPredictorUseCase sprintRiskPredictorUseCase;
+ private final TaskRelationshipService taskRelationshipService;
+
+ /**
+ * Aggregates all signals for a given sprint/taskset in parallel to minimize latency.
+ */
+ public List aggregateSignals(String sprintId) {
+ log.info("[DIAGNOSTIC] Aggregating signals for sprint: {}", sprintId);
+
+ // 1. Collect Risks in parallel
+ var risksFuture = CompletableFuture.supplyAsync(() -> emitRiskSignals(sprintId));
+
+ // 2. Collect Forecasts in parallel
+ var forecastsFuture = CompletableFuture.supplyAsync(() -> emitForecastSignals(sprintId));
+
+ // 3. Collect Architecture Drifts in parallel
+ var driftsFuture = CompletableFuture.supplyAsync(() -> emitDriftSignals(sprintId));
+
+ // 4. Collect Task Relationships in parallel
+ var relationshipsFuture = CompletableFuture.supplyAsync(() -> emitRelationshipSignals(sprintId));
+
+ // Combine all results with a timeout to avoid hanging the entire dashboard
+ try {
+ return CompletableFuture.allOf(risksFuture, forecastsFuture, driftsFuture, relationshipsFuture)
+ .orTimeout(120, TimeUnit.SECONDS) // Total timeout for dashboard responses
+ .thenApply(v -> {
+ List aggregated = Stream.of(risksFuture, forecastsFuture, driftsFuture, relationshipsFuture)
+ .map(CompletableFuture::join)
+ .flatMap(List::stream)
+ .toList();
+ log.info("[DIAGNOSTIC] Signal aggregation complete. TotalSignals: {}", aggregated.size());
+ return aggregated;
+ }).get();
+ } catch (InterruptedException e) {
+ log.error("Signal aggregation interrupted: {}", e.getMessage());
+ Thread.currentThread().interrupt();
+ return collectPartialResults(risksFuture, forecastsFuture, driftsFuture, relationshipsFuture);
+ } catch (Exception e) {
+ log.error("Signal aggregation timed out or failed partially: {}", e.getMessage());
+ return collectPartialResults(risksFuture, forecastsFuture, driftsFuture, relationshipsFuture);
+ }
+ }
+
+ private List collectPartialResults(
+ CompletableFuture> risksFuture,
+ CompletableFuture> forecastsFuture,
+ CompletableFuture> driftsFuture,
+ CompletableFuture> relationshipsFuture) {
+ // Return whatever we have already finished
+ List partial = new ArrayList<>();
+ if (risksFuture.isDone() && !risksFuture.isCompletedExceptionally()) {
+ partial.addAll(risksFuture.join());
+ }
+ if (forecastsFuture.isDone() && !forecastsFuture.isCompletedExceptionally()) {
+ partial.addAll(forecastsFuture.join());
+ }
+ if (driftsFuture.isDone() && !driftsFuture.isCompletedExceptionally()) {
+ partial.addAll(driftsFuture.join());
+ }
+ if (relationshipsFuture.isDone() && !relationshipsFuture.isCompletedExceptionally()) {
+ partial.addAll(relationshipsFuture.join());
+ }
+ return partial;
+ }
+
+ public List emitRiskSignals(String sprintId) {
+ try {
+ return riskRadarUseCase.emitSignals(RiskRadarRequest.builder().tasksets(List.of(sprintId)).build());
+ } catch (Exception e) {
+ log.error("Failed to collect risk signals for sprint {}: {}", sprintId, e.getMessage());
+ return Collections.emptyList();
+ }
+ }
+
+ public List emitForecastSignals(String sprintId) {
+ try {
+ return deliveryForecasterUseCase.emitSignals(sprintId);
+ } catch (Exception e) {
+ log.error("Failed to collect forecast signals for sprint {}: {}", sprintId, e.getMessage());
+ return Collections.emptyList();
+ }
+ }
+
+ public List emitDriftSignals(String sprintId) {
+ try {
+ return adrDriftUseCase.emitSignals(AdrDriftRequest.builder().tasksets(List.of(sprintId)).build());
+ } catch (Exception e) {
+ log.error("Failed to collect drift signals for sprint {}: {}", sprintId, e.getMessage());
+ return Collections.emptyList();
+ }
+ }
+
+ public List emitRelationshipSignals(String sprintId) {
+ try {
+ var relationships = taskRelationshipService.analyzeTaskset(sprintId);
+ if (relationships != null) {
+ return relationships.stream().map(r -> r.toSignal("task-relationship-engine")).toList();
+ }
+ return Collections.emptyList();
+ } catch (Exception e) {
+ log.error("Failed to collect relationship signals for sprint {}: {}", sprintId, e.getMessage(), e);
+ return Collections.emptyList();
+ }
+ }
+
+ /**
+ * Calculates an overall health score based on aggregated signals.
+ */
+ public double calculateOverallHealth(List signals) {
+ if (signals.isEmpty()) {
+ return 1.0;
+ }
+
+ long criticalCount = signals.stream().filter(s -> EngineeringSignalSeverity.CRITICAL.equals(s.getSeverity())).count();
+ long highCount = signals.stream().filter(s -> EngineeringSignalSeverity.HIGH.equals(s.getSeverity())).count();
+ long mediumCount = signals.stream().filter(s -> EngineeringSignalSeverity.MEDIUM.equals(s.getSeverity())).count();
+
+ double penalty = (criticalCount * 0.2) + (highCount * 0.1) + (mediumCount * 0.05);
+ return Math.max(0.0, 1.0 - penalty);
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/ImpactSimulatorUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/ImpactSimulatorUseCase.java
new file mode 100644
index 000000000..875aca435
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/ImpactSimulatorUseCase.java
@@ -0,0 +1,112 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.dto.ImpactAnalysisRequest;
+import ch.goodone.backend.ai.dto.ImpactAnalysisResponse;
+import ch.goodone.backend.ai.exception.AiException;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
+import ch.goodone.backend.ai.observability.AiCallParams;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.prompt.PromptAssemblyService;
+import ch.goodone.backend.docs.retrieval.DocRetrievalService;
+import ch.goodone.backend.model.DocChunk;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.io.Resource;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Use case for estimating the engineering impact of a proposed change.
+ * Migrated to StructuredAiClient for strict structured output enforcement.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class ImpactSimulatorUseCase {
+
+    private final StructuredAiClient structuredAiClient;
+    private final DocRetrievalService retrievalService;
+    private final AiObservabilityService observabilityService;
+    private final AiProperties aiProperties;
+    private final PromptAssemblyService promptAssemblyService;
+
+    @Value("classpath:prompts/impact/v1/simulate.st")
+    private Resource simulatePromptResource;
+
+    /**
+     * Simulates the impact of the requested scenario: retrieves grounding
+     * documents, fills the prompt template and delegates to the structured AI
+     * client. Failures are logged and mapped to a degraded response rather
+     * than propagated to the caller.
+     */
+    public ImpactAnalysisResponse execute(ImpactAnalysisRequest request) {
+        int topK = 10;
+        if (aiProperties.getArchitecture() != null) {
+            topK = aiProperties.getArchitecture().getTopK();
+        }
+
+        // Use "architecture-explain" feature name to trigger boosting of ADRs
+        List<DocChunk> chunks = retrievalService.retrieve(request.getScenario(), "architecture-explain", topK);
+        String context = promptAssemblyService.assembleContext(chunks, "impact-simulate");
+
+        // Defaults used only when no architecture config is present.
+        String provider = "openai";
+        String model = "gpt-4o";
+        if (aiProperties.getArchitecture() != null) {
+            provider = aiProperties.getArchitecture().getProvider();
+            model = aiProperties.getArchitecture().getModel();
+        }
+
+        try {
+            AiCallParams params = AiCallParams.builder()
+                .operation("impact-simulate")
+                .provider(provider)
+                .model(model)
+                .promptVersion("v1")
+                .input(request.getScenario())
+                .call(() -> {
+                    String promptTemplate = "";
+                    try {
+                        promptTemplate = StreamUtils.copyToString(simulatePromptResource.getInputStream(), StandardCharsets.UTF_8);
+                    } catch (Exception e) {
+                        // Preserve the cause so a missing/corrupt template is diagnosable.
+                        throw new AiException("Failed to load Impact simulation prompt template", e);
+                    }
+
+                    String systemPrompt = promptTemplate
+                            .replace("{scenario}", request.getScenario())
+                            .replace("{context}", context);
+
+                    if (request.getSprintId() != null) {
+                        observabilityService.updateTraceMetadata(m -> m.setSprint(request.getSprintId()));
+                    }
+
+                    ImpactAnalysisResponse result = structuredAiClient.call(
+                            "architecture",
+                            systemPrompt,
+                            "Simulate impact for: " + request.getScenario(),
+                            "impactSimulation",
+                            ImpactAnalysisResponse.class
+                    );
+
+                    // Record which retrieved artifacts grounded the answer.
+                    var artifactIds = chunks.stream()
+                            .map(DocChunk::getId)
+                            .distinct()
+                            .toList();
+                    result.setGroundedArtifactIds(artifactIds);
+
+                    return result;
+                })
+                .build();
+
+            return observabilityService.recordCall(params);
+        } catch (Exception e) {
+            log.error("Impact simulation failed: {}", e.getMessage(), e);
+            return ImpactAnalysisResponse.builder()
+                    .executiveSummary("Impact simulation failed: " + e.getMessage())
+                    .affectedAreas(List.of())
+                    .potentialRisks(List.of())
+                    .build();
+        }
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/InsightRankingService.java b/backend/src/main/java/ch/goodone/backend/ai/application/InsightRankingService.java
new file mode 100644
index 000000000..75803bfdb
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/InsightRankingService.java
@@ -0,0 +1,82 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.model.signal.EngineeringSignal;
+import ch.goodone.backend.model.taxonomy.EngineeringSignalSeverity;
+import ch.goodone.backend.ai.dto.RiskRadarResponse.RiskItem;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.util.Comparator;
+import java.util.List;
+
+@Service
+@Slf4j
+public class InsightRankingService {
+
+    /**
+     * Ranks engineering signals by descending importance score.
+     * A null input yields an empty list, matching {@code rankRisks}.
+     */
+    public List<EngineeringSignal> rankSignals(List<EngineeringSignal> signals) {
+        if (signals == null) {
+            return List.of();
+        }
+        log.info("Ranking {} engineering signals.", signals.size());
+
+        return signals.stream()
+                .sorted(Comparator.comparingDouble(this::calculateImportanceScore).reversed())
+                .toList();
+    }
+
+    /** Ranks risk items by descending risk score; null input yields an empty list. */
+    public List<RiskItem> rankRisks(List<RiskItem> risks) {
+        if (risks == null) {
+            return List.of();
+        }
+        return risks.stream()
+                .sorted(Comparator.comparingDouble(this::calculateRiskScore).reversed())
+                .toList();
+    }
+
+    /** Risk score in [0, 1]: severity weighted 70%, evidence count (capped at 5) 30%. */
+    private double calculateRiskScore(RiskItem risk) {
+        double score = 0.0;
+
+        // Severity (Weight: 70%)
+        score += getSeverityScore(risk.getSeverity()) * 0.7;
+
+        // Evidence Count (Weight: 30%)
+        int evidenceCount = (risk.getEvidence() != null) ? risk.getEvidence().size() : 0;
+        double impactScore = Math.min(evidenceCount / 5.0, 1.0);
+        score += impactScore * 0.3;
+
+        return score;
+    }
+
+    /** Importance score in [0, 1] combining severity, confidence and evidence volume. */
+    public double calculateImportanceScore(EngineeringSignal signal) {
+        double score = 0.0;
+
+        // Factor 1: Severity (Weight: 50%)
+        score += getSeverityScore(signal.getSeverity()) * 0.5;
+
+        // Factor 2: Confidence (Weight: 20%)
+        // We prefer high-confidence signals, but even low-confidence critical issues are important.
+        score += (signal.getConfidence() != null ? signal.getConfidence() : 0.5) * 0.2;
+
+        // Factor 3: Urgency / Age (Weight: 10%)
+        // Newer signals might be more relevant, but for simplicity we'll skip time for now
+        // and focus on evidence count as a proxy for impact.
+
+        // Factor 4: Impact / Evidence Count (Weight: 20%)
+        int evidenceCount = (signal.getEvidence() != null) ? signal.getEvidence().size() : 0;
+        double impactScore = Math.min(evidenceCount / 10.0, 1.0); // Cap at 1.0 for 10+ evidence items
+        score += impactScore * 0.2;
+
+        return score;
+    }
+
+    /** Maps a severity to a weight; unknown/null severities count as medium (0.5). */
+    private double getSeverityScore(EngineeringSignalSeverity severity) {
+        if (severity == null) {
+            return 0.5;
+        }
+        return switch (severity) {
+            case CRITICAL -> 1.0;
+            case HIGH -> 0.8;
+            case MEDIUM -> 0.5;
+            case LOW -> 0.2;
+            default -> 0.5;
+        };
+    }
+}
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCase.java
new file mode 100644
index 000000000..71afbdf3e
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCase.java
@@ -0,0 +1,15 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.dto.OnboardingRequest;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+
+/**
+ * Copilot use case for onboarding assistance.
+ * Implementations answer onboarding questions and expose the ONBOARDING capability.
+ */
+public interface OnboardingAssistantUseCase extends CopilotUseCase {
+
+    /** Answers the given onboarding help request. */
+    CopilotResponse getOnboardingHelp(OnboardingRequest request);
+
+    @Override
+    default CopilotCapability getCapability() {
+        return CopilotCapability.ONBOARDING;
+    }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCaseImpl.java
new file mode 100644
index 000000000..09ce80009
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/OnboardingAssistantUseCaseImpl.java
@@ -0,0 +1,176 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.context.AssembledContext;
+import ch.goodone.backend.ai.context.CopilotContextOrchestrator;
+import ch.goodone.backend.ai.dto.CopilotResponse;
+import ch.goodone.backend.ai.dto.OnboardingRequest;
+import ch.goodone.backend.ai.exception.AiException;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
+import ch.goodone.backend.ai.observability.AiCallParams;
+import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.prompt.DeterministicPromptBuilder;
+import ch.goodone.backend.ai.prompt.PromptBuildResult;
+import ch.goodone.backend.ai.prompt.PromptManifestService;
+import ch.goodone.backend.ai.governance.AiFailureClassifier;
+import ch.goodone.backend.model.taxonomy.CopilotCapability;
+import ch.goodone.backend.model.taxonomy.CopilotContextMode;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class OnboardingAssistantUseCaseImpl implements OnboardingAssistantUseCase {
+
+ private static final String FEATURE_ONBOARDING = "engineering-onboarding";
+
+ private final CopilotContextOrchestrator contextOrchestrator;
+ private final StructuredAiClient structuredAiClient;
+ private final AiObservabilityService observabilityService;
+ private final AiProperties aiProperties;
+ private final PromptManifestService promptManifestService;
+ private final DeterministicPromptBuilder promptBuilder;
+ private final AiFailureClassifier failureClassifier;
+
+ @Override
+ public CopilotCapability getCapability() {
+ return CopilotCapability.ONBOARDING;
+ }
+
+ @Override
+ public CopilotResponse execute(Object request) {
+ return getOnboardingHelp((OnboardingRequest) request);
+ }
+
+ @Override
+ public CopilotResponse getOnboardingHelp(OnboardingRequest request) {
+ log.info("Onboarding request: {}", request.getQuery());
+
+ CopilotContextMode mode = resolveContextMode(request);
+ AssembledContext contextResult = contextOrchestrator.assemble(request.getQuery(), mode, 10, request.getSprintId());
+
+ String provider = aiProperties.getArchitecture().getProvider();
+ String model = aiProperties.getArchitecture().getModel();
+ String promptVersion = promptManifestService.getPromptInfo(FEATURE_ONBOARDING).getVersion();
+
+ PromptBuildResult buildResult = promptBuilder.build(
+ FEATURE_ONBOARDING,
+ request.getQuery(),
+ contextResult.getRetrievedChunks(),
+ mode.name()
+ );
+
+ return recordOnboardingCall(request, contextResult, buildResult, provider, model, promptVersion, mode);
+ }
+
+ private CopilotContextMode resolveContextMode(OnboardingRequest request) {
+ return request.getContextMode() != null ? request.getContextMode() : CopilotContextMode.ONBOARDING;
+ }
+
+ private CopilotResponse recordOnboardingCall(OnboardingRequest request, AssembledContext contextResult, PromptBuildResult buildResult, String provider, String model, String promptVersion, CopilotContextMode mode) {
+ try {
+ AiCallParams params = AiCallParams.builder()
+ .operation("onboarding-help")
+ .provider(provider)
+ .model(model)
+ .promptVersion(promptVersion)
+ .promptHash(buildResult.promptHash())
+ .input(request.getQuery())
+ .capability(getCapability().name())
+ .contextMode(mode.name())
+ .call(() -> performOnboardingAiCall(request, contextResult, buildResult, provider, model))
+ .build();
+
+ return observabilityService.recordCall(params);
+ } catch (Exception e) {
+ log.error("Onboarding help failed: {}", e.getMessage());
+ return buildErrorResponse(e);
+ }
+ }
+
+ private CopilotResponse buildErrorResponse(Exception e) {
+ return CopilotResponse.builder()
+ .answer("I'm sorry, I couldn't generate onboarding help right now: " + e.getMessage())
+ .suggestedActions(List.of("Check README.md", "Contact team lead"))
+ .evidence(new ArrayList<>())
+ .build();
+ }
+
+ private CopilotResponse performOnboardingAiCall(OnboardingRequest request, AssembledContext contextResult, PromptBuildResult buildResult, String provider, String model) {
+ observabilityService.updateTraceMetadata(m -> {
+ m.setSystemPrompt(buildResult.systemPrompt());
+ m.setUserPrompt(buildResult.userPrompt());
+ m.setFullPrompt(buildResult.fullPrompt());
+ m.setPromptHash(buildResult.promptHash());
+ if (request.getSprintId() != null) {
+ m.setSprint(request.getSprintId());
+ }
+ });
+
+ String promptTemplate;
+ try {
+ promptTemplate = StreamUtils.copyToString(promptManifestService.getPrompt(FEATURE_ONBOARDING).getInputStream(), StandardCharsets.UTF_8);
+ } catch (Exception e) {
+ throw new AiException("Failed to load Onboarding prompt template", e);
+ }
+
+ String systemPrompt = promptTemplate
+ .replace("{userInput}", request.getQuery())
+ .replace("{context}", contextResult.getContext())
+ .replace("{promptHash}", buildResult.promptHash());
+
+ CopilotResponse response = structuredAiClient.call(
+ "architecture",
+ systemPrompt,
+ request.getQuery(),
+ "copilotAnswer",
+ CopilotResponse.class
+ );
+
+ if (response != null) {
+ updateOnboardingMetadata(response, contextResult, buildResult, provider, model);
+ }
+
+ return response;
+ }
+
+ private void updateOnboardingMetadata(CopilotResponse response, AssembledContext contextResult, PromptBuildResult buildResult, String provider, String model) {
+ String answer = response.getAnswer();
+ AiFailureClassifier.ClassificationResult classification = failureClassifier.classify(answer);
+ double qualityScore = failureClassifier.calculateQualityScore(answer);
+
+ observabilityService.updateTraceMetadata(m -> {
+ m.setRetrievedDocumentPaths(contextResult.getRetrievedChunks());
+ m.setFailureClassification(classification.getFailureMode());
+ m.setQualityScore(qualityScore);
+ });
+
+ Map metadata = response.getMetadata() != null ?
+ new HashMap<>(response.getMetadata()) : new HashMap<>();
+ metadata.put("model", model);
+ metadata.put("provider", provider);
+ metadata.put("promptHash", buildResult.promptHash());
+ response.setMetadata(metadata);
+
+ response.setProvider(provider);
+ response.setModel(model);
+ response.setPromptHash(buildResult.promptHash());
+ response.setRetrievedDocumentCount(contextResult.getRetrievedChunks() != null ? contextResult.getRetrievedChunks().size() : 0);
+ response.setRetrievedDocumentPaths(contextResult.getRetrievedChunks());
+ response.setFallbackUsed(false);
+
+ if (contextResult.hasFailures()) {
+ response.setPartialFailures(contextResult.getPartialFailures());
+ }
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/QuickAddParseUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/QuickAddParseUseCase.java
index 6e4dd4826..b8f4deef5 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/QuickAddParseUseCase.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/QuickAddParseUseCase.java
@@ -1,31 +1,32 @@
package ch.goodone.backend.ai.application;
import ch.goodone.backend.ai.AiProperties;
-import ch.goodone.backend.ai.AiProviderService;
import ch.goodone.backend.ai.dto.QuickAddParseRequest;
import ch.goodone.backend.ai.dto.QuickAddParseResult;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
import ch.goodone.backend.ai.observability.AiObservabilityService;
-import ch.goodone.backend.ai.prompt.StructuredOutputService;
import ch.goodone.backend.service.TaskParserService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
/**
* Use case for parsing a quick-add task string into structured data using AI.
+ * Migrated to StructuredAiClient for strict structured output enforcement.
*/
@Service
@RequiredArgsConstructor
@Slf4j
public class QuickAddParseUseCase {
- private final AiProviderService aiProviderService;
- private final StructuredOutputService structuredOutputService;
+ private final StructuredAiClient structuredAiClient;
private final TaskParserService taskParserService;
private final AiObservabilityService observabilityService;
private final AiProperties aiProperties;
@@ -97,22 +98,25 @@ private QuickAddParseResult fromDeterministic(TaskParserService.ParsedTask d) {
}
private QuickAddParseResult callAi(String input) {
- String provider = aiProperties.getQuickAdd().getProvider();
- String model = aiProperties.getQuickAdd().getModel();
-
+ String promptTemplate = "";
try {
- return observabilityService.recordCall(
- "quick-add-parse",
- provider,
- model,
- "v2",
- input,
- () -> structuredOutputService.call(
- aiProviderService.getQuickAddChatModel(),
- parsePromptResource,
- Map.of("userInput", input),
- QuickAddParseResult.class
- )
+ promptTemplate = StreamUtils.copyToString(parsePromptResource.getInputStream(), StandardCharsets.UTF_8);
+ } catch (Exception e) {
+ log.error("Failed to load Quick Add prompt template", e);
+ return new QuickAddParseResult(null, null, null, 0.0, List.of(), List.of("Failed to load prompt template"), null, null, null, null, false);
+ }
+
+ String systemPrompt = promptTemplate
+ .replace("{userInput}", input)
+ .replace("{currentDate}", java.time.LocalDate.now().toString());
+
+ try {
+ return structuredAiClient.call(
+ "quick-add",
+ systemPrompt,
+ "Parse: " + input,
+ "quickAddParse",
+ QuickAddParseResult.class
);
} catch (Exception e) {
log.error("AI Quick Add Parse failed: {}", e.getMessage());
@@ -123,37 +127,30 @@ private QuickAddParseResult callAi(String input) {
private QuickAddParseResult merge(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic, boolean aiUsed) {
// Deterministic attributes take precedence as they are very reliable
- String finalDueDate = deterministic.dueDate() != null ? deterministic.dueDate().toString() : ai.dueDate();
- String finalDueTime = deterministic.dueTime() != null ? deterministic.dueTime().toString() : ai.dueTime();
- String finalPriority = deterministic.priority() != null ? deterministic.priority().name() : ai.priority();
- String finalStatus = deterministic.status() != null ? deterministic.status().name() : ai.status();
+ String finalDueDate = resolveDueDate(ai, deterministic);
+ String finalDueTime = resolveDueTime(ai, deterministic);
+ String finalPriority = resolvePriority(ai, deterministic);
+ String finalStatus = resolveStatus(ai, deterministic);
// For Title and Description, we trust AI if confidence is high,
// because it is better at summarizing and splitting into title/description.
// Otherwise, we use the cleaned title from the deterministic parser.
- String finalTitle;
- String finalDescription;
- if (ai.confidence() != null && ai.confidence() >= 0.8 && ai.title() != null && !ai.title().isEmpty()) {
- finalTitle = ai.title();
- finalDescription = ai.description();
- } else {
- finalTitle = (deterministic.title() != null && !deterministic.title().isEmpty()) ? deterministic.title() : ai.title();
- finalDescription = (ai.description() != null && !ai.description().isEmpty()) ? ai.description() : deterministic.description();
- }
+ String finalTitle = resolveTitle(ai, deterministic);
+ String finalDescription = resolveDescription(ai, deterministic);
// For tags, we merge both lists
java.util.Set allTags = new java.util.HashSet<>(deterministic.tags());
- if (ai.tags() != null) {
+ if (ai != null && ai.tags() != null) {
allTags.addAll(ai.tags());
}
return new QuickAddParseResult(
finalTitle,
finalDescription,
- ai.category(),
- ai.confidence(),
+ ai != null ? ai.category() : null,
+ ai != null ? ai.confidence() : 1.0,
new java.util.ArrayList<>(allTags),
- ai.assumptions(),
+ ai != null ? ai.assumptions() : List.of("AI parsing skipped or failed; using deterministic parse"),
finalDueDate,
finalDueTime,
finalPriority,
@@ -161,4 +158,50 @@ private QuickAddParseResult merge(QuickAddParseResult ai, TaskParserService.Pars
aiUsed
);
}
+
+ private String resolveDueDate(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (deterministic.dueDate() != null) {
+ return deterministic.dueDate().toString();
+ }
+ return (ai != null) ? ai.dueDate() : null;
+ }
+
+ private String resolveDueTime(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (deterministic.dueTime() != null) {
+ return deterministic.dueTime().toString();
+ }
+ return (ai != null) ? ai.dueTime() : null;
+ }
+
+ private String resolvePriority(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (deterministic.priority() != null) {
+ return deterministic.priority().name();
+ }
+ return (ai != null) ? ai.priority() : null;
+ }
+
+ private String resolveStatus(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (deterministic.status() != null) {
+ return deterministic.status().name();
+ }
+ return (ai != null) ? ai.status() : null;
+ }
+
+ private String resolveTitle(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (ai != null && ai.confidence() != null && ai.confidence() >= 0.8 && ai.title() != null && !ai.title().isEmpty()) {
+ return ai.title();
+ }
+ if (deterministic.title() != null && !deterministic.title().isEmpty()) {
+ return deterministic.title();
+ }
+ return (ai != null) ? ai.title() : null;
+ }
+
+ private String resolveDescription(QuickAddParseResult ai, TaskParserService.ParsedTask deterministic) {
+ if (ai != null && ai.confidence() != null && ai.confidence() >= 0.8 && ai.title() != null && !ai.title().isEmpty()) {
+ return ai.description();
+ }
+ return (ai != null && ai.description() != null && !ai.description().isEmpty()) ? ai.description() : deterministic.description();
+ }
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/ReleaseIntelligenceUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/ReleaseIntelligenceUseCase.java
new file mode 100644
index 000000000..b9e4b6202
--- /dev/null
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/ReleaseIntelligenceUseCase.java
@@ -0,0 +1,74 @@
+package ch.goodone.backend.ai.application;
+
+import ch.goodone.backend.ai.dto.EngineeringArtifact;
+import ch.goodone.backend.ai.dto.ReleaseReadinessResponse;
+import ch.goodone.backend.model.taxonomy.OutlookStatus;
+import ch.goodone.backend.ai.knowledge.EngineeringContextService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Use case for summarizing release readiness and highlighting risks.
+ */
+@Service
+@RequiredArgsConstructor
+@Slf4j
+public class ReleaseIntelligenceUseCase {
+
+ private final EngineeringContextService contextService;
+
+ public ReleaseReadinessResponse execute() {
+ List<EngineeringArtifact> allTasks = contextService.getAll().stream()
+ .filter(a -> a.getType() == EngineeringArtifact.Type.TASK)
+ .toList();
+
+ List<String> blockers = new ArrayList<>();
+ List<String> risks = new ArrayList<>();
+ List<String> missingDocs = new ArrayList<>();
+
+ for (EngineeringArtifact task : allTasks) {
+ String status = task.getStatus();
+ String priority = task.getPriority();
+
+ // P0/P1 tasks that are not DONE are blockers/risks
+ if (!"DONE".equalsIgnoreCase(status)) {
+ if ("P0".equalsIgnoreCase(priority)) {
+ blockers.add(String.format("Critical task %s (%s) is %s.", task.getId(), task.getTitle(), status));
+ } else if ("P1".equalsIgnoreCase(priority)) {
+ risks.add(String.format("Priority task %s (%s) is %s.", task.getId(), task.getTitle(), status));
+ }
+ }
+ }
+
+ // Basic check for ADR coverage of new features (example heuristic)
+ long adrCount = contextService.getAll().stream()
+ .filter(a -> a.getType() == EngineeringArtifact.Type.ADR)
+ .count();
+ if (adrCount < 50) { // Arbitrary low threshold for demo purposes
+ risks.add("Architecture documentation (ADRs) seems incomplete for a stable release.");
+ }
+
+ OutlookStatus finalStatus = OutlookStatus.READY;
+ if (!blockers.isEmpty()) {
+ finalStatus = OutlookStatus.BLOCKED;
+ } else if (!risks.isEmpty()) {
+ finalStatus = OutlookStatus.CAUTION;
+ }
+
+ String summary = String.format("Release evaluation: %s. Identified %d blockers and %d risks from the current engineering context.",
+ finalStatus, blockers.size(), risks.size());
+
+ return ReleaseReadinessResponse.builder()
+ .status(finalStatus)
+ .majorBlockers(blockers)
+ .risks(risks)
+ .missingDocumentation(missingDocs)
+ .executiveSummary(summary)
+ .build();
+ }
+}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveAiService.java b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveAiService.java
index a8b6ed8b7..462d49982 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveAiService.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveAiService.java
@@ -1,14 +1,16 @@
package ch.goodone.backend.ai.application;
-import ch.goodone.backend.ai.AiProviderService;
import ch.goodone.backend.ai.dto.RetrospectiveRequest;
import ch.goodone.backend.ai.dto.RetrospectiveResponse;
-import ch.goodone.backend.ai.prompt.StructuredOutputService;
+import ch.goodone.backend.ai.exception.AiException;
+import ch.goodone.backend.ai.infrastructure.StructuredAiClient;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
@@ -16,13 +18,19 @@
@RequiredArgsConstructor
public class RetrospectiveAiService {
- private final AiProviderService aiProviderService;
- private final StructuredOutputService structuredOutputService;
+ private final StructuredAiClient structuredAiClient;
@Value("classpath:prompts/retrospective/v1/generate.st")
private Resource generatePromptResource;
public RetrospectiveResponse generate(RetrospectiveRequest request, String context) {
+ String promptTemplate = "";
+ try {
+ promptTemplate = StreamUtils.copyToString(generatePromptResource.getInputStream(), StandardCharsets.UTF_8);
+ } catch (Exception e) {
+ throw new AiException("Failed to load Retrospective prompt template", e);
+ }
+
Map templateModel = new HashMap<>();
templateModel.put("fromDate", request.getFromDate());
templateModel.put("toDate", request.getToDate());
@@ -31,11 +39,23 @@ public RetrospectiveResponse generate(RetrospectiveRequest request, String conte
templateModel.put("mode", request.getMode() != null ? request.getMode() : "Standard");
templateModel.put("context", context);
- return structuredOutputService.call(
- aiProviderService.getRetrospectiveChatModel(),
- generatePromptResource,
- templateModel,
+ String systemPrompt = renderTemplate(promptTemplate, templateModel);
+
+ return structuredAiClient.call(
+ "retrospective",
+ systemPrompt,
+ "Generate retrospective report",
+ "retrospectiveCluster",
RetrospectiveResponse.class
);
}
+
+ private String renderTemplate(String template, Map<String, Object> model) {
+ String rendered = template;
+ for (Map.Entry<String, Object> entry : model.entrySet()) {
+ rendered = rendered.replace("{" + entry.getKey() + "}", entry.getValue() != null ? entry.getValue().toString() : "");
+ }
+ return rendered;
+ }
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCase.java b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCase.java
index 548bef92e..830d87955 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCase.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCase.java
@@ -11,3 +11,4 @@ public interface RetrospectiveUseCase {
List getAvailableTasksets();
}
+
diff --git a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCaseImpl.java b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCaseImpl.java
index 11b7b79c3..565b27587 100644
--- a/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCaseImpl.java
+++ b/backend/src/main/java/ch/goodone/backend/ai/application/RetrospectiveUseCaseImpl.java
@@ -1,10 +1,15 @@
package ch.goodone.backend.ai.application;
import ch.goodone.backend.ai.AiProperties;
+import ch.goodone.backend.ai.cache.AiResponseCacheService;
+import ch.goodone.backend.ai.prompt.PromptAssemblyService;
import ch.goodone.backend.ai.dto.RetrospectiveRequest;
import ch.goodone.backend.ai.dto.RetrospectiveResponse;
import ch.goodone.backend.ai.dto.TasksetInfo;
+import ch.goodone.backend.ai.observability.AiCallParams;
import ch.goodone.backend.ai.observability.AiObservabilityService;
+import ch.goodone.backend.ai.AiRoutingService;
+import ch.goodone.backend.ai.application.TaskGroupResolutionService;
import ch.goodone.backend.model.DocChunk;
import ch.goodone.backend.model.DocSource;
import ch.goodone.backend.repository.DocChunkRepository;
@@ -21,12 +26,23 @@
@Slf4j
public class RetrospectiveUseCaseImpl implements RetrospectiveUseCase {
+ private static final String OP_RETROSPECTIVE_GENERATE = "retrospective-generate";
+
private final DocSourceRepository sourceRepository;
private final DocChunkRepository chunkRepository;
private final RetrospectiveAiService aiService;
private final AiObservabilityService observabilityService;
private final AiProperties aiProperties;
private final TasksetService tasksetService;
+ private final PromptAssemblyService promptAssemblyService;
+ private final TaskGroupResolutionService taskGroupResolutionService;
+ private final AiResponseCacheService aiCacheService;
+ private final AiRoutingService aiRoutingService;
+
+ private static final String PATH_TASKSET_PREFIX = "/taskset-";
+ private static final String PATH_TASKSET_PREFIX_DASH = "taskset-";
+ private static final String PATH_SPRINT_PREFIX = "/sprint-";
+ private static final String PATH_SPRINT_PREFIX_DASH = "sprint-";
@Override
public List getAvailableTasksets() {
@@ -37,63 +53,115 @@ public List getAvailableTasksets() {
public RetrospectiveResponse generateRetrospective(RetrospectiveRequest request) {
log.info("Generating retrospective for request: {}", request);
- // 1. Find relevant sources (tasks)
- List