From 6e572328f557bcf584077f00696e294fb67f4d0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Bre=C3=B1a=20Moral?= Date: Thu, 7 May 2026 14:46:01 +0200 Subject: [PATCH 1/3] feat(Skills): Adding Kafka & MongoDB support for Spring Boot, Quarkus, Micronaut & Associated Agents --- .cursor/agents/robot-coordinator.md | 12 +- .cursor/agents/robot-micronaut-coder.md | 4 + .cursor/agents/robot-quarkus-coder.md | 8 +- .cursor/agents/robot-spring-boot-coder.md | 8 +- .../resources/skill-indexes/314-skill.xml | 41 ++ .../resources/skill-indexes/315-skill.xml | 41 ++ .../resources/skill-indexes/414-skill.xml | 41 ++ .../resources/skill-indexes/415-skill.xml | 41 ++ .../resources/skill-indexes/514-skill.xml | 41 ++ .../resources/skill-indexes/515-skill.xml | 41 ++ .../314-frameworks-spring-kafka.xml | 195 ++++++++ .../315-frameworks-spring-mongodb.xml | 198 ++++++++ .../414-frameworks-quarkus-kafka.xml | 164 +++++++ .../415-frameworks-quarkus-mongodb.xml | 192 ++++++++ .../514-frameworks-micronaut-kafka.xml | 171 +++++++ .../515-frameworks-micronaut-mongodb.xml | 197 ++++++++ .../assets/agents/robot-coordinator.md | 12 +- .../assets/agents/robot-micronaut-coder.md | 4 + .../assets/agents/robot-quarkus-coder.md | 8 +- .../assets/agents/robot-spring-boot-coder.md | 8 +- .../src/main/resources/skills.xml | 30 ++ .../references/003-agents-installation.md | 32 +- skills/314-frameworks-spring-kafka/SKILL.md | 48 ++ .../references/314-frameworks-spring-kafka.md | 396 ++++++++++++++++ skills/315-frameworks-spring-mongodb/SKILL.md | 48 ++ .../315-frameworks-spring-mongodb.md | 439 ++++++++++++++++++ skills/414-frameworks-quarkus-kafka/SKILL.md | 48 ++ .../414-frameworks-quarkus-kafka.md | 320 +++++++++++++ .../415-frameworks-quarkus-mongodb/SKILL.md | 48 ++ .../415-frameworks-quarkus-mongodb.md | 350 ++++++++++++++ .../514-frameworks-micronaut-kafka/SKILL.md | 48 ++ .../514-frameworks-micronaut-kafka.md | 362 +++++++++++++++ .../515-frameworks-micronaut-mongodb/SKILL.md | 48 
++ .../515-frameworks-micronaut-mongodb.md | 412 ++++++++++++++++ 34 files changed, 4026 insertions(+), 30 deletions(-) create mode 100644 skills-generator/src/main/resources/skill-indexes/314-skill.xml create mode 100644 skills-generator/src/main/resources/skill-indexes/315-skill.xml create mode 100644 skills-generator/src/main/resources/skill-indexes/414-skill.xml create mode 100644 skills-generator/src/main/resources/skill-indexes/415-skill.xml create mode 100644 skills-generator/src/main/resources/skill-indexes/514-skill.xml create mode 100644 skills-generator/src/main/resources/skill-indexes/515-skill.xml create mode 100644 skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml create mode 100644 skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml create mode 100644 skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml create mode 100644 skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml create mode 100644 skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml create mode 100644 skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml create mode 100644 skills/314-frameworks-spring-kafka/SKILL.md create mode 100644 skills/314-frameworks-spring-kafka/references/314-frameworks-spring-kafka.md create mode 100644 skills/315-frameworks-spring-mongodb/SKILL.md create mode 100644 skills/315-frameworks-spring-mongodb/references/315-frameworks-spring-mongodb.md create mode 100644 skills/414-frameworks-quarkus-kafka/SKILL.md create mode 100644 skills/414-frameworks-quarkus-kafka/references/414-frameworks-quarkus-kafka.md create mode 100644 skills/415-frameworks-quarkus-mongodb/SKILL.md create mode 100644 skills/415-frameworks-quarkus-mongodb/references/415-frameworks-quarkus-mongodb.md create mode 100644 skills/514-frameworks-micronaut-kafka/SKILL.md create mode 100644 
skills/514-frameworks-micronaut-kafka/references/514-frameworks-micronaut-kafka.md create mode 100644 skills/515-frameworks-micronaut-mongodb/SKILL.md create mode 100644 skills/515-frameworks-micronaut-mongodb/references/515-frameworks-micronaut-mongodb.md diff --git a/.cursor/agents/robot-coordinator.md b/.cursor/agents/robot-coordinator.md index 8226e585..09a0f2ea 100644 --- a/.cursor/agents/robot-coordinator.md +++ b/.cursor/agents/robot-coordinator.md @@ -15,9 +15,9 @@ You are a **Coordinator** for Java Enterprise Development. Your primary responsi ### Collaboration partners - **[@robot-java-coder](robot-java-coder.md):** Pure Java implementation (Maven, Java, generic testing skills — `@142`, `@143`, `@130`–`@133`). Use when **Framework identification** yields plain Java, CLI-only, or a stack without a dedicated framework agent here. -- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, etc. — `@301`, `@302`, `@311`–`@313`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. -- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Quarkus tests — `@401`, `@402`, `@411`–`@413`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. -- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@513`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. 
+- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, Kafka messaging, MongoDB — `@301`, `@302`, `@311`–`@315`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. +- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Kafka messaging, MongoDB, Quarkus tests — `@401`, `@402`, `@411`–`@415`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. +- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, Kafka messaging, MongoDB, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@515`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. - **Parallel column drives grouping:** The plan's task list table includes a **Parallel** column (or **Agent** if the plan uses that name). Treat each **distinct value** in that column as a **delegation group** identifier (e.g. `A1`, `A2`, `A3-timeout`, `A3-retry`, `A4`). - **One logical developer per group:** For each distinct **Parallel** value, assign a **separate** instance of the **same** chosen implementation agent (`robot-java-coder`, `robot-spring-boot-coder`, `robot-quarkus-coder`, or `robot-micronaut-coder`) whose scope is **only** the rows for that value. Label every handoff, e.g. 
`Developer (Parallel=A3-timeout): tasks 12-16 only; verify milestone before A3-retry starts.` @@ -36,9 +36,9 @@ When you analyze the task, **determine the target framework** from requirements | Finding | Delegate to | |---------|-------------| -| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests or tasks) | [@robot-spring-boot-coder](robot-spring-boot-coder.md) | -| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, or Quarkus-specific tasks) | [@robot-quarkus-coder](robot-quarkus-coder.md) | -| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, or Micronaut-specific tasks) | [@robot-micronaut-coder](robot-micronaut-coder.md) | +| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests, Kafka with `spring-kafka`, or MongoDB with `spring-data-mongodb`) | [@robot-spring-boot-coder](robot-spring-boot-coder.md) | +| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, SmallRye Reactive Messaging, or Quarkus MongoDB Panache) | [@robot-quarkus-coder](robot-quarkus-coder.md) | +| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, `micronaut-kafka`, or `micronaut-data-mongodb`) | [@robot-micronaut-coder](robot-micronaut-coder.md) | | No Spring Boot, Quarkus, or Micronaut; plain Java, other framework not covered by a dedicated agent here, or requirements are framework-neutral | [@robot-java-coder](robot-java-coder.md) | **If mixed or ambiguous:** Prefer **robot-spring-boot-coder** when **any** authoritative requirement document commits to Spring Boot; prefer **robot-quarkus-coder** when it commits to Quarkus; prefer **robot-micronaut-coder** when it commits to Micronaut; otherwise prefer **robot-java-coder** and state 
the ambiguity in the handoff so the implementer can align with `pom.xml` / ADRs. diff --git a/.cursor/agents/robot-micronaut-coder.md b/.cursor/agents/robot-micronaut-coder.md index eeed8001..0bdb6d51 100644 --- a/.cursor/agents/robot-micronaut-coder.md +++ b/.cursor/agents/robot-micronaut-coder.md @@ -11,6 +11,8 @@ You are an **Implementation Specialist** for Micronaut projects. You focus on wr - Implement `@Controller` HTTP endpoints, `@Singleton` application services, and `@Factory` beans following Micronaut conventions. - Configure Micronaut `application.yml` / `application.properties`, environments, and `@Requires` / `@ConfigurationProperties`. - Apply **Micronaut Data** (`@MappedEntity`, repositories, `@Query`, transactions) for relational persistence, or **raw JDBC** (`DataSource`, `PreparedStatement`) when `@511-frameworks-micronaut-jdbc` fits better. +- Integrate Apache Kafka producers and consumers using `@KafkaClient`, `@KafkaListener`, `@KafkaKey`, and `KafkaListenerExceptionHandler`. +- Integrate MongoDB using Micronaut Data MongoDB (`@MappedEntity`, `@MongoRepository`, `@MongoFindQuery`). - Write Micronaut tests (`@MicronautTest`, `@MockBean`, `HttpClient`, `TestPropertyProvider` with Testcontainers). - Ensure secure coding practices for web APIs. 
@@ -30,6 +32,8 @@ Apply guidance from these Skills when relevant: - `@511-frameworks-micronaut-jdbc`: programmatic JDBC (DataSource, SQL, transactions) - `@512-frameworks-micronaut-data`: Micronaut Data (repositories, entities, generated SQL) - `@513-frameworks-micronaut-db-migrations-flyway`: Micronaut DB migrations (Flyway) +- `@514-frameworks-micronaut-kafka`: Kafka messaging (@KafkaClient, @KafkaListener, retries, dead-letter routing) +- `@515-frameworks-micronaut-mongodb`: MongoDB (@MongoRepository, @MappedEntity, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies diff --git a/.cursor/agents/robot-quarkus-coder.md b/.cursor/agents/robot-quarkus-coder.md index 9b8ca2ec..00c74b2c 100644 --- a/.cursor/agents/robot-quarkus-coder.md +++ b/.cursor/agents/robot-quarkus-coder.md @@ -10,8 +10,10 @@ You are an **Implementation Specialist** for Quarkus projects. You focus on writ - Implement Jakarta REST resources, CDI services, and repositories following Quarkus conventions. - Configure Quarkus extensions, profiles (`%dev`, `%test`, `%prod`), and `application.properties`. -- Apply Quarkus JDBC or Hibernate ORM Panache for persistence. -- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, REST Assured). +- Apply Quarkus JDBC or Hibernate ORM Panache for relational persistence. +- Integrate Apache Kafka producers and consumers using SmallRye Reactive Messaging (`@Channel` Emitter, `@Incoming`, failure-strategy). +- Integrate MongoDB using Quarkus MongoDB Panache (`PanacheMongoEntity`, `PanacheMongoRepository`). +- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, `@TestTransaction`, REST Assured, Dev Services). - Ensure secure coding practices for web APIs. 
### Coding Standards @@ -29,6 +31,8 @@ Apply guidance from these Skills when relevant: - `@411-frameworks-quarkus-jdbc`: Quarkus JDBC - `@412-frameworks-quarkus-panache`: Quarkus Panache - `@413-frameworks-quarkus-db-migrations-flyway`: Quarkus DB migrations (Flyway) +- `@414-frameworks-quarkus-kafka`: Kafka messaging (SmallRye Reactive Messaging, Emitter, @Incoming, failure strategies) +- `@415-frameworks-quarkus-mongodb`: MongoDB (Panache Mongo entities, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing Strategies diff --git a/.cursor/agents/robot-spring-boot-coder.md b/.cursor/agents/robot-spring-boot-coder.md index cfbb2631..0126e621 100644 --- a/.cursor/agents/robot-spring-boot-coder.md +++ b/.cursor/agents/robot-spring-boot-coder.md @@ -10,8 +10,10 @@ You are an **Implementation Specialist** for Spring Boot projects. You focus on - Implement REST controllers, services, and repositories following Spring Boot conventions. - Configure Spring Boot auto-configuration, profiles, and `application.yml`. -- Apply Spring Data JDBC for persistence. -- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@SpringBootTest`). +- Apply Spring Data JDBC for relational persistence. +- Integrate Apache Kafka producers and listeners using `spring-kafka` (typed templates, retries, dead-letter topics). +- Integrate MongoDB using Spring Data MongoDB (documents, repositories, error handling). +- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@DataMongoTest`, `@SpringBootTest`, `@EmbeddedKafka`). - Ensure secure coding practices for web APIs. 
### Coding Standards @@ -29,6 +31,8 @@ Apply guidance from these Skills when relevant: - `@311-frameworks-spring-jdbc`: Spring JDBC - `@312-frameworks-spring-data-jdbc`: Spring Data JDBC - `@313-frameworks-spring-db-migrations-flyway`: Flyway database migrations +- `@314-frameworks-spring-kafka`: Kafka messaging (producers, listeners, retries, dead-letter topics) +- `@315-frameworks-spring-mongodb`: MongoDB (document design, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies diff --git a/skills-generator/src/main/resources/skill-indexes/314-skill.xml b/skills-generator/src/main/resources/skill-indexes/314-skill.xml new file mode 100644 index 00000000..946211ef --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/314-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need to design or implement Kafka messaging in Spring Boot — including topic design, producer/consumer implementation with Spring for Apache Kafka, retries and dead-letter topics, idempotency, and error handling. This should trigger for requests such as Add Kafka in Spring Boot; Review Spring Kafka consumers; Improve retries and DLT in Spring Kafka. + + + Spring Boot — Kafka messaging + + + + Compile before messaging refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add Kafka in Spring Boot + Review Spring Kafka consumers/producers + Improve retries, dead-letter topics, or idempotency in Spring Kafka + + + + + Read reference and assess project contextRead `references/314-frameworks-spring-kafka.md` and inspect current messaging setup before proposing changes. + Gather scope and decide target improvementsIdentify reliability and throughput goals and define the minimum safe set of changes. + Apply framework-aligned changesImplement/refactor Spring Kafka configuration, producer/consumer logic, and failure handling. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-indexes/315-skill.xml b/skills-generator/src/main/resources/skill-indexes/315-skill.xml new file mode 100644 index 00000000..f0e6bfcc --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/315-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need to design or implement MongoDB data access in Spring Boot — including document modeling, Spring Data Mongo repositories/templates, indexing, optimistic concurrency, and error handling. This should trigger for requests such as Add MongoDB in Spring Boot; Review Spring Data Mongo design; Improve error handling for Mongo writes. + + + Spring Boot — MongoDB + + + + Compile before MongoDB refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add MongoDB in Spring Boot + Review Spring Data Mongo repositories/documents + Improve duplicate key handling, retries, or optimistic locking in Mongo flows + + + + + Read reference and assess project contextRead `references/315-frameworks-spring-mongodb.md` and inspect persistence setup before proposing changes. + Gather scope and decide target improvementsIdentify data model, consistency, and query requirements to define safe improvements. + Apply framework-aligned changesImplement/refactor mappings, repositories, indexes, and failure handling policies. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-indexes/414-skill.xml b/skills-generator/src/main/resources/skill-indexes/414-skill.xml new file mode 100644 index 00000000..64600576 --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/414-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need Kafka messaging in Quarkus with SmallRye Reactive Messaging — including channel/topic design, serialization, ack/failure strategies, retries/DLQ, and error handling. This should trigger for requests such as Add Kafka in Quarkus; Review Reactive Messaging consumers; Improve failure handling for Quarkus Kafka. + + + Quarkus — Kafka messaging + + + + Compile before messaging refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add Kafka in Quarkus + Review Quarkus Reactive Messaging consumers/producers + Improve retries, dead-letter handling, or idempotency in Quarkus Kafka + + + + + Read reference and assess project contextRead `references/414-frameworks-quarkus-kafka.md` and inspect current messaging setup before proposing changes. + Gather scope and decide target improvementsIdentify delivery semantics and resilience goals to define safe improvements. + Apply framework-aligned changesImplement/refactor channels, serializers, and failure strategies in Reactive Messaging. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-indexes/415-skill.xml b/skills-generator/src/main/resources/skill-indexes/415-skill.xml new file mode 100644 index 00000000..4d9b7f87 --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/415-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need MongoDB persistence in Quarkus — including Panache Mongo entities/repositories, document design, indexes, transactions where applicable, and error handling. This should trigger for requests such as Add MongoDB in Quarkus; Review Quarkus Mongo Panache design; Improve Mongo error handling in Quarkus services. + + + Quarkus — MongoDB + + + + Compile before MongoDB refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add MongoDB in Quarkus + Review Quarkus Mongo Panache entities/repositories + Improve duplicate key handling, retry policy, or optimistic locking in Quarkus Mongo + + + + + Read reference and assess project contextRead `references/415-frameworks-quarkus-mongodb.md` and inspect persistence setup before proposing changes. + Gather scope and decide target improvementsIdentify model/query consistency needs and define safe improvements. + Apply framework-aligned changesImplement/refactor Panache Mongo mappings, repository access, and failure handling. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-indexes/514-skill.xml b/skills-generator/src/main/resources/skill-indexes/514-skill.xml new file mode 100644 index 00000000..b6ff6954 --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/514-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need Kafka messaging in Micronaut — including @KafkaClient and @KafkaListener design, topic/partition strategy, serialization, retries and dead-letter processing, and error handling. This should trigger for requests such as Add Kafka in Micronaut; Review Micronaut Kafka listeners; Improve retry and failure handling for Micronaut Kafka. + + + Micronaut — Kafka messaging + + + + Compile before messaging refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add Kafka in Micronaut + Review Micronaut Kafka consumers/producers + Improve retries, dead-letter handling, or idempotency in Micronaut Kafka + + + + + Read reference and assess project contextRead `references/514-frameworks-micronaut-kafka.md` and inspect current messaging setup before proposing changes. + Gather scope and decide target improvementsIdentify delivery guarantees and resilience requirements to define safe improvements. + Apply framework-aligned changesImplement/refactor clients, listeners, and failure strategies in Micronaut Kafka. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-indexes/515-skill.xml b/skills-generator/src/main/resources/skill-indexes/515-skill.xml new file mode 100644 index 00000000..74eeb420 --- /dev/null +++ b/skills-generator/src/main/resources/skill-indexes/515-skill.xml @@ -0,0 +1,41 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Use when you need MongoDB persistence in Micronaut — including @MongoRepository design, document modeling, indexes, query patterns, and error handling. This should trigger for requests such as Add MongoDB in Micronaut; Review Micronaut Data Mongo design; Improve error handling for Micronaut Mongo operations. + + + Micronaut — MongoDB + + + + Compile before MongoDB refactors; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change + **SAFETY**: If compilation fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements + **BEFORE APPLYING**: Read the reference for detailed rules and examples + + + + + + Add MongoDB in Micronaut + Review Micronaut Mongo entities/repositories + Improve duplicate key handling, retries, or optimistic locking in Micronaut Mongo + + + + + Read reference and assess project contextRead `references/515-frameworks-micronaut-mongodb.md` and inspect persistence setup before proposing changes. + Gather scope and decide target improvementsIdentify model and consistency requirements and define safe improvements. + Apply framework-aligned changesImplement/refactor documents, repositories, indexes, and error handling. + Run verification and report resultsExecute build/tests and summarize what changed, what was verified, and follow-up actions. + + diff --git a/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml b/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml new file mode 100644 index 00000000..ace3596a --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml @@ -0,0 +1,195 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Spring Boot Kafka Guidelines + Use when you need Kafka with Spring Boot (`spring-kafka`) and want examples for design, implementation, and error handling with retries, dead-letter topics, and idempotent consumers. + + You are a Senior software engineer with extensive experience in Spring Boot and Apache Kafka + + Design resilient Kafka-based flows in Spring Boot with clear topic contracts, robust producer/consumer implementations, and predictable failure handling. + + + Compile first; stop on failure; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Topic versioning and key strategy + + + `. + ]]> + + + + + + + createEvent() { + return Map.of("type", "order", "payload", "..."); + } +}]]> + + + + + + Implementation + Producer + listener with manual service boundary + + + + + + + template; + + OrderEventPublisher(KafkaTemplate template) { + this.template = template; + } + + void publish(OrderCreatedEvent event) { + template.send("orders.events.v1", event.orderId(), event); + } +} + +@Component +class OrderEventListener { + @KafkaListener(topics = "orders.events.v1", groupId = "billing-service") + void onEvent(OrderCreatedEvent event) { + // Delegate to application service; keep listener thin + // billingService.processOrder(event); + } +}]]> + + + + + + + + + Error handling + Retries + dead-letter topic + idempotency + + + + + + + template) { + var recoverer = new DeadLetterPublishingRecoverer(template); + // Retry 3 times with 1 second backoff before sending to DLT + return new DefaultErrorHandler(recoverer, new FixedBackOff(1000L, 3)); + } +}]]> + + + + + + + + + + **ANALYZE** messaging code: producer/consumer implementations, topic naming, serialization, and error handling strategies + **CATEGORIZE** issues by impact (RELIABILITY, MAINTAINABILITY, PERFORMANCE) + **APPLY** Spring Kafka-aligned fixes: configure proper error handlers, use typed payloads, ensure idempotency + **IMPLEMENT** changes consistently across producer and consumer configurations + **EXPLAIN** trade-offs (e.g., at-least-once vs exactly-once delivery, retry backoff strategies) + **TEST** messaging behavior with `@EmbeddedKafka` or Testcontainers + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **POISON PILLS**: Always configure a Dead 
Letter Topic (DLT) or explicit error handler to prevent blocking the partition + **IDEMPOTENCY**: Ensure consumer logic is idempotent to handle retries and rebalances safely + **TRANSACTIONS**: If using Kafka transactions, ensure `transactional.id` is configured and align with database transactions if needed + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml b/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml new file mode 100644 index 00000000..b13d8817 --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml @@ -0,0 +1,198 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Spring Boot MongoDB Guidelines + Use when you need MongoDB with Spring Data MongoDB and want examples for design, implementation, and error handling for robust document persistence. + + You are a Senior software engineer with extensive experience in Spring Boot and MongoDB + + Design clear document models, implement robust Spring Data Mongo repositories/services, and handle Mongo failures explicitly. + + + Compile first; stop on failure; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Document boundaries and indexes + + + + + + + + + + + + + + + + Implementation + Repository + service composition + + + + + + + { + Optional findByOrderNumber(String orderNumber); +} + +@Service +class OrderService { + private final OrderRepository repository; + + OrderService(OrderRepository repository) { + this.repository = repository; + } + + OrderDocument create(OrderDocument doc) { + return repository.save(doc); + } +}]]> + + + + + + + + + Error handling + Duplicate key and optimistic locking + + + + + + + + + + + + + + + + + **ANALYZE** MongoDB code: document design, repository interfaces, indexing strategies, and error handling + **CATEGORIZE** issues by impact (CORRECTNESS, PERFORMANCE, DATA INTEGRITY) + **APPLY** Spring Data MongoDB-aligned fixes: use `@Document`, `@Id`, `@Version`, and proper index annotations + **IMPLEMENT** changes so schema and queries stay consistent + **EXPLAIN** trade-offs (e.g., embedding vs referencing documents, index overhead) + **TEST** repository behavior with `@DataMongoTest` and Testcontainers + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **INDEXING**: Ensure indexes are created (either via Spring Data auto-index creation in dev, or explicitly via scripts in prod) + **CONCURRENCY**: Use `@Version` to prevent lost updates in concurrent environments + **TRANSACTIONS**: MongoDB supports multi-document transactions; ensure replica sets are used if `@Transactional` is applied + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml b/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml new file mode 100644 index 
00000000..fd3172b2 --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml @@ -0,0 +1,164 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Quarkus Kafka Guidelines + Use when you need Kafka in Quarkus with SmallRye Reactive Messaging and want examples for design, implementation, and error handling. + + You are a Senior software engineer with extensive experience in Quarkus and Kafka + + Build resilient Quarkus Kafka messaging with explicit contracts, clear channel wiring, and controlled failure behavior using SmallRye Reactive Messaging. + + + Compile first; stop on failure; verify after changes. + + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Channel naming and event contract stability + + + + + + + + + + + + + + + + Implementation + Reactive Messaging producer and consumer + + + + + + + emitter; + + void publish(OrderCreatedEvent event) { + emitter.send(event); + } +} + +@ApplicationScoped +class OrderConsumer { + @Incoming("orders-in") + public Uni onMessage(OrderCreatedEvent event) { + // Delegate to app service, returning a Uni for reactive processing + return Uni.createFrom().voidItem(); + } +}]]> + + + + + + + + + Error handling + Nack strategy and dead-letter queue + + + + + + + + + + onMessage(Message message) { + try { + // process(message.getPayload()); + return message.ack(); + } catch (Exception e) { + return message.ack(); // Bad: acknowledges failed records, losing data + } + } +}]]> + + + + + + + **ANALYZE** messaging code: `@Incoming`/`@Outgoing` usage, channel configuration, and failure strategies + **CATEGORIZE** issues by impact (RELIABILITY, MAINTAINABILITY, PERFORMANCE) + **APPLY** Quarkus Kafka-aligned fixes: configure DLQs, use typed payloads, ensure non-blocking processing + **IMPLEMENT** changes consistently across 
`application.properties` and Java code + **EXPLAIN** trade-offs (e.g., DLQ vs ignore, throttled vs latest commit strategies) + **TEST** messaging behavior with `@QuarkusTest` and Dev Services for Kafka + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **POISON PILLS**: Use `failure-strategy=dead-letter-queue` to prevent poison pills from blocking the consumer + **BLOCKING CODE**: Do not block the event loop in `@Incoming` methods; use `@Blocking` if synchronous processing is required + **ACKNOWLEDGEMENT**: Let the framework handle ACKs automatically when returning `Uni`/`CompletionStage`, or handle them explicitly but correctly on failure + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml b/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml new file mode 100644 index 00000000..5fabdd4f --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml @@ -0,0 +1,192 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Quarkus MongoDB Guidelines + Use when you need MongoDB in Quarkus with Mongo Panache and want examples for design, implementation, and error handling. + + You are a Senior software engineer with extensive experience in Quarkus and MongoDB + + Apply Quarkus MongoDB patterns with clean document modeling, maintainable repositories/services, and robust failure handling using Panache. + + + Compile first; stop on failure; verify after changes. 
+ + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Document schema and indexes + + + + + + + + + + + + + + + + Implementation + Panache repository with explicit query methods + + + + + + + { + public Optional findByOrderNumber(String orderNumber) { + return find("orderNumber", orderNumber).firstResultOptional(); + } +}]]> + + + + + + + + + Error handling + Duplicate keys and transient failures + + + + + + + + + + + + + + + + + **ANALYZE** MongoDB code: document design, repository interfaces, indexing strategies, and error handling + **CATEGORIZE** issues by impact (CORRECTNESS, PERFORMANCE, DATA INTEGRITY) + **APPLY** Quarkus Mongo Panache-aligned fixes: use `@MongoEntity`, `PanacheMongoRepository`, and proper exception handling + **IMPLEMENT** changes so schema and queries stay consistent + **EXPLAIN** trade-offs (e.g., active record vs repository pattern, embedding vs referencing) + **TEST** repository behavior with `@QuarkusTest` and Dev Services for MongoDB + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **INDEXING**: Ensure indexes are created explicitly, as Quarkus does not auto-create them by default in production + **INJECTION**: Avoid building raw JSON queries using string concatenation; use Panache's parameterized `find()` methods + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml b/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml new file mode 100644 index 00000000..3ea472f9 --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml @@ -0,0 +1,171 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Micronaut Kafka 
Guidelines + Use when you need Kafka in Micronaut and want examples for design, implementation, and error handling with @KafkaClient and @KafkaListener. + + You are a Senior software engineer with extensive experience in Micronaut and Kafka + + Design and implement reliable Micronaut Kafka flows with explicit contracts and resilient consumer error handling. + + + Compile first; stop on failure; verify after changes. + + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Topic and consumer-group strategy + + + + + + + + + + + + + + + + Implementation + @KafkaClient and @KafkaListener + + + + + + + + + + + + + + + + Error handling + Retry with backoff and dead-letter strategy + + + + + + + + + + + + + + + + + **ANALYZE** messaging code: `@KafkaClient`/`@KafkaListener` usage, topic naming, serialization, and error handling strategies + **CATEGORIZE** issues by impact (RELIABILITY, MAINTAINABILITY, PERFORMANCE) + **APPLY** Micronaut Kafka-aligned fixes: configure proper error strategies, use typed payloads, ensure idempotency + **IMPLEMENT** changes consistently across `application.yml` and Java code + **EXPLAIN** trade-offs (e.g., retry vs DLQ, consumer group isolation) + **TEST** messaging behavior with `@MicronautTest` and Testcontainers + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **POISON PILLS**: Use `RETRY_ON_ERROR` or explicit exception handlers to prevent poison pills from silently being acknowledged + **CONSUMER GROUPS**: Always specify a `groupId` in `@KafkaListener` to prevent random group generation and message loss on restart + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml 
b/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml new file mode 100644 index 00000000..bdc2b6f1 --- /dev/null +++ b/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml @@ -0,0 +1,197 @@ + + + + Juan Antonio Breña Moral + 0.15.0-SNAPSHOT + Apache-2.0 + Micronaut MongoDB Guidelines + Use when you need MongoDB in Micronaut Data and want examples for design, implementation, and error handling. + + You are a Senior software engineer with extensive experience in Micronaut and MongoDB + + Apply Micronaut MongoDB best practices with explicit document design, repository implementation, and safe handling of persistence failures. + + + Compile first; stop on failure; verify after changes. + + **MANDATORY**: Run `./mvnw compile` before applying changes + **SAFETY**: If compile fails, stop immediately + **VERIFY**: Run `./mvnw clean verify` after changes + + + + + + + Design + Document shape and unique keys + + + + + + + + + + + + + + + + Implementation + @MongoRepository with typed query methods + + + + + + + { + Optional findByOrderNumber(String orderNumber); +} + +@Singleton +class OrderService { + private final OrderRepository repository; + + OrderService(OrderRepository repository) { + this.repository = repository; + } + + OrderDocument create(OrderDocument doc) { + return repository.save(doc); + } +}]]> + + + + + + + + + Error handling + Duplicate key and transient timeout handling + + + + + + + + + + + + + + + + + **ANALYZE** MongoDB code: document design, repository interfaces, indexing strategies, and error handling + **CATEGORIZE** issues by impact (CORRECTNESS, PERFORMANCE, DATA INTEGRITY) + **APPLY** Micronaut Data MongoDB-aligned fixes: use `@MappedEntity`, `@MongoRepository`, and proper exception handling + **IMPLEMENT** changes so schema and queries stay consistent + **EXPLAIN** trade-offs (e.g., embedding vs referencing, index overhead) + **TEST** repository behavior with 
`@MicronautTest` and Testcontainers + **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + + + + + **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY refactoring + **INDEXING**: Ensure indexes are created explicitly, as Micronaut Data may not auto-create them in production + **INJECTION**: Avoid building raw JSON queries using string concatenation; use derived queries or parameterized `@Query` + + + \ No newline at end of file diff --git a/skills-generator/src/main/resources/skill-references/assets/agents/robot-coordinator.md b/skills-generator/src/main/resources/skill-references/assets/agents/robot-coordinator.md index 8226e585..09a0f2ea 100644 --- a/skills-generator/src/main/resources/skill-references/assets/agents/robot-coordinator.md +++ b/skills-generator/src/main/resources/skill-references/assets/agents/robot-coordinator.md @@ -15,9 +15,9 @@ You are a **Coordinator** for Java Enterprise Development. Your primary responsi ### Collaboration partners - **[@robot-java-coder](robot-java-coder.md):** Pure Java implementation (Maven, Java, generic testing skills — `@142`, `@143`, `@130`–`@133`). Use when **Framework identification** yields plain Java, CLI-only, or a stack without a dedicated framework agent here. -- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, etc. — `@301`, `@302`, `@311`–`@313`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. -- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Quarkus tests — `@401`, `@402`, `@411`–`@413`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. 
-- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@513`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. +- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, Kafka messaging, MongoDB — `@301`, `@302`, `@311`–`@315`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. +- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Kafka messaging, MongoDB, Quarkus tests — `@401`, `@402`, `@411`–`@415`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. +- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, Kafka messaging, MongoDB, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@515`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. - **Parallel column drives grouping:** The plan's task list table includes a **Parallel** column (or **Agent** if the plan uses that name). Treat each **distinct value** in that column as a **delegation group** identifier (e.g. `A1`, `A2`, `A3-timeout`, `A3-retry`, `A4`). - **One logical developer per group:** For each distinct **Parallel** value, assign a **separate** instance of the **same** chosen implementation agent (`robot-java-coder`, `robot-spring-boot-coder`, `robot-quarkus-coder`, or `robot-micronaut-coder`) whose scope is **only** the rows for that value. Label every handoff, e.g. 
`Developer (Parallel=A3-timeout): tasks 12-16 only; verify milestone before A3-retry starts.` @@ -36,9 +36,9 @@ When you analyze the task, **determine the target framework** from requirements | Finding | Delegate to | |---------|-------------| -| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests or tasks) | [@robot-spring-boot-coder](robot-spring-boot-coder.md) | -| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, or Quarkus-specific tasks) | [@robot-quarkus-coder](robot-quarkus-coder.md) | -| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, or Micronaut-specific tasks) | [@robot-micronaut-coder](robot-micronaut-coder.md) | +| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests, Kafka with `spring-kafka`, or MongoDB with `spring-data-mongodb`) | [@robot-spring-boot-coder](robot-spring-boot-coder.md) | +| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, SmallRye Reactive Messaging, or Quarkus MongoDB Panache) | [@robot-quarkus-coder](robot-quarkus-coder.md) | +| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, `micronaut-kafka`, or `micronaut-data-mongodb`) | [@robot-micronaut-coder](robot-micronaut-coder.md) | | No Spring Boot, Quarkus, or Micronaut; plain Java, other framework not covered by a dedicated agent here, or requirements are framework-neutral | [@robot-java-coder](robot-java-coder.md) | **If mixed or ambiguous:** Prefer **robot-spring-boot-coder** when **any** authoritative requirement document commits to Spring Boot; prefer **robot-quarkus-coder** when it commits to Quarkus; prefer **robot-micronaut-coder** when it commits to Micronaut; otherwise prefer **robot-java-coder** and state 
the ambiguity in the handoff so the implementer can align with `pom.xml` / ADRs. diff --git a/skills-generator/src/main/resources/skill-references/assets/agents/robot-micronaut-coder.md b/skills-generator/src/main/resources/skill-references/assets/agents/robot-micronaut-coder.md index eeed8001..0bdb6d51 100644 --- a/skills-generator/src/main/resources/skill-references/assets/agents/robot-micronaut-coder.md +++ b/skills-generator/src/main/resources/skill-references/assets/agents/robot-micronaut-coder.md @@ -11,6 +11,8 @@ You are an **Implementation Specialist** for Micronaut projects. You focus on wr - Implement `@Controller` HTTP endpoints, `@Singleton` application services, and `@Factory` beans following Micronaut conventions. - Configure Micronaut `application.yml` / `application.properties`, environments, and `@Requires` / `@ConfigurationProperties`. - Apply **Micronaut Data** (`@MappedEntity`, repositories, `@Query`, transactions) for relational persistence, or **raw JDBC** (`DataSource`, `PreparedStatement`) when `@511-frameworks-micronaut-jdbc` fits better. +- Integrate Apache Kafka producers and consumers using `@KafkaClient`, `@KafkaListener`, `@KafkaKey`, and `KafkaListenerExceptionHandler`. +- Integrate MongoDB using Micronaut Data MongoDB (`@MappedEntity`, `@MongoRepository`, `@MongoFindQuery`). - Write Micronaut tests (`@MicronautTest`, `@MockBean`, `HttpClient`, `TestPropertyProvider` with Testcontainers). - Ensure secure coding practices for web APIs. 
@@ -30,6 +32,8 @@ Apply guidance from these Skills when relevant: - `@511-frameworks-micronaut-jdbc`: programmatic JDBC (DataSource, SQL, transactions) - `@512-frameworks-micronaut-data`: Micronaut Data (repositories, entities, generated SQL) - `@513-frameworks-micronaut-db-migrations-flyway`: Micronaut DB migrations (Flyway) +- `@514-frameworks-micronaut-kafka`: Kafka messaging (@KafkaClient, @KafkaListener, retries, dead-letter routing) +- `@515-frameworks-micronaut-mongodb`: MongoDB (@MongoRepository, @MappedEntity, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies diff --git a/skills-generator/src/main/resources/skill-references/assets/agents/robot-quarkus-coder.md b/skills-generator/src/main/resources/skill-references/assets/agents/robot-quarkus-coder.md index 9b8ca2ec..00c74b2c 100644 --- a/skills-generator/src/main/resources/skill-references/assets/agents/robot-quarkus-coder.md +++ b/skills-generator/src/main/resources/skill-references/assets/agents/robot-quarkus-coder.md @@ -10,8 +10,10 @@ You are an **Implementation Specialist** for Quarkus projects. You focus on writ - Implement Jakarta REST resources, CDI services, and repositories following Quarkus conventions. - Configure Quarkus extensions, profiles (`%dev`, `%test`, `%prod`), and `application.properties`. -- Apply Quarkus JDBC or Hibernate ORM Panache for persistence. -- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, REST Assured). +- Apply Quarkus JDBC or Hibernate ORM Panache for relational persistence. +- Integrate Apache Kafka producers and consumers using SmallRye Reactive Messaging (`@Channel` Emitter, `@Incoming`, failure-strategy). +- Integrate MongoDB using Quarkus MongoDB Panache (`PanacheMongoEntity`, `PanacheMongoRepository`). 
+- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, `@TestTransaction`, REST Assured, Dev Services). - Ensure secure coding practices for web APIs. ### Coding Standards @@ -29,6 +31,8 @@ Apply guidance from these Skills when relevant: - `@411-frameworks-quarkus-jdbc`: Quarkus JDBC - `@412-frameworks-quarkus-panache`: Quarkus Panache - `@413-frameworks-quarkus-db-migrations-flyway`: Quarkus DB migrations (Flyway) +- `@414-frameworks-quarkus-kafka`: Kafka messaging (SmallRye Reactive Messaging, Emitter, @Incoming, failure strategies) +- `@415-frameworks-quarkus-mongodb`: MongoDB (Panache Mongo entities, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing Strategies diff --git a/skills-generator/src/main/resources/skill-references/assets/agents/robot-spring-boot-coder.md b/skills-generator/src/main/resources/skill-references/assets/agents/robot-spring-boot-coder.md index cfbb2631..0126e621 100644 --- a/skills-generator/src/main/resources/skill-references/assets/agents/robot-spring-boot-coder.md +++ b/skills-generator/src/main/resources/skill-references/assets/agents/robot-spring-boot-coder.md @@ -10,8 +10,10 @@ You are an **Implementation Specialist** for Spring Boot projects. You focus on - Implement REST controllers, services, and repositories following Spring Boot conventions. - Configure Spring Boot auto-configuration, profiles, and `application.yml`. -- Apply Spring Data JDBC for persistence. -- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@SpringBootTest`). +- Apply Spring Data JDBC for relational persistence. +- Integrate Apache Kafka producers and listeners using `spring-kafka` (typed templates, retries, dead-letter topics). +- Integrate MongoDB using Spring Data MongoDB (documents, repositories, error handling). 
+- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@DataMongoTest`, `@SpringBootTest`, `@EmbeddedKafka`). - Ensure secure coding practices for web APIs. ### Coding Standards @@ -29,6 +31,8 @@ Apply guidance from these Skills when relevant: - `@311-frameworks-spring-jdbc`: Spring JDBC - `@312-frameworks-spring-data-jdbc`: Spring Data JDBC - `@313-frameworks-spring-db-migrations-flyway`: Flyway database migrations +- `@314-frameworks-spring-kafka`: Kafka messaging (producers, listeners, retries, dead-letter topics) +- `@315-frameworks-spring-mongodb`: MongoDB (document design, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies diff --git a/skills-generator/src/main/resources/skills.xml b/skills-generator/src/main/resources/skills.xml index 028635b8..917f1fb4 100644 --- a/skills-generator/src/main/resources/skills.xml +++ b/skills-generator/src/main/resources/skills.xml @@ -257,6 +257,16 @@ 313-frameworks-spring-db-migrations-flyway + + + 314-frameworks-spring-kafka + + + + + 315-frameworks-spring-mongodb + + 321-frameworks-spring-boot-testing-unit-tests @@ -307,6 +317,16 @@ 413-frameworks-quarkus-db-migrations-flyway + + + 414-frameworks-quarkus-kafka + + + + + 415-frameworks-quarkus-mongodb + + 421-frameworks-quarkus-testing-unit-tests @@ -357,6 +377,16 @@ 513-frameworks-micronaut-db-migrations-flyway + + + 514-frameworks-micronaut-kafka + + + + + 515-frameworks-micronaut-mongodb + + 521-frameworks-micronaut-testing-unit-tests diff --git a/skills/003-agents-installation/references/003-agents-installation.md b/skills/003-agents-installation/references/003-agents-installation.md index 46b75139..2c490c8e 100644 --- a/skills/003-agents-installation/references/003-agents-installation.md +++ b/skills/003-agents-installation/references/003-agents-installation.md @@ -98,9 +98,9 @@ You are a 
**Coordinator** for Java Enterprise Development. Your primary responsi ### Collaboration partners - **[@robot-java-coder](robot-java-coder.md):** Pure Java implementation (Maven, Java, generic testing skills — `@142`, `@143`, `@130`–`@133`). Use when **Framework identification** yields plain Java, CLI-only, or a stack without a dedicated framework agent here. -- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, etc. — `@301`, `@302`, `@311`–`@313`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. -- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Quarkus tests — `@401`, `@402`, `@411`–`@413`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. -- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@513`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. +- **[@robot-spring-boot-coder](robot-spring-boot-coder.md):** Spring Boot implementation (controllers, REST, Spring Test slices, Spring Data/JDBC, Flyway migrations, Kafka messaging, MongoDB — `@301`, `@302`, `@311`–`@315`, `@321`–`@323`). Use when **Framework identification** yields **Spring Boot** as the application framework. +- **[@robot-quarkus-coder](robot-quarkus-coder.md):** Quarkus implementation (Jakarta REST resources, CDI, Panache/JDBC, Flyway migrations, Kafka messaging, MongoDB, Quarkus tests — `@401`, `@402`, `@411`–`@415`, `@421`–`@423`). Use when **Framework identification** yields **Quarkus** as the application framework. 
+- **[@robot-micronaut-coder](robot-micronaut-coder.md):** Micronaut implementation (`@Controller`, programmatic JDBC, Micronaut Data, Flyway migrations, Kafka messaging, MongoDB, `Micronaut.run`, CDI-style beans, Micronaut tests — `@501`, `@502`, `@511`–`@515`, `@521`–`@523`). Use when **Framework identification** yields **Micronaut** as the application framework. - **Parallel column drives grouping:** The plan's task list table includes a **Parallel** column (or **Agent** if the plan uses that name). Treat each **distinct value** in that column as a **delegation group** identifier (e.g. `A1`, `A2`, `A3-timeout`, `A3-retry`, `A4`). - **One logical developer per group:** For each distinct **Parallel** value, assign a **separate** instance of the **same** chosen implementation agent (`robot-java-coder`, `robot-spring-boot-coder`, `robot-quarkus-coder`, or `robot-micronaut-coder`) whose scope is **only** the rows for that value. Label every handoff, e.g. `Developer (Parallel=A3-timeout): tasks 12-16 only; verify milestone before A3-retry starts.` @@ -119,9 +119,9 @@ When you analyze the task, **determine the target framework** from requirements | Finding | Delegate to | |---------|-------------| -| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests or tasks) | [@robot-spring-boot-coder](robot-spring-boot-coder.md) | -| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, or Quarkus-specific tasks) | [@robot-quarkus-coder](robot-quarkus-coder.md) | -| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, or Micronaut-specific tasks) | [@robot-micronaut-coder](robot-micronaut-coder.md) | +| Spring Boot is the chosen or evident stack (starters, Boot parent/BOM, Boot-specific tests, Kafka with `spring-kafka`, or MongoDB with `spring-data-mongodb`) | 
[@robot-spring-boot-coder](robot-spring-boot-coder.md) | +| Quarkus is the chosen or evident stack (quarkus-bom, quarkus-maven-plugin, `@QuarkusTest`, Dev Services, SmallRye Reactive Messaging, or Quarkus MongoDB Panache) | [@robot-quarkus-coder](robot-quarkus-coder.md) | +| Micronaut is the chosen or evident stack (micronaut-parent / micronaut-maven-plugin, `io.micronaut` BOM, `@MicronautTest`, `Micronaut.run`, `micronaut-kafka`, or `micronaut-data-mongodb`) | [@robot-micronaut-coder](robot-micronaut-coder.md) | | No Spring Boot, Quarkus, or Micronaut; plain Java, other framework not covered by a dedicated agent here, or requirements are framework-neutral | [@robot-java-coder](robot-java-coder.md) | **If mixed or ambiguous:** Prefer **robot-spring-boot-coder** when **any** authoritative requirement document commits to Spring Boot; prefer **robot-quarkus-coder** when it commits to Quarkus; prefer **robot-micronaut-coder** when it commits to Micronaut; otherwise prefer **robot-java-coder** and state the ambiguity in the handoff so the implementer can align with `pom.xml` / ADRs. @@ -248,6 +248,8 @@ You are an **Implementation Specialist** for Micronaut projects. You focus on wr - Implement `@Controller` HTTP endpoints, `@Singleton` application services, and `@Factory` beans following Micronaut conventions. - Configure Micronaut `application.yml` / `application.properties`, environments, and `@Requires` / `@ConfigurationProperties`. - Apply **Micronaut Data** (`@MappedEntity`, repositories, `@Query`, transactions) for relational persistence, or **raw JDBC** (`DataSource`, `PreparedStatement`) when `@511-frameworks-micronaut-jdbc` fits better. +- Integrate Apache Kafka producers and consumers using `@KafkaClient`, `@KafkaListener`, `@KafkaKey`, and `KafkaListenerExceptionHandler`. +- Integrate MongoDB using Micronaut Data MongoDB (`@MappedEntity`, `@MongoRepository`, `@MongoFindQuery`). 
- Write Micronaut tests (`@MicronautTest`, `@MockBean`, `HttpClient`, `TestPropertyProvider` with Testcontainers). - Ensure secure coding practices for web APIs. @@ -267,6 +269,8 @@ Apply guidance from these Skills when relevant: - `@511-frameworks-micronaut-jdbc`: programmatic JDBC (DataSource, SQL, transactions) - `@512-frameworks-micronaut-data`: Micronaut Data (repositories, entities, generated SQL) - `@513-frameworks-micronaut-db-migrations-flyway`: Micronaut DB migrations (Flyway) +- `@514-frameworks-micronaut-kafka`: Kafka messaging (@KafkaClient, @KafkaListener, retries, dead-letter routing) +- `@515-frameworks-micronaut-mongodb`: MongoDB (@MongoRepository, @MappedEntity, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies @@ -302,8 +306,10 @@ You are an **Implementation Specialist** for Quarkus projects. You focus on writ - Implement Jakarta REST resources, CDI services, and repositories following Quarkus conventions. - Configure Quarkus extensions, profiles (`%dev`, `%test`, `%prod`), and `application.properties`. -- Apply Quarkus JDBC or Hibernate ORM Panache for persistence. -- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, REST Assured). +- Apply Quarkus JDBC or Hibernate ORM Panache for relational persistence. +- Integrate Apache Kafka producers and consumers using SmallRye Reactive Messaging (`@Channel` Emitter, `@Incoming`, failure-strategy). +- Integrate MongoDB using Quarkus MongoDB Panache (`PanacheMongoEntity`, `PanacheMongoRepository`). +- Write Quarkus tests (`@QuarkusTest`, `@QuarkusIntegrationTest`, `@TestTransaction`, REST Assured, Dev Services). - Ensure secure coding practices for web APIs. 
### Coding Standards @@ -321,6 +327,8 @@ Apply guidance from these Skills when relevant: - `@411-frameworks-quarkus-jdbc`: Quarkus JDBC - `@412-frameworks-quarkus-panache`: Quarkus Panache - `@413-frameworks-quarkus-db-migrations-flyway`: Quarkus DB migrations (Flyway) +- `@414-frameworks-quarkus-kafka`: Kafka messaging (SmallRye Reactive Messaging, Emitter, @Incoming, failure strategies) +- `@415-frameworks-quarkus-mongodb`: MongoDB (Panache Mongo entities, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing Strategies @@ -356,8 +364,10 @@ You are an **Implementation Specialist** for Spring Boot projects. You focus on - Implement REST controllers, services, and repositories following Spring Boot conventions. - Configure Spring Boot auto-configuration, profiles, and `application.yml`. -- Apply Spring Data JDBC for persistence. -- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@SpringBootTest`). +- Apply Spring Data JDBC for relational persistence. +- Integrate Apache Kafka producers and listeners using `spring-kafka` (typed templates, retries, dead-letter topics). +- Integrate MongoDB using Spring Data MongoDB (documents, repositories, error handling). +- Write Spring Test slices (`@WebMvcTest`, `@DataJdbcTest`, `@DataMongoTest`, `@SpringBootTest`, `@EmbeddedKafka`). - Ensure secure coding practices for web APIs. 
### Coding Standards @@ -375,6 +385,8 @@ Apply guidance from these Skills when relevant: - `@311-frameworks-spring-jdbc`: Spring JDBC - `@312-frameworks-spring-data-jdbc`: Spring Data JDBC - `@313-frameworks-spring-db-migrations-flyway`: Flyway database migrations +- `@314-frameworks-spring-kafka`: Kafka messaging (producers, listeners, retries, dead-letter topics) +- `@315-frameworks-spring-mongodb`: MongoDB (document design, repositories, error handling) - `@142-java-functional-programming`: Functional programming patterns - `@143-java-functional-exception-handling`: Exception handling patterns - `@130-java-testing-strategies`: Testing strategies diff --git a/skills/314-frameworks-spring-kafka/SKILL.md b/skills/314-frameworks-spring-kafka/SKILL.md new file mode 100644 index 00000000..31f3d9c3 --- /dev/null +++ b/skills/314-frameworks-spring-kafka/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 314-frameworks-spring-kafka +description: Use when you need to design or implement Kafka messaging in Spring Boot — including topic design, producer/consumer implementation with Spring for Apache Kafka, retries and dead-letter topics, idempotency, and error handling. This should trigger for requests such as Add Kafka in Spring Boot; Review Spring Kafka consumers; Improve retries and DLT in Spring Kafka. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Spring Boot — Kafka messaging + +Apply Spring Kafka guidance with concrete examples for design, implementation, and error handling. + +## Constraints + +Compile before messaging refactors; verify after changes. 
+ +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add Kafka in Spring Boot +- Review Spring Kafka consumers/producers +- Improve retries, dead-letter topics, or idempotency in Spring Kafka + +## Workflow + +1. **Read reference and assess project context** + +Read `references/314-frameworks-spring-kafka.md` and inspect current messaging setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify reliability and throughput goals and define the minimum safe set of changes. + +3. **Apply framework-aligned changes** + +Implement/refactor Spring Kafka configuration, producer/consumer logic, and failure handling. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. + +## Reference + +For detailed guidance, examples, and constraints, see [references/314-frameworks-spring-kafka.md](references/314-frameworks-spring-kafka.md). diff --git a/skills/314-frameworks-spring-kafka/references/314-frameworks-spring-kafka.md b/skills/314-frameworks-spring-kafka/references/314-frameworks-spring-kafka.md new file mode 100644 index 00000000..881885b2 --- /dev/null +++ b/skills/314-frameworks-spring-kafka/references/314-frameworks-spring-kafka.md @@ -0,0 +1,396 @@ +--- +name: 314-frameworks-spring-kafka +description: Use when you need Kafka with Spring Boot (`spring-kafka`) — including Maven dependencies, topic and event schema design, typed KafkaTemplate producers, @KafkaListener consumers, retries with DefaultErrorHandler, dead-letter topics, idempotent consumers, and integration testing with @EmbeddedKafka. 
This should trigger for requests such as Add Kafka in Spring Boot; Review Spring Kafka consumers; Improve retries and DLT in Spring Kafka. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Spring Boot — Kafka messaging + +## Role + +You are a Senior software engineer with extensive experience in Spring Boot and Apache Kafka + +## Goal + +Design and implement reliable Kafka messaging in Spring Boot using `spring-kafka`. Prefer typed event records, keyed producers for ordered processing, and declarative error handling with dead-letter topics over silent exception swallowing. Keep listeners thin — delegate business logic to services. Guard consumers against poison messages and replay with idempotency on the eventId. + +**What is covered in this Skill?** + +- Maven `spring-kafka` dependency aligned with the Spring Boot BOM +- Versioned event schemas as Java records with explicit `eventId`, `schemaVersion`, and aggregate key +- Topic naming conventions (`domain.entity.operation.v{N}`) +- `KafkaTemplate` typed producer with explicit key strategy +- `@KafkaListener` consumer with explicit `groupId`, `topics`, and typed payload +- `ConcurrentKafkaListenerContainerFactory` for concurrency and batch vs. single-record modes +- `DefaultErrorHandler` with `FixedBackOff` / `ExponentialBackOff` and `DeadLetterPublishingRecoverer` +- Idempotent consumer pattern using `eventId` de-duplication store +- Kafka transactions for exactly-once producer semantics +- Testing with `@EmbeddedKafka` and `EmbeddedKafkaBroker` + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any Kafka changes, ensure the project compiles. Compilation failure is a BLOCKING condition. 
+ +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never build topic names or Kafka header values from untrusted user input +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files +- **EDGE CASE**: If required context, files, or tools are missing, report the blocker explicitly + +## Examples + +### Table of contents + +- Example 1: Maven dependency +- Example 2: Event schema design +- Example 3: Producer implementation +- Example 4: Consumer implementation +- Example 5: Error handling and dead-letter topic +- Example 6: Idempotent consumer +- Example 7: Integration testing + +### Example 1: Maven dependency + +Title: Add spring-kafka via the Spring Boot BOM; never pin the version manually +Description: Spring Boot manages the `spring-kafka` version via its BOM. Declaring an explicit version pins the library and can cause incompatibility with the auto-configured `KafkaAutoConfiguration`. Use the starter form when using Spring Boot; add the raw `spring-kafka` artifact for library modules that do not depend on Spring Boot auto-configuration. + +**Good example:** + +```xml +<dependency> +    <groupId>org.springframework.kafka</groupId> +    <artifactId>spring-kafka</artifactId> +</dependency> +``` + +**Bad example:** + +```xml +<dependency> +    <groupId>org.springframework.kafka</groupId> +    <artifactId>spring-kafka</artifactId> +    <version>3.1.0</version> +</dependency> +``` + +### Example 2: Event schema design + +Title: Versioned immutable records with eventId and aggregate key +Description: Define each Kafka event as a Java record. Include a unique `eventId` (UUID) for consumer de-duplication, a `schemaVersion` string for forward compatibility, and the aggregate's natural key (e.g. `orderId`).
Use the aggregate key as the Kafka message key so all events for the same aggregate land in the same partition, preserving ordering. Topic names follow the pattern `domain.entity.operation.v{N}` to allow parallel consumers on older and newer versions. + +**Good example:** + +```java +import java.time.Instant; + +// Immutable event schema — one record per event type +public record OrderCreatedEvent( + String eventId, // UUID; used for consumer-side de-duplication + String schemaVersion, // "v1"; bump when non-backward-compatible fields change + String orderId, // aggregate key → use as Kafka message key + String customerId, + Instant occurredAt +) { + public static OrderCreatedEvent of(String orderId, String customerId) { + return new OrderCreatedEvent( + java.util.UUID.randomUUID().toString(), + "v1", + orderId, + customerId, + Instant.now() + ); + } +} +// topic: orders.created.v1 key: orderId +``` + +**Bad example:** + +```java +// Bad: generic map payload — no schema, no version, no stable key +Map event = Map.of( + "type", "orderCreated", + "data", "..." +); +// topic: events key: null (random partition assignment) +``` + +### Example 3: Producer implementation + +Title: Typed KafkaTemplate with explicit key; handle send failure +Description: Inject a typed `KafkaTemplate<String, OrderCreatedEvent>` so the compiler enforces the key/value contract. Always pass the aggregate's key as the Kafka record key. Return the `CompletableFuture` to the caller (or await it at a command boundary) so broker-side send failures are observable. Do not throw from a `whenComplete` callback and assume the caller will see it. 
+ +**Good example:** + +```java +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.support.SendResult; +import org.springframework.stereotype.Service; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; + +@Service +class OrderEventPublisher { + + private static final String TOPIC = "orders.created.v1"; + + private final KafkaTemplate<String, OrderCreatedEvent> template; + + OrderEventPublisher(KafkaTemplate<String, OrderCreatedEvent> template) { + this.template = template; + } + + CompletableFuture<SendResult<String, OrderCreatedEvent>> publish(OrderCreatedEvent event) { + return template.send(TOPIC, event.orderId(), event) + .exceptionally(ex -> { + throw new CompletionException("Kafka send failed for order: " + event.orderId(), ex); + }); + } +} +``` + +**Bad example:** + +```java +// Bad: raw KafkaTemplate loses type safety; +// null key loses partition ordering; ignored CompletableFuture drops send errors +@Service +class OrderEventPublisher { + @Autowired + KafkaTemplate template; + + void publish(OrderCreatedEvent event) { + template.send("events", null, event.toString()); // no key, serialized via toString() + // CompletableFuture discarded — broker errors silently lost + } +} +``` + +### Example 4: Consumer implementation + +Title: @KafkaListener with typed payload and thin handler +Description: Annotate the listener class with `@Component`. Declare `topics` and `groupId` on the `@KafkaListener` so the consumer group is explicit and testable. Accept the typed event record as the method parameter. Delegate all business logic to an application service — the listener method should only translate the Kafka message into a service call.
+ +**Good example:** + +```java +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.stereotype.Component; + +@Component +class BillingEventListener { + + private final BillingService billingService; + + BillingEventListener(BillingService billingService) { + this.billingService = billingService; + } + + @KafkaListener(topics = "orders.created.v1", groupId = "billing-service-v1") + void onOrderCreated(OrderCreatedEvent event) { + billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +// Bad: no groupId means a random group on each restart causing re-read from the beginning; +// String payload requires manual parsing; business logic mixed directly in listener +@KafkaListener(topics = "orders.created.v1") +void onMessage(String rawJson) { + var orderId = rawJson.split(",")[0]; // fragile parsing + // 50 lines of business logic inline ... +} +``` + +### Example 5: Error handling and dead-letter topic + +Title: DefaultErrorHandler with backoff and DeadLetterPublishingRecoverer +Description: Configure a `DefaultErrorHandler` bean that retries with exponential back-off and then routes unrecoverable messages to a dead-letter topic via `DeadLetterPublishingRecoverer`. The recoverer automatically targets `{topic}.DLT` by default. Register the handler on the container factory so it applies to all listeners. Do not swallow exceptions inside the listener — let them propagate so the container's error handler can decide. 
+ +**Good example:** + +```java +import org.apache.kafka.common.TopicPartition; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.listener.DeadLetterPublishingRecoverer; +import org.springframework.kafka.listener.DefaultErrorHandler; +import org.springframework.util.backoff.ExponentialBackOff; + +@Configuration +class KafkaErrorConfig { + + @Bean + DefaultErrorHandler errorHandler(KafkaTemplate template) { + var recoverer = new DeadLetterPublishingRecoverer(template, + (record, ex) -> new TopicPartition(record.topic() + ".DLT", -1)); + + var backOff = new ExponentialBackOff(500L, 2.0); + backOff.setMaxElapsedTime(30_000L); // stop retrying after 30 s + + return new DefaultErrorHandler(recoverer, backOff); + } +} +``` + +**Bad example:** + +```java +// Bad: swallowing the exception causes acknowledgement of poison messages; +// they are never retried and never routed to a DLT +@KafkaListener(topics = "orders.created.v1", groupId = "billing-service-v1") +void onOrderCreated(OrderCreatedEvent event) { + try { + billingService.createInvoice(event.orderId(), event.customerId()); + } catch (Exception ignored) { + // silent: message acknowledged, data loss guaranteed + } +} +``` + +### Example 6: Idempotent consumer + +Title: De-duplicate on eventId to survive retries and rebalances +Description: Kafka at-least-once delivery means a consumer can receive the same message more than once — on broker retries, after a rebalance, or when consuming from a DLT. Check the `eventId` against a de-duplication store (an in-memory set in dev/test; a database table or Redis in production) before processing. Persist the `eventId` inside the same transaction as the business side effect. 
+ +**Good example:** + +```java +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +@Component +class IdempotentBillingListener { + + private final Set processedEventIds = ConcurrentHashMap.newKeySet(); + private final BillingService billingService; + + IdempotentBillingListener(BillingService billingService) { + this.billingService = billingService; + } + + @KafkaListener(topics = "orders.created.v1", groupId = "billing-service-v1") + @Transactional + void onOrderCreated(OrderCreatedEvent event) { + if (!processedEventIds.add(event.eventId())) { + return; // already processed — skip without error + } + billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +@KafkaListener(topics = "orders.created.v1", groupId = "billing-service-v1") +void onOrderCreated(OrderCreatedEvent event) { + // Bad: no idempotency guard — rebalance or retry doubles billing + billingService.createInvoice(event.orderId(), event.customerId()); +} +``` + +### Example 7: Integration testing + +Title: @EmbeddedKafka with KafkaTestUtils for end-to-end listener tests +Description: Annotate the test class with `@SpringBootTest` and `@EmbeddedKafka` to start an in-process broker. Use `KafkaTestUtils.getRecords` to consume messages programmatically and assert on their values. This verifies the full serialization/deserialization path without a running Kafka cluster. 
+ +**Good example:** + +```java +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.kafka.core.ConsumerFactory; +import org.springframework.kafka.test.EmbeddedKafkaBroker; +import org.springframework.kafka.test.context.EmbeddedKafka; +import org.springframework.kafka.test.utils.KafkaTestUtils; + +import static org.assertj.core.api.Assertions.assertThat; + +@SpringBootTest +@EmbeddedKafka(topics = {"orders.created.v1"}, partitions = 1) +class OrderEventPublisherIT { + + @Autowired + OrderEventPublisher publisher; + + @Autowired + EmbeddedKafkaBroker broker; + + @Autowired + ConsumerFactory consumerFactory; + + @Test + void publish_sendsEventToTopic() { + OrderCreatedEvent event = OrderCreatedEvent.of("order-1", "cust-1"); + publisher.publish(event); + + try (Consumer consumer = consumerFactory.createConsumer("test-group", "")) { + broker.consumeFromAnEmbeddedTopic(consumer, "orders.created.v1"); + ConsumerRecords records = KafkaTestUtils.getRecords(consumer); + assertThat(records.count()).isEqualTo(1); + assertThat(records.iterator().next().value().orderId()).isEqualTo("order-1"); + } + } +} +``` + +**Bad example:** + +```java +// Bad: mocking KafkaTemplate proves nothing — does not test serialization or topic routing +@SpringBootTest +class OrderEventPublisherTest { + + @MockBean + KafkaTemplate template; + + @Autowired + OrderEventPublisher publisher; + + @Test + void publish_callsTemplate() { + publisher.publish(OrderCreatedEvent.of("o1", "c1")); + verify(template).send(any(), any(), any()); // verifies Mockito glue only + } +} +``` + +## Output Format + +- **ANALYZE** Kafka code: event schema versioning, producer key strategy, listener group isolation, error handler registration, idempotency guards, and test coverage 
+- **CATEGORIZE** issues by impact (RELIABILITY for missing retries/DLT, CORRECTNESS for missing idempotency, MAINTAINABILITY for untyped schemas, SECURITY for untrusted topic name injection) +- **APPLY** Spring Kafka–aligned fixes: type the templates, key producers on the aggregate id, register DefaultErrorHandler with backoff and DLT, add eventId de-duplication +- **IMPLEMENT** changes so topic configs, serializers, and tests stay consistent +- **EXPLAIN** trade-offs (at-least-once vs exactly-once, fixed vs exponential backoff, in-process vs Testcontainers Kafka) +- **TEST** with `@EmbeddedKafka` for unit/integration; use Testcontainers for full-stack acceptance tests +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY Kafka refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; exercise listener integration tests before promoting +- **INJECTION SAFETY**: Never construct topic names or message headers from untrusted user input +- **ERROR HANDLING**: Never swallow exceptions inside a `@KafkaListener` — propagate so the container's error handler can retry or route to DLT +- **IDEMPOTENCY**: Always de-duplicate on `eventId` — Kafka's at-least-once guarantee means consumers must tolerate duplicates +- **KEY STRATEGY**: Always set the Kafka message key to the aggregate's natural key; null keys disable partition-level ordering +- **DLT MONITORING**: Set up alerting on the DLT topic — messages landing there indicate systematic processing failures +- **INCREMENTAL SAFETY**: Change one producer/consumer surface at a time; verify with `@EmbeddedKafka` tests between steps \ No newline at end of file diff --git a/skills/315-frameworks-spring-mongodb/SKILL.md b/skills/315-frameworks-spring-mongodb/SKILL.md new file mode 100644 index 00000000..33791b20 --- /dev/null +++ 
b/skills/315-frameworks-spring-mongodb/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 315-frameworks-spring-mongodb +description: Use when you need to design or implement MongoDB data access in Spring Boot — including document modeling, Spring Data Mongo repositories/templates, indexing, optimistic concurrency, and error handling. This should trigger for requests such as Add MongoDB in Spring Boot; Review Spring Data Mongo design; Improve error handling for Mongo writes. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Spring Boot — MongoDB + +Apply Spring Data MongoDB guidance with concrete examples for design, implementation, and error handling. + +## Constraints + +Compile before MongoDB refactors; verify after changes. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add MongoDB in Spring Boot +- Review Spring Data Mongo repositories/documents +- Improve duplicate key handling, retries, or optimistic locking in Mongo flows + +## Workflow + +1. **Read reference and assess project context** + +Read `references/315-frameworks-spring-mongodb.md` and inspect persistence setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify data model, consistency, and query requirements to define safe improvements. + +3. **Apply framework-aligned changes** + +Implement/refactor mappings, repositories, indexes, and failure handling policies. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. 
+ +## Reference + +For detailed guidance, examples, and constraints, see [references/315-frameworks-spring-mongodb.md](references/315-frameworks-spring-mongodb.md). diff --git a/skills/315-frameworks-spring-mongodb/references/315-frameworks-spring-mongodb.md b/skills/315-frameworks-spring-mongodb/references/315-frameworks-spring-mongodb.md new file mode 100644 index 00000000..cb705709 --- /dev/null +++ b/skills/315-frameworks-spring-mongodb/references/315-frameworks-spring-mongodb.md @@ -0,0 +1,439 @@ +--- +name: 315-frameworks-spring-mongodb +description: Use when you need MongoDB with Spring Data MongoDB — including Maven dependencies, document modeling with @Document and @CompoundIndex, MongoRepository, MongoTemplate for complex queries, @Version optimistic locking, and explicit error handling for DuplicateKeyException and OptimisticLockingFailureException. This should trigger for requests such as Add MongoDB in Spring Boot; Review Spring Data Mongo repositories; Improve error handling for Mongo writes. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Spring Boot — MongoDB + +## Role + +You are a Senior software engineer with extensive experience in Spring Boot and MongoDB + +## Goal + +Design clear document models, implement correct Spring Data MongoDB repositories and services, and handle MongoDB failures explicitly. Prefer immutable records for documents, explicit index declarations, and `@Version` for concurrency control. Never expose managed documents at API boundaries — map to DTOs. Guard against injection by using derived finders or `Criteria` — never string-concatenated query strings. 
+ +**What is covered in this Skill?** + +- Maven `spring-boot-starter-data-mongodb` dependency aligned with the Spring Boot BOM +- Document design: `@Document`, `@Field`, `@CompoundIndex`, and `@Id` mapping +- Factory methods for new documents; `@Version` for optimistic locking +- `MongoRepository` with derived finders and explicit `@Query` methods +- `MongoTemplate` for complex aggregations and multi-condition queries +- `@Transactional` on service methods (MongoDB multi-document transactions where supported) +- Error handling: `DuplicateKeyException`, `OptimisticLockingFailureException`, `DataAccessException` +- DTO projections: returning view types instead of leaking internal documents +- Testing with `@DataMongoTest` slice and Testcontainers MongoDB + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any MongoDB changes, ensure the project compiles. Compilation failure is a BLOCKING condition. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never build query strings by concatenating user input — always use Criteria API or derived finders +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files +- **EDGE CASE**: If required context, files, or tools are missing, report the blocker explicitly + +## Examples + +### Table of contents + +- Example 1: Maven dependency +- Example 2: Document design +- Example 3: Repository pattern +- Example 4: Service layer +- Example 5: MongoTemplate for complex queries +- Example 6: Error handling +- Example 7: Testing + +### Example 1: Maven dependency + +Title: Add the Spring Data MongoDB starter; version is managed by the 
Spring Boot BOM +Description: Spring Boot's BOM manages the `spring-boot-starter-data-mongodb` version. Declaring an explicit version risks classpath conflicts with the auto-configured `MongoAutoConfiguration`. Add the embedded Flapdoodle MongoDB only in `test` scope for slice tests when not using Testcontainers. + +**Good example:** + +```xml + + + org.springframework.boot + spring-boot-starter-data-mongodb + + + + + de.flapdoodle.embed + de.flapdoodle.embed.mongo.spring30x + test + +``` + +**Bad example:** + +```xml + + + org.springframework.boot + spring-boot-starter-data-mongodb + 3.2.0 + +``` + +### Example 2: Document design + +Title: @Document with explicit collection, @Field, @CompoundIndex, and factory method +Description: Annotate with `@Document` and name the collection explicitly to avoid case-sensitivity surprises across environments. Map field names with `@Field` when the Java property name differs from the stored field. Declare indexes on the entity with `@CompoundIndex` to keep schema intent alongside the model. Use a static factory method (`of(...)`) that leaves `id` null for new inserts. 
+ +**Good example:** + +```java +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.mongodb.core.index.CompoundIndex; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import java.time.Instant; + +@Document(collection = "orders") +@CompoundIndex(name = "uk_order_number", def = "{'order_number': 1}", unique = true) +public record OrderDocument( + @Id String id, + @Field("order_number") String orderNumber, + @Field("customer_id") String customerId, + @Field("created_at") Instant createdAt, + @Version Long version +) { + public static OrderDocument of(String orderNumber, String customerId) { + return new OrderDocument(null, orderNumber, customerId, Instant.now(), null); + } +} +``` + +**Bad example:** + +```java +// Bad: implicit collection name derived from class name; no indexes; mutable class +@Document +class OrderDocument { + @Id String id; + String orderNumber; // no unique index + String customerId; + // setters allow accidental mutation + public void setId(String id) { this.id = id; } +} +``` + +### Example 3: Repository pattern + +Title: MongoRepository with derived finders; avoid string-concatenated queries +Description: Extend `MongoRepository` (or `ListCrudRepository` for `List<T>` return types). Use Spring Data's query derivation for simple lookups and `@Query` with named parameters for complex ones. Never build MongoDB JSON query strings from user input — use the Criteria API or `@Query` with `?0` / `?#{[0]}` placeholders.
+ +**Good example:** + +```java +import org.springframework.data.mongodb.repository.MongoRepository; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.stereotype.Repository; +import java.time.Instant; +import java.util.List; +import java.util.Optional; + +@Repository +public interface OrderRepository extends MongoRepository<OrderDocument, String> { + + Optional<OrderDocument> findByOrderNumber(String orderNumber); + + List<OrderDocument> findByCustomerId(String customerId); + + // Named @Query — binds the parameter safely + @Query("{ 'created_at': { $gte: ?0 } }") + List<OrderDocument> findCreatedAfter(Instant since); +} +``` + +**Bad example:** + +```java +// Bad: raw string concatenation into a Mongo query — NoSQL injection risk +class OrderRepositoryBad { + @Autowired MongoTemplate mongoTemplate; + + List<OrderDocument> findByCustomer(String customerId) { + String json = "{ 'customer_id': '" + customerId + "' }"; // injection-prone + return mongoTemplate.find(new BasicQuery(json), OrderDocument.class); + } +} +``` + +### Example 4: Service layer + +Title: Constructor injection, @Transactional for multi-document atomicity +Description: Keep the repository out of REST controllers — route everything through a service. Declare the class `@Transactional(readOnly = true)` and override write methods with `@Transactional`. MongoDB multi-document transactions require a replica set; Spring Data MongoDB supports them transparently when the driver is connected to one.
+ +**Good example:** + +```java +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import java.util.List; +import java.util.Optional; + +@Service +@Transactional(readOnly = true) +class OrderService { + + private final OrderRepository repository; + + OrderService(OrderRepository repository) { + this.repository = repository; + } + + Optional findByOrderNumber(String orderNumber) { + return repository.findByOrderNumber(orderNumber); + } + + List findByCustomer(String customerId) { + return repository.findByCustomerId(customerId); + } + + @Transactional + OrderDocument create(String orderNumber, String customerId) { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } + + @Transactional + OrderDocument updateCustomer(String orderId, String newCustomerId) { + return repository.findById(orderId) + .map(o -> new OrderDocument(o.id(), o.orderNumber(), newCustomerId, o.createdAt(), o.version())) + .map(repository::save) + .orElseThrow(() -> new IllegalArgumentException("Order not found: " + orderId)); + } +} +``` + +**Bad example:** + +```java +// Bad: no transaction boundary; repository directly in REST controller leaks persistence +@RestController +class OrderController { + @Autowired OrderRepository repository; + + @PostMapping("/orders") + OrderDocument create(String orderNumber) { + return repository.save(new OrderDocument(null, orderNumber, null, null, null)); + } +} +``` + +### Example 5: MongoTemplate for complex queries + +Title: Criteria API — type-safe and injection-free +Description: Use `MongoTemplate` with the `Criteria` and `Query` builder API when query derivation is insufficient (e.g. dynamic optional filters, aggregation pipelines). This keeps query logic readable, testable, and free of string concatenation risks. 
+ +**Good example:** + +```java +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.stereotype.Repository; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +@Repository +class OrderCustomRepository { + + private final MongoTemplate mongoTemplate; + + OrderCustomRepository(MongoTemplate mongoTemplate) { + this.mongoTemplate = mongoTemplate; + } + + List search(String customerId, Instant since) { + var criteria = new ArrayList(); + if (customerId != null) { + criteria.add(Criteria.where("customer_id").is(customerId)); + } + if (since != null) { + criteria.add(Criteria.where("created_at").gte(since)); + } + var query = criteria.isEmpty() + ? new Query() + : new Query(new Criteria().andOperator(criteria.toArray(new Criteria[0]))); + return mongoTemplate.find(query, OrderDocument.class); + } +} +``` + +**Bad example:** + +```java +// Bad: building the JSON query document from user input +List search(String customerId) { + String json = "{ customer_id: '" + customerId + "' }"; // injection risk + return mongoTemplate.find(new BasicQuery(json), OrderDocument.class); +} +``` + +### Example 6: Error handling + +Title: DuplicateKeyException, OptimisticLockingFailureException — translate at service boundary +Description: Catch MongoDB-specific Spring Data exceptions (`DuplicateKeyException`, `OptimisticLockingFailureException`) at the service layer and translate them into domain exceptions or meaningful HTTP status codes. Never let raw driver-level exceptions leak to the API. Avoid blanket `catch (Exception e)` which swallows the failure and loses context. 
+ +**Good example:** + +```java +import org.springframework.dao.DuplicateKeyException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +@Service +@Transactional(readOnly = true) +class OrderService { + + private final OrderRepository repository; + + OrderService(OrderRepository repository) { + this.repository = repository; + } + + @Transactional + OrderDocument create(String orderNumber, String customerId) { + try { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } catch (DuplicateKeyException ex) { + throw new IllegalStateException("Order number already exists: " + orderNumber, ex); + } + } + + @Transactional + OrderDocument updateCustomer(String orderId, String newCustomerId) { + try { + return repository.findById(orderId) + .map(o -> new OrderDocument(o.id(), o.orderNumber(), newCustomerId, o.createdAt(), o.version())) + .map(repository::save) + .orElseThrow(() -> new IllegalArgumentException("Order not found: " + orderId)); + } catch (OptimisticLockingFailureException ex) { + throw new IllegalStateException("Concurrent update on order: " + orderId, ex); + } + } +} +``` + +**Bad example:** + +```java +OrderDocument create(String orderNumber, String customerId) { + try { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } catch (Exception e) { + // Bad: generic catch + silent return null hides duplicate-key and network errors + return null; + } +} +``` + + +### Example 7: Testing + +Title: @DataMongoTest slice with Testcontainers for repository behaviour +Description: Use `@DataMongoTest` to spin up only the MongoDB slice — repositories, `MongoTemplate`, and auto-configurations — without the full web layer. Pair it with a `MongoDBContainer` from Testcontainers to run tests against the real dialect. 
Never mock the repository inside this test — the entire purpose is exercising real MongoDB behaviour. + +**Good example:** + +```java +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.data.mongo.DataMongoTest; +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; +import org.testcontainers.containers.MongoDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Optional; + +import static org.assertj.core.api.Assertions.assertThat; + +@DataMongoTest +@Testcontainers +class OrderRepositoryIT { + + @Container + static MongoDBContainer mongo = new MongoDBContainer("mongo:7.0"); + + @DynamicPropertySource + static void mongoProps(DynamicPropertyRegistry registry) { + registry.add("spring.data.mongodb.uri", mongo::getReplicaSetUrl); + } + + @Autowired + OrderRepository repository; + + @AfterEach + void cleanup() { repository.deleteAll(); } + + @Test + void save_generatesId() { + OrderDocument saved = repository.save(OrderDocument.of("ORD-001", "CUST-1")); + assertThat(saved.id()).isNotNull(); + } + + @Test + void findByOrderNumber_returnsDocument() { + repository.save(OrderDocument.of("ORD-002", "CUST-2")); + Optional found = repository.findByOrderNumber("ORD-002"); + assertThat(found).isPresent(); + assertThat(found.get().customerId()).isEqualTo("CUST-2"); + } +} +``` + +**Bad example:** + +```java +// Bad: @SpringBootTest loads full context; mocking repository adds no value +@SpringBootTest +class OrderRepositoryTest { + @MockBean OrderRepository repository; + + @Test + void findByOrderNumber() { + when(repository.findByOrderNumber("ORD-001")) + .thenReturn(Optional.of(new OrderDocument("id", "ORD-001", "c", null, null))); + // verifies Mockito wiring only — tests nothing 
about real MongoDB + } +} +``` + +## Output Format + +- **ANALYZE** MongoDB code: document mapping completeness, index declarations, repository query safety, service transaction placement, error handling specificity, and DTO vs document leakage +- **CATEGORIZE** issues by impact (SECURITY for injection risk, CORRECTNESS for missing @Version or transactions, PERFORMANCE for missing indexes or unbounded queries, MAINTAINABILITY for mutable documents or missing factory methods) +- **APPLY** Spring Data MongoDB–aligned fixes: explicit collection, @Field, @CompoundIndex, @Version, safe Criteria-based queries, service-layer transactions, exception translation +- **IMPLEMENT** changes so document model, indexes, repositories, and tests stay consistent +- **EXPLAIN** trade-offs (MongoRepository derived finders vs MongoTemplate Criteria, embedded Flapdoodle vs Testcontainers, multi-document transactions vs application-level idempotency) +- **TEST** repository behaviour with `@DataMongoTest` + Testcontainers; never mock repositories inside persistence slice tests +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY MongoDB refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; confirm repository tests pass before promoting +- **INJECTION SAFETY**: Never concatenate user input into MongoDB query strings or `BasicQuery` objects — use Criteria API or `@Query` with bound parameters +- **ERROR HANDLING**: Catch `DuplicateKeyException` and `OptimisticLockingFailureException` at the service boundary and translate to domain exceptions; never swallow with a generic catch +- **OPTIMISTIC LOCKING**: Adding `@Version` to an existing document requires a planned rollout for existing documents; test stale-update behavior before enabling it in production +- **INDEXES**: Deploy index changes as coordinated background 
operations on large collections; avoid blocking foreground index builds in production +- **API BOUNDARIES**: Never return managed `@Document` entities directly from REST controllers — map to DTOs to keep API contracts stable and prevent field leakage +- **INCREMENTAL SAFETY**: Change one repository surface or document mapping at a time; verify with `@DataMongoTest` between steps \ No newline at end of file diff --git a/skills/414-frameworks-quarkus-kafka/SKILL.md b/skills/414-frameworks-quarkus-kafka/SKILL.md new file mode 100644 index 00000000..38be6cb9 --- /dev/null +++ b/skills/414-frameworks-quarkus-kafka/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 414-frameworks-quarkus-kafka +description: Use when you need Kafka messaging in Quarkus with SmallRye Reactive Messaging — including channel/topic design, serialization, ack/failure strategies, retries/DLQ, and error handling. This should trigger for requests such as Add Kafka in Quarkus; Review Reactive Messaging consumers; Improve failure handling for Quarkus Kafka. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Quarkus — Kafka messaging + +Apply Quarkus Kafka guidance with concrete examples for design, implementation, and error handling. + +## Constraints + +Compile before messaging refactors; verify after changes. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add Kafka in Quarkus +- Review Quarkus Reactive Messaging consumers/producers +- Improve retries, dead-letter handling, or idempotency in Quarkus Kafka + +## Workflow + +1. 
**Read reference and assess project context** + +Read `references/414-frameworks-quarkus-kafka.md` and inspect current messaging setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify delivery semantics and resilience goals to define safe improvements. + +3. **Apply framework-aligned changes** + +Implement/refactor channels, serializers, and failure strategies in Reactive Messaging. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. + +## Reference + +For detailed guidance, examples, and constraints, see [references/414-frameworks-quarkus-kafka.md](references/414-frameworks-quarkus-kafka.md). diff --git a/skills/414-frameworks-quarkus-kafka/references/414-frameworks-quarkus-kafka.md b/skills/414-frameworks-quarkus-kafka/references/414-frameworks-quarkus-kafka.md new file mode 100644 index 00000000..35109573 --- /dev/null +++ b/skills/414-frameworks-quarkus-kafka/references/414-frameworks-quarkus-kafka.md @@ -0,0 +1,320 @@ +--- +name: 414-frameworks-quarkus-kafka +description: Use when you need Kafka in Quarkus with SmallRye Reactive Messaging — including Maven extension, channel/topic design, typed @Channel Emitter producers, @Incoming consumers with Uni, failure strategies (dead-letter-queue, retry), idempotency, and Dev Services testing. This should trigger for requests such as Add Kafka in Quarkus; Review Reactive Messaging consumers; Improve failure handling for Quarkus Kafka. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Quarkus — Kafka messaging + +## Role + +You are a Senior software engineer with extensive experience in Quarkus and Apache Kafka + +## Goal + +Design and implement reliable Kafka messaging in Quarkus using SmallRye Reactive Messaging. Keep channel names and topic names decoupled via configuration. Use typed event records. 
Delegate business logic to CDI services from thin `@Incoming` methods. Configure `failure-strategy=dead-letter-queue` so unrecoverable messages are routed rather than silently acknowledged. + +**What is covered in this Skill?** + +- Maven `quarkus-messaging-kafka` extension dependency +- Channel and topic naming conventions and configuration +- Typed event records with `eventId` for de-duplication +- `@Channel` / `Emitter` typed producer +- `@Incoming` consumer with `Uni` return type +- Manual `Message` acknowledgement for explicit ack/nack control +- `failure-strategy`: `dead-letter-queue`, `fail`, and `ignore`; retry is configured separately with connector retry properties +- Idempotent consumer pattern using `eventId` +- Dev Services for Kafka (zero-config in dev and test) +- Integration testing with `@QuarkusTest` and `@TestProfile` + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any Kafka changes, ensure the project compiles. Compilation failure is a BLOCKING condition. 
+ +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never construct topic names or header values from untrusted user input +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files + +## Examples + +### Table of contents + +- Example 1: Maven extension +- Example 2: Event schema design +- Example 3: Channel and topic configuration +- Example 4: Producer implementation +- Example 5: Consumer implementation +- Example 6: Error handling and dead-letter queue +- Example 7: Idempotent consumer + +### Example 1: Maven extension + +Title: Add quarkus-messaging-kafka; let Quarkus BOM manage the version +Description: Add `quarkus-messaging-kafka` to your `pom.xml`. The Quarkus BOM manages the version — do not pin it. Quarkus Dev Services automatically starts a Kafka broker in dev and test modes when this extension is on the classpath and no `kafka.bootstrap.servers` is configured, so no manual setup is needed for local development. + +**Good example:** + +```xml +<dependency> + <groupId>io.quarkus</groupId> + <artifactId>quarkus-messaging-kafka</artifactId> +</dependency> +``` + +**Bad example:** + +```xml +<!-- Bad: pinning the version fights the Quarkus BOM --> +<dependency> + <groupId>io.quarkus</groupId> + <artifactId>quarkus-messaging-kafka</artifactId> + <version>3.8.1</version> +</dependency> +``` + +### Example 2: Event schema design + +Title: Versioned immutable record with eventId and aggregate key +Description: Define each Kafka event as a Java record with an `eventId` (UUID) for de-duplication, a `schemaVersion` for forward compatibility, and the aggregate's natural key (e.g. `orderId`). The aggregate key should be used as the Kafka message key to keep partition ordering for the same aggregate. Topic names follow `domain.entity.operation.v{N}`.
+ +**Good example:** + +```java +import java.time.Instant; + +public record OrderCreatedEvent( + String eventId, // UUID; used for consumer-side de-duplication + String schemaVersion, // "v1" + String orderId, // aggregate key → used as Kafka message key + String customerId, + Instant occurredAt +) { + public static OrderCreatedEvent of(String orderId, String customerId) { + return new OrderCreatedEvent( + java.util.UUID.randomUUID().toString(), "v1", + orderId, customerId, Instant.now() + ); + } +} +// topic: orders.created.v1 key: orderId +``` + +**Bad example:** + +```java +// Bad: untyped map, no schema version, no aggregate key +Map event = Map.of("type", "order", "data", "..."); +``` + +### Example 3: Channel and topic configuration + +Title: Map logical channel names to Kafka topics via application.properties +Description: Decouple the channel name used in code from the physical Kafka topic name via configuration. This allows topics to be renamed across environments without code changes. Use stable, versioned topic names (`domain.entity.operation.v{N}`). Configure `value-serializer` / `value-deserializer` explicitly so there are no implicit class-name-based surprises. 
+ +**Good example:** + +```properties +# application.properties +mp.messaging.outgoing.orders-out.connector=smallrye-kafka +mp.messaging.outgoing.orders-out.topic=orders.created.v1 +mp.messaging.outgoing.orders-out.value.serializer=io.quarkus.kafka.client.serialization.ObjectMapperSerializer + +mp.messaging.incoming.orders-in.connector=smallrye-kafka +mp.messaging.incoming.orders-in.topic=orders.created.v1 +mp.messaging.incoming.orders-in.group.id=billing-service-v1 +mp.messaging.incoming.orders-in.value.deserializer=com.example.kafka.OrderCreatedEventDeserializer +``` + +**Bad example:** + +```java +// Bad: hardcoded topic string inside a Java annotation — cannot be overridden per environment +@Incoming("orders.created.v1") +void onMessage(OrderCreatedEvent event) { } +``` + +### Example 4: Producer implementation + +Title: @Channel Emitter with keyed Message for partition ordering +Description: Inject `@Channel("orders-out") Emitter<OrderCreatedEvent>` into the CDI bean that needs to publish. Wrap the payload in a `Message` with an `OutgoingKafkaRecordMetadata` to set the partition key so all events for the same aggregate go to the same partition. 
+ +**Good example:** +```java +import io.smallrye.reactive.messaging.kafka.api.OutgoingKafkaRecordMetadata; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.eclipse.microprofile.reactive.messaging.Channel; +import org.eclipse.microprofile.reactive.messaging.Emitter; +import org.eclipse.microprofile.reactive.messaging.Message; + +@ApplicationScoped +public class OrderEventPublisher { + + @Inject + @Channel("orders-out") + Emitter<OrderCreatedEvent> emitter; + + public void publish(OrderCreatedEvent event) { + emitter.send( + Message.of(event) + .addMetadata(OutgoingKafkaRecordMetadata.<String>builder() + .withKey(event.orderId()) + .build()) + ); + } +} +``` + +**Bad example:** + +```java +@ApplicationScoped +class OrderEventPublisher { + @Inject @Channel("orders-out") + Emitter<String> emitter; // Bad: untyped String emitter; no message key set + + void publish(OrderCreatedEvent event) { + emitter.send(event.toString()); // loses type safety and partition ordering + } +} +``` + +### Example 5: Consumer implementation + +Title: @Incoming with Uni return and thin handler delegating to a service +Description: The `@Incoming` method should be thin: receive the typed event, delegate immediately to an `@ApplicationScoped` service, and return `Uni<Void>` when the processing path is asynchronous. Synchronous `void` methods can still propagate thrown exceptions, but a `Uni`/`CompletionStage` is the safer contract when the service work completes later.
+ +**Good example:** +```java +import io.smallrye.mutiny.Uni; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.eclipse.microprofile.reactive.messaging.Incoming; + +@ApplicationScoped +public class BillingEventConsumer { + + @Inject + BillingService billingService; + + @Incoming("orders-in") + public Uni<Void> onOrderCreated(OrderCreatedEvent event) { + return billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +// Bad: asynchronous work is hidden behind a void method; +// business logic inline instead of delegating to a service +@Incoming("orders-in") +void onOrderCreated(String rawJson) { + // 40 lines of parsing + DB calls + HTTP calls +} +``` + +### Example 6: Error handling and dead-letter queue + +Title: failure-strategy=dead-letter-queue with nack for unrecoverable messages +Description: Configure `failure-strategy=dead-letter-queue` so failed messages are routed to a dedicated DLQ topic instead of stopping the channel or being silently ignored. For transient failures (network, timeout), enable the connector retry options (`retry=true`, bounded attempts, bounded wait) alongside the failure strategy. For deterministic failures (deserialization errors, schema mismatch), use explicit `message.nack(ex)` to route immediately to the DLQ.
+ +**Good example:** +```properties +# application.properties +mp.messaging.incoming.orders-in.failure-strategy=dead-letter-queue +mp.messaging.incoming.orders-in.dead-letter-queue.topic=orders.created.dlq +mp.messaging.incoming.orders-in.retry=true +mp.messaging.incoming.orders-in.retry-attempts=3 +# retry-max-wait is an integer number of seconds — "10S" is not a valid value +mp.messaging.incoming.orders-in.retry-max-wait=10 +mp.messaging.incoming.orders-in.commit-strategy=throttled +``` + +**Bad example:** + +```java +@Incoming("orders-in") +public CompletionStage<Void> onOrderCreated(Message<OrderCreatedEvent> message) { + try { + billingService.createInvoice(message.getPayload().orderId(), message.getPayload().customerId()); + return message.ack(); + } catch (Exception e) { + return message.ack(); // Bad: ack on failure — record consumed and lost silently + } +} +``` + +### Example 7: Idempotent consumer + +Title: De-duplicate on eventId to survive at-least-once redelivery +Description: Kafka guarantees at-least-once delivery, so consumers can receive duplicate messages on broker retries, consumer group rebalances, or DLQ re-processing. Guard each consumer method with an `eventId` check against a de-duplication store. In production, back the store with a database or Redis; in tests, an in-memory `ConcurrentHashMap` is sufficient.
+ +**Good example:** +```java +import io.smallrye.mutiny.Uni; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.eclipse.microprofile.reactive.messaging.Incoming; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +@ApplicationScoped +public class BillingEventConsumer { + + private final Set<String> processedIds = ConcurrentHashMap.newKeySet(); + + @Inject BillingService billingService; + + @Incoming("orders-in") + public Uni<Void> onOrderCreated(OrderCreatedEvent event) { + if (!processedIds.add(event.eventId())) { + return Uni.createFrom().voidItem(); // duplicate — skip + } + return billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +@Incoming("orders-in") +public Uni<Void> onOrderCreated(OrderCreatedEvent event) { + // Bad: no idempotency guard — rebalance or DLQ re-play doubles billing + return billingService.createInvoice(event.orderId(), event.customerId()); +} +``` + +## Output Format + +- **ANALYZE** Kafka code: event schema versioning, channel/topic binding, producer key strategy, consumer return type, failure strategy configuration, and idempotency guards +- **CATEGORIZE** issues by impact (RELIABILITY for missing DLQ or failure strategy, CORRECTNESS for missing idempotency, MAINTAINABILITY for untyped emitters or inline business logic, SECURITY for user-controlled topic names) +- **APPLY** SmallRye Reactive Messaging–aligned fixes: type the emitters and channels, key messages on the aggregate id, configure failure-strategy, add eventId de-duplication +- **IMPLEMENT** changes so channel config, serializers, consumer methods, and tests stay consistent +- **EXPLAIN** trade-offs (void vs Uni return, dead-letter-queue vs retry vs ignore, in-process Dev Services vs Testcontainers) +- **TEST** with `@QuarkusTest` and Dev Services; use Testcontainers Kafka for full-stack acceptance tests +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean
verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY Kafka refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; exercise consumer integration tests before promoting +- **INJECTION SAFETY**: Never build topic names or Kafka header values from untrusted user input +- **ERROR HANDLING**: Propagate failures from `@Incoming` methods; use `Uni`/`CompletionStage` for asynchronous processing so failures happen inside the messaging contract +- **IDEMPOTENCY**: Always de-duplicate on `eventId` — at-least-once delivery means consumers must tolerate receiving the same message more than once +- **DLQ MONITORING**: Set up alerting on the DLQ topic — messages landing there indicate systematic processing failures +- **CDI SELF-INVOCATION**: Never call an `@Incoming`-annotated method directly via `this` — always inject through the CDI proxy +- **INCREMENTAL SAFETY**: Change one producer or consumer surface at a time; verify with `@QuarkusTest` between steps \ No newline at end of file diff --git a/skills/415-frameworks-quarkus-mongodb/SKILL.md b/skills/415-frameworks-quarkus-mongodb/SKILL.md new file mode 100644 index 00000000..e0ed8977 --- /dev/null +++ b/skills/415-frameworks-quarkus-mongodb/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 415-frameworks-quarkus-mongodb +description: Use when you need MongoDB persistence in Quarkus — including Panache Mongo entities/repositories, document design, indexes, transactions where applicable, and error handling. This should trigger for requests such as Add MongoDB in Quarkus; Review Quarkus Mongo Panache design; Improve Mongo error handling in Quarkus services. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Quarkus — MongoDB + +Apply Quarkus MongoDB guidance with concrete examples for design, implementation, and error handling. 
+ +## Constraints + +Compile before MongoDB refactors; verify after changes. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add MongoDB in Quarkus +- Review Quarkus Mongo Panache entities/repositories +- Improve duplicate key handling, retry policy, or optimistic locking in Quarkus Mongo + +## Workflow + +1. **Read reference and assess project context** + +Read `references/415-frameworks-quarkus-mongodb.md` and inspect persistence setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify model/query consistency needs and define safe improvements. + +3. **Apply framework-aligned changes** + +Implement/refactor Panache Mongo mappings, repository access, and failure handling. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. + +## Reference + +For detailed guidance, examples, and constraints, see [references/415-frameworks-quarkus-mongodb.md](references/415-frameworks-quarkus-mongodb.md). 
diff --git a/skills/415-frameworks-quarkus-mongodb/references/415-frameworks-quarkus-mongodb.md b/skills/415-frameworks-quarkus-mongodb/references/415-frameworks-quarkus-mongodb.md new file mode 100644 index 00000000..9bf8c572 --- /dev/null +++ b/skills/415-frameworks-quarkus-mongodb/references/415-frameworks-quarkus-mongodb.md @@ -0,0 +1,350 @@ +--- +name: 415-frameworks-quarkus-mongodb +description: Use when you need MongoDB in Quarkus with MongoDB Panache — including Maven extension, entity/repository design with @MongoEntity, PanacheMongoEntity active record vs PanacheMongoRepository, parameterized queries, service-layer persistence boundaries, optional transaction support where infrastructure allows it, and explicit error handling for duplicate key and transient failures. This should trigger for requests such as Add MongoDB in Quarkus; Review Quarkus Mongo Panache entities; Improve Mongo error handling in Quarkus services. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Quarkus — MongoDB + +## Role + +You are a Senior software engineer with extensive experience in Quarkus and MongoDB + +## Goal + +Design and implement MongoDB persistence in Quarkus using MongoDB Panache. Choose the **repository pattern** (`PanacheMongoRepository`) for layered architectures and testability; use the **active record** pattern (`PanacheMongoEntity`) only for small, isolated entities. Keep business logic in `@ApplicationScoped` services, not in entities or REST resources. Prefer single-document atomic updates and explicit idempotency; only rely on multi-document MongoDB transactions when the infrastructure is configured for them. Guard all Panache query parameters — never concatenate user input into query strings. 
+ +**What is covered in this Skill?** + +- Maven `quarkus-mongodb-panache` extension dependency +- `@MongoEntity` document design with `@BsonProperty` and index strategy +- Active record (`PanacheMongoEntity`) for simple CRUD +- Repository (`PanacheMongoRepository`) for separation of concerns +- Parameterized `find` using field shorthand, positional parameters, and `Document` filters +- `@ApplicationScoped` service boundaries with explicit single-document atomicity and optional transaction support where infrastructure allows it +- Error handling: `MongoWriteException` category checks (`DUPLICATE_KEY`), `MongoTimeoutException`, exception chaining +- DTO projections — never leak internal entities at API boundaries +- Dev Services for MongoDB (zero-config in dev/test) +- Testing with `@QuarkusTest`, Dev Services, and explicit cleanup between tests + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any MongoDB changes, ensure the project compiles. Compilation failure is a BLOCKING condition. 
+ +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never build Mongo query strings by concatenating user input — always use bound parameters or the Document filter API +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files + +## Examples + +### Table of contents + +- Example 1: Maven extension +- Example 2: Entity design +- Example 3: Repository pattern +- Example 4: Service layer +- Example 5: Error handling +- Example 6: Testing + +### Example 1: Maven extension + +Title: Add quarkus-mongodb-panache; Quarkus BOM manages the version +Description: Add `quarkus-mongodb-panache` to your `pom.xml`. The Quarkus BOM manages the version. Quarkus Dev Services automatically starts a MongoDB instance in dev and test modes when no `quarkus.mongodb.connection-string` is configured, so no manual setup is needed for local development. + +**Good example:** + +```xml + + + io.quarkus + quarkus-mongodb-panache + +``` + +**Bad example:** + +```xml + + + io.quarkus + quarkus-mongodb-panache + 3.8.1 + +``` + +### Example 2: Entity design + +Title: @MongoEntity with @BsonProperty for explicit field mapping +Description: Annotate the entity class with `@MongoEntity` and specify the collection name explicitly. Use `@BsonProperty` to map Java field names to MongoDB document field names when they differ. Keep domain-significant business identifiers (like `orderNumber`) separate from the MongoDB `id`. Use a `Instant createdAt` timestamp for auditability. 
+ +**Good example:** + +```java +import io.quarkus.mongodb.panache.common.MongoEntity; +import org.bson.codecs.pojo.annotations.BsonProperty; +import org.bson.types.ObjectId; +import java.time.Instant; + +@MongoEntity(collection = "orders") +public class OrderDocument { + public ObjectId id; + + @BsonProperty("order_number") + public String orderNumber; + + @BsonProperty("customer_id") + public String customerId; + + @BsonProperty("created_at") + public Instant createdAt; +} +``` + +**Bad example:** + +```java +// Bad: no @MongoEntity → collection name derived from class name case-sensitively; +// no @BsonProperty → Java field names stored as-is in Mongo (camelCase vs snake_case mismatch) +public class OrderDocument extends io.quarkus.mongodb.panache.PanacheMongoEntity { + public Object payload; // untyped — no schema +} +``` + +### Example 3: Repository pattern + +Title: PanacheMongoRepository with typed, parameterized queries +Description: Implement `PanacheMongoRepository<OrderDocument>` in an `@ApplicationScoped` bean. Use Panache's shorthand field equality (`find("field", value)`) for simple lookups or `Document` filter objects for composite criteria. Never concatenate user input into a query string — always pass values as bound parameters. 
+ +**Good example:** + +```java +import io.quarkus.mongodb.panache.PanacheMongoRepository; +import jakarta.enterprise.context.ApplicationScoped; +import org.bson.Document; +import java.time.Instant; +import java.util.List; +import java.util.Optional; + +@ApplicationScoped +public class OrderRepository implements PanacheMongoRepository { + + // Safe: field shorthand — Panache builds the filter internally + public Optional findByOrderNumber(String orderNumber) { + return find("orderNumber", orderNumber).firstResultOptional(); + } + + // Safe: Document filter with bound parameters — no string concatenation + public List findByCustomerSince(String customerId, Instant since) { + return find(new Document("customer_id", customerId) + .append("created_at", new Document("$gte", since))) + .list(); + } +} +``` + +**Bad example:** + +```java +@ApplicationScoped +public class OrderRepository implements PanacheMongoRepository { + // Bad: concatenating user input into a query string — NoSQL injection risk + public List findByCustomer(String customerId) { + return find("{ customer_id: '" + customerId + "' }").list(); + } +} +``` + +### Example 4: Service layer + +Title: @ApplicationScoped service with explicit persistence boundaries +Description: Wrap business use cases in `@ApplicationScoped` services. Keep REST resources or CDI event handlers thin — they delegate to the service, not to the repository directly. Constructor-inject the repository to maintain testability. Use single-document atomic updates by default; if a use case truly needs multi-document atomicity, verify MongoDB transaction support and configure it deliberately. 
+ +**Good example:** + +```java +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import java.time.Instant; +import java.util.Optional; + +@ApplicationScoped +public class OrderService { + + private final OrderRepository repository; + + @Inject + public OrderService(OrderRepository repository) { + this.repository = repository; + } + + public OrderDocument create(String orderNumber, String customerId) { + var doc = new OrderDocument(); + doc.orderNumber = orderNumber; + doc.customerId = customerId; + doc.createdAt = Instant.now(); + repository.persist(doc); + return doc; + } + + public Optional findByOrderNumber(String orderNumber) { + return repository.findByOrderNumber(orderNumber); + } +} +``` + +**Bad example:** + +```java +// Bad: business logic scattered inside a REST resource; no service layer +@Path("/orders") +class OrderResource { + @Inject OrderRepository repository; + + @POST + void create(String body) { + var doc = new OrderDocument(); + doc.orderNumber = body; // no validation, no mapping + repository.persist(doc); + } +} +``` + +### Example 5: Error handling + +Title: MongoWriteException category check; exception chaining; no generic catch +Description: Catch MongoDB driver-level exceptions by category rather than by message string. `MongoWriteException` carries an `ErrorCategory` — check for `DUPLICATE_KEY` to translate it into a domain constraint violation. Catch `MongoTimeoutException` separately for transient connectivity failures and consider retry logic there. Always chain the original exception to preserve the stack trace. Never swallow with a generic `catch (Exception e)` that returns `null` or logs and continues. 
+ +**Good example:** + +```java +import com.mongodb.ErrorCategory; +import com.mongodb.MongoTimeoutException; +import com.mongodb.MongoWriteException; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +@ApplicationScoped +public class OrderService { + + @Inject OrderRepository repository; + + public OrderDocument create(String orderNumber, String customerId) { + try { + var doc = new OrderDocument(); + doc.orderNumber = orderNumber; + doc.customerId = customerId; + doc.createdAt = java.time.Instant.now(); + repository.persist(doc); + return doc; + } catch (MongoWriteException ex) { + if (ex.getError().getCategory() == ErrorCategory.DUPLICATE_KEY) { + throw new IllegalStateException("Order number already exists: " + orderNumber, ex); + } + throw ex; + } catch (MongoTimeoutException ex) { + throw new IllegalStateException("Transient MongoDB timeout — consider retry", ex); + } + } +} +``` + +**Bad example:** + +```java +public OrderDocument create(String orderNumber, String customerId) { + try { + repository.persist(doc); + return doc; + } catch (Exception e) { + // Bad: generic catch swallows duplicate-key, timeout, schema errors + return null; // corrupts control flow for the caller + } +} +``` + + +### Example 6: Testing + +Title: @QuarkusTest + Dev Services for repository integration; clean collections between tests +Description: Annotate the test class with `@QuarkusTest`. Quarkus Dev Services starts a MongoDB container automatically. Clean the repository before or after each test to keep test data isolated. Do not use JPA-style rollback assumptions for MongoDB persistence tests. Never mock `PanacheMongoRepository` inside a persistence test — the goal is exercising real MongoDB behaviour. 
+ +**Good example:** + +```java +import io.quarkus.test.junit.QuarkusTest; +import jakarta.inject.Inject; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import java.util.Optional; + +import static org.assertj.core.api.Assertions.assertThat; + +@QuarkusTest +class OrderRepositoryTest { + + @Inject + OrderRepository repository; + + @AfterEach + void cleanup() { + repository.deleteAll(); + } + + @Test + void persist_and_findByOrderNumber() { + OrderDocument doc = new OrderDocument(); + doc.orderNumber = "ORD-001"; + doc.customerId = "CUST-1"; + doc.createdAt = java.time.Instant.now(); + repository.persist(doc); + + Optional found = repository.findByOrderNumber("ORD-001"); + assertThat(found).isPresent(); + assertThat(found.get().customerId).isEqualTo("CUST-1"); + } +} +``` + +**Bad example:** + +```java +// Bad: mocking PanacheMongoRepository proves nothing about real Mongo behavior +@QuarkusTest +class OrderRepositoryTest { + @InjectMock OrderRepository repository; + + @Test + void find() { + when(repository.findByOrderNumber("ORD-001")).thenReturn(Optional.empty()); + assertThat(repository.findByOrderNumber("ORD-001")).isEmpty(); + // only tests Mockito wiring + } +} +``` + +## Output Format + +- **ANALYZE** MongoDB code: entity mapping correctness, query parameter safety, service persistence boundaries, error handling specificity, and DTO vs entity leakage +- **CATEGORIZE** issues by impact (SECURITY for query injection, CORRECTNESS for non-atomic multi-document assumptions or generic exception handling, PERFORMANCE for missing indexes, MAINTAINABILITY for active-record overuse or entity leakage at API boundaries) +- **APPLY** Quarkus MongoDB–aligned fixes: explicit `@MongoEntity` collection, `@BsonProperty` mappings, parameterized `find()` calls, clear service-layer persistence boundaries, and typed exception translation +- **IMPLEMENT** changes so entity design, repositories, services, and tests stay consistent; create index 
creation scripts when adding `@CompoundIndex`-equivalent needs +- **EXPLAIN** trade-offs (active record vs repository, Dev Services vs Testcontainers, multi-document transactions vs single-document atomicity and application-level idempotency) +- **TEST** persistence behaviour with `@QuarkusTest` using Dev Services and explicit cleanup; never mock repositories inside persistence tests +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY MongoDB refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; confirm persistence tests pass with Dev Services before promoting +- **INJECTION SAFETY**: Never concatenate user input into Panache query strings or MongoDB JSON filters — use bound parameters or the `Document` builder API +- **ERROR HANDLING**: Catch `MongoWriteException` by `ErrorCategory`; never use a generic `catch (Exception e)` that swallows failures or returns `null` +- **API BOUNDARIES**: Do not return Panache entity instances directly from REST resources — map to DTOs to keep contracts stable and prevent internal field leakage +- **TRANSACTIONS**: Multi-document MongoDB transactions require replica set/session support — verify infrastructure compatibility before relying on them; prefer single-document atomic updates when possible +- **INCREMENTAL SAFETY**: Change one entity, repository, or service surface at a time; verify with `@QuarkusTest` and isolated test data between steps \ No newline at end of file diff --git a/skills/514-frameworks-micronaut-kafka/SKILL.md b/skills/514-frameworks-micronaut-kafka/SKILL.md new file mode 100644 index 00000000..e15b4c5f --- /dev/null +++ b/skills/514-frameworks-micronaut-kafka/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 514-frameworks-micronaut-kafka +description: Use when you need Kafka messaging in Micronaut — including @KafkaClient and @KafkaListener 
design, topic/partition strategy, serialization, retries and dead-letter processing, and error handling. This should trigger for requests such as Add Kafka in Micronaut; Review Micronaut Kafka listeners; Improve retry and failure handling for Micronaut Kafka. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Micronaut — Kafka messaging + +Apply Micronaut Kafka guidance with concrete examples for design, implementation, and error handling. + +## Constraints + +Compile before messaging refactors; verify after changes. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add Kafka in Micronaut +- Review Micronaut Kafka consumers/producers +- Improve retries, dead-letter handling, or idempotency in Micronaut Kafka + +## Workflow + +1. **Read reference and assess project context** + +Read `references/514-frameworks-micronaut-kafka.md` and inspect current messaging setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify delivery guarantees and resilience requirements to define safe improvements. + +3. **Apply framework-aligned changes** + +Implement/refactor clients, listeners, and failure strategies in Micronaut Kafka. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. + +## Reference + +For detailed guidance, examples, and constraints, see [references/514-frameworks-micronaut-kafka.md](references/514-frameworks-micronaut-kafka.md). 
diff --git a/skills/514-frameworks-micronaut-kafka/references/514-frameworks-micronaut-kafka.md b/skills/514-frameworks-micronaut-kafka/references/514-frameworks-micronaut-kafka.md new file mode 100644 index 00000000..e53cab72 --- /dev/null +++ b/skills/514-frameworks-micronaut-kafka/references/514-frameworks-micronaut-kafka.md @@ -0,0 +1,362 @@ +--- +name: 514-frameworks-micronaut-kafka +description: Use when you need Kafka in Micronaut — including Maven dependency, consumer group and topic strategy, @KafkaClient typed producers, @KafkaListener consumers with OffsetStrategy, ErrorStrategyValue for retry behaviour, dead-letter routing, idempotency, and testing with @MicronautTest and Testcontainers. This should trigger for requests such as Add Kafka in Micronaut; Review Micronaut Kafka listeners; Improve retry and failure handling for Micronaut Kafka. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Micronaut — Kafka messaging + +## Role + +You are a Senior software engineer with extensive experience in Micronaut and Apache Kafka + +## Goal + +Design and implement reliable Kafka messaging in Micronaut using the `micronaut-kafka` integration. Declare producers as `@KafkaClient` interfaces and consumers as `@KafkaListener` classes. Keep listener methods thin — delegate to `@Singleton` services. Configure `errorStrategy` so processing failures trigger retries or dead-letter routing instead of being silently consumed. 
+ +**What is covered in this Skill?** + +- Maven `micronaut-kafka` dependency +- Consumer group and topic naming conventions (`application.yml`) +- Typed event records with `eventId` for consumer de-duplication +- `@KafkaClient` interface-based typed producer with `@KafkaKey` +- `@KafkaListener` consumer with `@Topic`, `@OffsetStrategy`, and typed payload +- `errorStrategy` with `ErrorStrategyValue.RETRY_ON_ERROR`, `RETRY_EXPONENTIALLY_ON_ERROR`, and `RESUME_AT_NEXT_RECORD` — trade-offs +- Dead-letter routing via a producer called from an exception handler +- Idempotent consumer pattern using `eventId` +- Testing with `@MicronautTest` and an embedded Kafka via `@EmbeddedServer` or Testcontainers + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any Kafka changes, ensure the project compiles. Compilation failure is a BLOCKING condition. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never construct topic names or Kafka header values from untrusted user input +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files + +## Examples + +### Table of contents + +- Example 1: Maven dependency +- Example 2: Event schema design +- Example 3: Kafka configuration +- Example 4: Producer implementation +- Example 5: Consumer implementation +- Example 6: Error handling +- Example 7: Idempotent consumer + +### Example 1: Maven dependency + +Title: Add micronaut-kafka; Micronaut BOM manages the version +Description: Add `micronaut-kafka` to `pom.xml`. The Micronaut BOM manages its version. 
Avoid pinning the version explicitly — it can create classpath conflicts with the auto-configured Micronaut Kafka factory beans. For test isolation with an embedded broker, add `org.testcontainers:kafka` in `test` scope. + +**Good example:** + +```xml + + + io.micronaut.kafka + micronaut-kafka + + + + + org.testcontainers + kafka + test + +``` + +**Bad example:** + +```xml + + + io.micronaut.kafka + micronaut-kafka + 5.3.0 + +``` + +### Example 2: Event schema design + +Title: Versioned record with eventId for de-duplication and aggregate key +Description: Define each Kafka event as a Java record with a `eventId` (UUID) for consumer-side de-duplication and an `orderId` (or equivalent aggregate key) that will be set as the Kafka message key. Include a `schemaVersion` string so consumers can handle multiple versions gracefully. Use the topic naming pattern `domain.entity.operation.v{N}`. + +**Good example:** + +```java +import java.time.Instant; + +public record OrderCreatedEvent( + String eventId, // UUID; used for consumer-side de-duplication + String schemaVersion, // "v1" + String orderId, // aggregate key → used as @KafkaKey + String customerId, + Instant occurredAt +) { + public static OrderCreatedEvent of(String orderId, String customerId) { + return new OrderCreatedEvent( + java.util.UUID.randomUUID().toString(), "v1", + orderId, customerId, Instant.now() + ); + } +} +// topic: orders.created.v1 key: orderId +``` + +**Bad example:** + +```java +// Bad: untyped string payload, no schema version, no stable message key +String event = "{ type: orderCreated }"; +``` + +### Example 3: Kafka configuration + +Title: Bootstrap server, consumer group, and serializer in application.yml +Description: Configure the Kafka broker, default serializers, and consumer group in `application.yml`. Use a unique, versioned `group-id` per consumer (e.g. 
`billing-service-v1`) so different service instances share workload without accidentally consuming events intended for another service. + +**Good example:** + +```yaml +kafka: + bootstrap: + servers: ${KAFKA_BOOTSTRAP_SERVERS:`localhost:9092`} + consumers: + billing-service-v1: + group: + id: billing-service-v1 + key: + deserializer: org.apache.kafka.common.serialization.StringDeserializer + value: + deserializer: io.micronaut.configuration.kafka.serde.JsonObjectSerde +``` + +**Bad example:** + +```yaml +# Bad: shared consumer group between unrelated services +kafka: + consumers: + default: + group: + id: shared-group +``` + +### Example 4: Producer implementation + +Title: @KafkaClient interface with @KafkaKey for partition ordering +Description: Declare the producer as a `@KafkaClient` interface — Micronaut generates the implementation at compile time. Annotate the key parameter with `@KafkaKey` so the Kafka record key is set explicitly, ensuring all events for the same aggregate land in the same partition and are consumed in order. + +**Good example:** + +```java +import io.micronaut.configuration.kafka.annotation.KafkaClient; +import io.micronaut.configuration.kafka.annotation.KafkaKey; +import io.micronaut.configuration.kafka.annotation.Topic; + +@KafkaClient +public interface OrderEventsClient { + + @Topic("orders.created.v1") + void send(@KafkaKey String orderId, OrderCreatedEvent event); +} +``` + +**Bad example:** + +```java +@KafkaClient +interface OrderEventsClient { + @Topic("orders.created.v1") + void send(OrderCreatedEvent event); // Bad: no @KafkaKey → null key → no partition ordering +} +``` + +### Example 5: Consumer implementation + +Title: @KafkaListener with typed payload; thin method delegating to a service +Description: Annotate the class with `@KafkaListener` and specify the `groupId`. Accept the typed event record as the parameter. Inject and delegate immediately to an application service — keep the listener method as thin as a dispatcher.
Use `offsetStrategy = OffsetStrategy.SYNC` on `@KafkaListener` to commit offsets synchronously after successful processing, reducing duplicate delivery on restart. + +**Good example:** + +```java +import io.micronaut.configuration.kafka.annotation.KafkaKey; +import io.micronaut.configuration.kafka.annotation.KafkaListener; +import io.micronaut.configuration.kafka.annotation.OffsetStrategy; +import io.micronaut.configuration.kafka.annotation.Topic; +import jakarta.inject.Inject; + +@KafkaListener(groupId = "billing-service-v1", offsetStrategy = OffsetStrategy.SYNC) +public class BillingEventListener { + + @Inject + BillingService billingService; + + @Topic("orders.created.v1") + void onOrderCreated(@KafkaKey String orderId, OrderCreatedEvent event) { + billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +@KafkaListener +class RawListener { + @Topic("orders.created.v1") + void receive(String rawJson) { + // Bad: untyped payload, no groupId (random group on each restart), + // full business logic inlined + } +} +``` + +### Example 6: Error handling + +Title: errorStrategy RETRY_ON_ERROR with dead-letter routing for unrecoverable failures +Description: Configure `errorStrategy` with `ErrorStrategyValue` so transient failures (network timeouts, temporary DB unavailability) trigger automatic retries. Prefer a bounded strategy such as `RETRY_EXPONENTIALLY_ON_ERROR` with `retryCount` instead of unbounded retry loops. For unrecoverable failures (deserialization errors, permanent business constraint violations) implement a `KafkaListenerExceptionHandler` that publishes the original record to a dead-letter topic and resumes according to the configured strategy. Never swallow exceptions with an empty `catch` block.
+ +**Good example:** + +```java +import io.micronaut.configuration.kafka.annotation.ErrorStrategy; +import io.micronaut.configuration.kafka.annotation.ErrorStrategyValue; +import io.micronaut.configuration.kafka.annotation.KafkaClient; +import io.micronaut.configuration.kafka.annotation.KafkaKey; +import io.micronaut.configuration.kafka.annotation.KafkaListener; +import io.micronaut.configuration.kafka.annotation.OffsetStrategy; +import io.micronaut.configuration.kafka.annotation.Topic; +import io.micronaut.configuration.kafka.exceptions.KafkaListenerException; +import io.micronaut.configuration.kafka.exceptions.KafkaListenerExceptionHandler; +import jakarta.inject.Inject; + +@KafkaListener( + groupId = "billing-service-v1", + offsetStrategy = OffsetStrategy.SYNC, + errorStrategy = @ErrorStrategy( + value = ErrorStrategyValue.RETRY_EXPONENTIALLY_ON_ERROR, + retryCount = 3 + ) +) +public class BillingEventListener implements KafkaListenerExceptionHandler { + + @Inject BillingService billingService; + @Inject OrderEventsDeadLetterClient dlqClient; + + @Topic("orders.created.v1") + void onOrderCreated(@KafkaKey String orderId, OrderCreatedEvent event) { + billingService.createInvoice(event.orderId(), event.customerId()); + } + + @Override + public void handle(KafkaListenerException ex) { + // After retries exhausted: route to DLQ and resume + ex.getKafkaRecord().ifPresent(record -> + dlqClient.sendToDlq(record.key().toString(), (OrderCreatedEvent) record.value())); + } +} + +@KafkaClient +interface OrderEventsDeadLetterClient { + @Topic("orders.created.dlq") + void sendToDlq(@KafkaKey String key, OrderCreatedEvent event); +} +``` + +**Bad example:** + +```java +@Topic("orders.created.v1") +void onOrderCreated(OrderCreatedEvent event) { + try { + billingService.createInvoice(event.orderId(), event.customerId()); + } catch (Exception ignored) { + // Bad: offset committed, message consumed, error never visible + } +} +``` + + +### Example 7: Idempotent consumer + +Title: 
De-duplicate on eventId to survive at-least-once redelivery +Description: Kafka at-least-once delivery means a consumer can receive the same message more than once — on retries, after a group rebalance, or from DLQ re-processing. Guard processing with an `eventId` check before calling business logic. In production, back the de-dup store with a database or Redis. In tests, an in-memory `ConcurrentHashMap` is sufficient. + +**Good example:** + +```java +import io.micronaut.configuration.kafka.annotation.ErrorStrategy; +import io.micronaut.configuration.kafka.annotation.ErrorStrategyValue; +import io.micronaut.configuration.kafka.annotation.KafkaKey; +import io.micronaut.configuration.kafka.annotation.KafkaListener; +import io.micronaut.configuration.kafka.annotation.OffsetStrategy; +import io.micronaut.configuration.kafka.annotation.Topic; +import jakarta.inject.Inject; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +@KafkaListener( + groupId = "billing-service-v1", + offsetStrategy = OffsetStrategy.SYNC, + errorStrategy = @ErrorStrategy( + value = ErrorStrategyValue.RETRY_EXPONENTIALLY_ON_ERROR, + retryCount = 3 + ) +) +public class IdempotentBillingListener { + + private final Set processedIds = ConcurrentHashMap.newKeySet(); + + @Inject BillingService billingService; + + @Topic("orders.created.v1") + void onOrderCreated(@KafkaKey String orderId, OrderCreatedEvent event) { + if (!processedIds.add(event.eventId())) { + return; // duplicate — skip without error + } + billingService.createInvoice(event.orderId(), event.customerId()); + } +} +``` + +**Bad example:** + +```java +@Topic("orders.created.v1") +void onOrderCreated(OrderCreatedEvent event) { + // Bad: no de-duplication — rebalance or retry causes double billing + billingService.createInvoice(event.orderId(), event.customerId()); +} +``` + +## Output Format + +- **ANALYZE** Kafka code: event schema versioning, consumer group isolation, producer key strategy, listener return type and 
offset strategy, error handling configuration, and idempotency guards +- **CATEGORIZE** issues by impact (RELIABILITY for missing retries/DLQ, CORRECTNESS for missing idempotency or wrong offset strategy, MAINTAINABILITY for untyped payloads or inline business logic, SECURITY for user-controlled topic names) +- **APPLY** Micronaut Kafka–aligned fixes: typed `@KafkaClient`, `@KafkaKey` producers, versioned `groupId`, `SYNC` offset strategy, bounded `ErrorStrategyValue` retry configuration + DLQ exception handler, eventId de-duplication +- **IMPLEMENT** changes so `application.yml` bindings, producer interfaces, listener classes, and tests stay consistent +- **EXPLAIN** trade-offs (SYNC vs ASYNC offset strategy, retry strategies vs `RESUME_AT_NEXT_RECORD`, in-memory vs persistent de-dup store) +- **TEST** with `@MicronautTest` + Testcontainers Kafka; never mock `@KafkaClient` interfaces inside integration tests meant to verify messaging behaviour +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY Kafka refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; exercise listener integration tests before promoting +- **INJECTION SAFETY**: Never construct topic names or Kafka header values from untrusted user input +- **ERROR HANDLING**: Never swallow exceptions inside `@KafkaListener` methods — propagate so the configured `errorStrategy` can retry or route to DLQ +- **IDEMPOTENCY**: Always de-duplicate on `eventId` — Kafka's at-least-once guarantee means consumers must tolerate receiving the same message more than once +- **GROUP ID**: Use a unique, versioned `groupId` per consumer class — a shared `groupId` across unrelated services causes accidental competitive consumption +- **DLQ MONITORING**: Set up alerting on the DLQ topic — messages landing there indicate systematic processing failures +- 
**INCREMENTAL SAFETY**: Change one producer or consumer surface at a time; verify with `@MicronautTest` between steps \ No newline at end of file diff --git a/skills/515-frameworks-micronaut-mongodb/SKILL.md b/skills/515-frameworks-micronaut-mongodb/SKILL.md new file mode 100644 index 00000000..f7be36a3 --- /dev/null +++ b/skills/515-frameworks-micronaut-mongodb/SKILL.md @@ -0,0 +1,48 @@ +--- +name: 515-frameworks-micronaut-mongodb +description: Use when you need MongoDB persistence in Micronaut — including @MongoRepository design, document modeling, indexes, query patterns, and error handling. This should trigger for requests such as Add MongoDB in Micronaut; Review Micronaut Data Mongo design; Improve error handling for Micronaut Mongo operations. Part of cursor-rules-java project +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Micronaut — MongoDB + +Apply Micronaut MongoDB guidance with concrete examples for design, implementation, and error handling. + +## Constraints + +Compile before MongoDB refactors; verify after changes. + +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **BEFORE APPLYING**: Read the reference for detailed rules and examples + +## When to use this skill + +- Add MongoDB in Micronaut +- Review Micronaut Mongo entities/repositories +- Improve duplicate key handling, retries, or optimistic locking in Micronaut Mongo + +## Workflow + +1. **Read reference and assess project context** + +Read `references/515-frameworks-micronaut-mongodb.md` and inspect persistence setup before proposing changes. + +2. **Gather scope and decide target improvements** + +Identify model and consistency requirements and define safe improvements. + +3. 
**Apply framework-aligned changes** + +Implement/refactor documents, repositories, indexes, and error handling. + +4. **Run verification and report results** + +Execute build/tests and summarize what changed, what was verified, and follow-up actions. + +## Reference + +For detailed guidance, examples, and constraints, see [references/515-frameworks-micronaut-mongodb.md](references/515-frameworks-micronaut-mongodb.md). diff --git a/skills/515-frameworks-micronaut-mongodb/references/515-frameworks-micronaut-mongodb.md b/skills/515-frameworks-micronaut-mongodb/references/515-frameworks-micronaut-mongodb.md new file mode 100644 index 00000000..3d963f4e --- /dev/null +++ b/skills/515-frameworks-micronaut-mongodb/references/515-frameworks-micronaut-mongodb.md @@ -0,0 +1,412 @@ +--- +name: 515-frameworks-micronaut-mongodb +description: Use when you need MongoDB persistence in Micronaut — including Maven dependency, @MappedEntity document design, @MongoRepository with typed finders and @MongoFindQuery, @Singleton service boundaries, optional @Transactional use where MongoDB transaction support is configured, and explicit error handling for MongoWriteException and DataAccessException. This should trigger for requests such as Add MongoDB in Micronaut; Review Micronaut Data Mongo entities; Improve error handling for Micronaut Mongo operations. +license: Apache-2.0 +metadata: + author: Juan Antonio Breña Moral + version: 0.15.0-SNAPSHOT +--- +# Micronaut — MongoDB + +## Role + +You are a Senior software engineer with extensive experience in Micronaut and MongoDB + +## Goal + +Design and implement MongoDB persistence in Micronaut using Micronaut Data MongoDB. Prefer `@MappedEntity` records for immutable, compile-time-verified persistence types. Declare repositories as `@MongoRepository` interfaces so Micronaut generates implementations at compile time. Keep business logic in `@Singleton` services — never in repositories or controllers. 
Guard all queries with bound parameters — never concatenate user input into filter strings. + +**What is covered in this Skill?** + +- Maven `micronaut-data-mongodb` and `mongodb-driver-sync` dependencies +- `@MappedEntity` document design with `@Id`, `@MappedProperty`, and `@GeneratedValue` +- `@MongoRepository` extending `CrudRepository` with derived finders +- `@MongoFindQuery` for complex filter expressions with bound parameters +- `@Singleton` service with `io.micronaut.transaction.annotation.Transactional` +- Pagination with `Pageable` and `Page` +- DTO projections with interface projections +- Error handling: `MongoWriteException` / duplicate-key category checks, `DataAccessException`, exception chaining +- Testing with `@MicronautTest` + `TestPropertyProvider` + Testcontainers MongoDB + +**Scope:** Apply recommendations based on the reference rules and good/bad code examples. + +## Constraints + +Before applying any MongoDB changes, ensure the project compiles. Compilation failure is a BLOCKING condition. 
+ +- **MANDATORY**: Run `./mvnw compile` or `mvn compile` before applying any change +- **SAFETY**: If compilation fails, stop immediately +- **VERIFY**: Run `./mvnw clean verify` or `mvn clean verify` after applying improvements +- **INJECTION**: Never concatenate user input into MongoDB filter strings or raw JSON queries — always use derived finders or `@MongoFindQuery` with bound parameters +- **BEFORE APPLYING**: Read the reference for detailed rules and good/bad patterns +- **EDGE CASE**: If the user goal is ambiguous, stop and ask a clarifying question before editing files + +## Examples + +### Table of contents + +- Example 1: Maven dependency +- Example 2: Entity design +- Example 3: Repository interface +- Example 4: Service layer +- Example 5: Pagination +- Example 6: Error handling +- Example 7: Integration testing + +### Example 1: Maven dependency + +Title: Add micronaut-data-mongodb and mongodb-driver-sync; BOM manages versions +Description: Add `micronaut-data-mongodb` for the repository abstraction and `mongodb-driver-sync` for the synchronous driver. The Micronaut BOM manages both versions — do not pin them explicitly. Add the Testcontainers MongoDB artifact in `test` scope for real-database integration tests. + +**Good example:** + +```xml + + + io.micronaut.data + micronaut-data-mongodb + + + org.mongodb + mongodb-driver-sync + runtime + + + + + org.testcontainers + mongodb + test + +``` + +**Bad example:** + +```xml + + + io.micronaut.data + micronaut-data-mongodb + 4.7.0 + +``` + +### Example 2: Entity design + +Title: @MappedEntity record with @Id, @MappedProperty, and factory method +Description: Annotate persistence types with `@MappedEntity("collection-name")` to bind them to an explicit MongoDB collection. Use `@MappedProperty` to map Java record components to MongoDB document field names when they differ (e.g. snake_case storage vs camelCase Java). Leave `@Id` null for new documents — Micronaut Data sets it after insert. 
Expose a static factory method so callers never construct a raw entity with a non-null `id`. + +**Good example:** + +```java +import io.micronaut.data.annotation.GeneratedValue; +import io.micronaut.data.annotation.Id; +import io.micronaut.data.annotation.MappedEntity; +import io.micronaut.data.annotation.MappedProperty; +import org.bson.types.ObjectId; +import java.time.Instant; + +@MappedEntity("orders") +public record OrderDocument( + @Id @GeneratedValue ObjectId id, + @MappedProperty("order_number") String orderNumber, + @MappedProperty("customer_id") String customerId, + @MappedProperty("created_at") Instant createdAt +) { + public static OrderDocument of(String orderNumber, String customerId) { + return new OrderDocument(null, orderNumber, customerId, Instant.now()); + } +} +``` + +**Bad example:** + +```java +// Bad: missing @MappedEntity — Micronaut Data cannot derive collection name reliably; +// no @MappedProperty — field names stored as Java camelCase, breaking schema conventions; +// mutable class with setters allows accidental mutation after load +public class OrderDocument { + @Id private ObjectId id; + private String orderNumber; + public void setId(ObjectId id) { this.id = id; } +} +``` + +### Example 3: Repository interface + +Title: @MongoRepository with typed derived finders and @MongoFindQuery +Description: Extend `CrudRepository` and annotate with `@MongoRepository`. Micronaut Data generates the implementation at compile time, validating method names and query correctness. Use query derivation (`findByOrderNumber`) for simple equality lookups. Use `@MongoFindQuery` with a bound filter document for composite or range queries — never concatenate user input into the filter string. 
+ +**Good example:** + +```java +import io.micronaut.data.mongodb.annotation.MongoFindQuery; +import io.micronaut.data.mongodb.annotation.MongoRepository; +import io.micronaut.data.repository.CrudRepository; +import org.bson.types.ObjectId; +import java.time.Instant; +import java.util.List; +import java.util.Optional; + +@MongoRepository +public interface OrderRepository extends CrudRepository { + + // Derived finder — safe, generated at compile time + Optional findByOrderNumber(String orderNumber); + + List findByCustomerId(String customerId); + + // @MongoFindQuery: bound parameter :createdAt — no string concatenation + @MongoFindQuery("{ 'created_at': { $gte: :createdAt } }") + List findCreatedAfter(Instant createdAt); +} +``` + +**Bad example:** + +```java +// Bad: missing @MongoRepository — not a Micronaut Data bean +public interface OrderRepository extends CrudRepository {} + +// Bad: raw string concatenation into a Mongo filter — NoSQL injection risk +class OrderCustomQuery { + @Inject MongoClient client; + List findByCustomer(String customerId) { + String filter = "{ customer_id: '" + customerId + "' }"; // injection-prone + return ...; // unsafe + } +} +``` + +### Example 4: Service layer + +Title: @Singleton service with @Transactional for multi-step writes +Description: Wrap business use cases in `@Singleton` services. Use `io.micronaut.transaction.annotation.Transactional` for methods that coordinate multiple repository writes. MongoDB multi-document transactions require a replica set — verify infrastructure support before enabling. Constructor-inject the repository to maintain testability and keep the API layer out of persistence concerns. 
+ +**Good example:** + +```java +import io.micronaut.transaction.annotation.Transactional; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.bson.types.ObjectId; +import java.util.Optional; + +@Singleton +public class OrderService { + + private final OrderRepository repository; + + @Inject + public OrderService(OrderRepository repository) { + this.repository = repository; + } + + @Transactional + public OrderDocument create(String orderNumber, String customerId) { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } + + public Optional<OrderDocument> findByOrderNumber(String orderNumber) { + return repository.findByOrderNumber(orderNumber); + } + + @Transactional + public void delete(ObjectId id) { + repository.deleteById(id); + } +} +``` + +**Bad example:** + +```java +// Bad: controller calling repository directly — no service layer, untestable +@Controller("/orders") +class OrderController { + @Inject OrderRepository repository; + + @Post + OrderDocument create(String body) { + return repository.save(OrderDocument.of(body, "unknown")); // no validation or mapping + } +} +``` + +### Example 5: Pagination + +Title: Pageable and Page for bounded list queries +Description: Never expose unbounded `findAll()` on large collections. Use `PageableRepository` or accept a `Pageable` parameter and return `Page<T>`. This keeps memory consumption bounded and lets callers navigate results page by page.
+ +**Good example:** + +```java +import io.micronaut.data.model.Page; +import io.micronaut.data.model.Pageable; +import io.micronaut.data.mongodb.annotation.MongoRepository; +import io.micronaut.data.repository.PageableRepository; +import org.bson.types.ObjectId; + +@MongoRepository +public interface OrderRepository extends PageableRepository { + + Page findByCustomerId(String customerId, Pageable pageable); +} +``` + +**Bad example:** + +```java +@MongoRepository +interface OrderRepository extends CrudRepository { + + // Bad: loads every document in the collection — OOM risk on large datasets + List findAll(); +} +``` + +### Example 6: Error handling + +Title: MongoWriteException and DataAccessException — translate at service boundary +Description: Catch MongoDB driver write failures (for example duplicate-key violations via `MongoWriteException` and `ErrorCategory.DUPLICATE_KEY`) and `DataAccessException` at the service boundary, then translate them into meaningful domain exceptions. Always chain the original exception to preserve the stack trace. Never return `null` from a catch block — it corrupts the caller's control flow. 
+ +**Good example:** + +```java +import com.mongodb.ErrorCategory; +import com.mongodb.MongoWriteException; +import io.micronaut.data.exceptions.DataAccessException; +import io.micronaut.transaction.annotation.Transactional; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +@Singleton +public class OrderService { + + @Inject + OrderRepository repository; + + @Transactional + public OrderDocument create(String orderNumber, String customerId) { + try { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } catch (MongoWriteException ex) { + if (ex.getError().getCategory() == ErrorCategory.DUPLICATE_KEY) { + throw new IllegalStateException("Order number already exists: " + orderNumber, ex); + } + throw ex; + } catch (DataAccessException ex) { + throw new IllegalStateException("MongoDB data access failure saving order: " + orderNumber, ex); + } + } +} +``` + +**Bad example:** + +```java +public OrderDocument create(String orderNumber, String customerId) { + try { + return repository.save(OrderDocument.of(orderNumber, customerId)); + } catch (Exception e) { + // Bad: generic catch + null return hides duplicate-key, network errors, schema drift + return null; + } +} +``` + + +### Example 7: Integration testing + +Title: @MicronautTest + TestPropertyProvider + Testcontainers MongoDB +Description: Use `@MicronautTest` with `TestPropertyProvider` to supply the real MongoDB connection string from a Testcontainers `MongoDBContainer`. This runs tests against a real MongoDB instance with the actual driver, schema, and index behaviour. Never mock `@MongoRepository` inside a persistence test — the goal is exercising real MongoDB behaviour.
+ +**Good example:** + +```java +import io.micronaut.test.extensions.junit5.annotation.MicronautTest; +import io.micronaut.test.support.TestPropertyProvider; +import jakarta.inject.Inject; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.MongoDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Map; +import java.util.Optional; + +import static org.assertj.core.api.Assertions.assertThat; + +@MicronautTest(transactional = false) +@Testcontainers +class OrderRepositoryIT implements TestPropertyProvider { + + @Container + static MongoDBContainer mongo = new MongoDBContainer("mongo:7.0"); + + @Inject + OrderRepository repository; + + @Override + public Map getProperties() { + if (!mongo.isRunning()) mongo.start(); + return Map.of("mongodb.uri", mongo.getReplicaSetUrl()); + } + + @AfterEach + void cleanup() { repository.deleteAll(); } + + @Test + void save_generatesId() { + OrderDocument saved = repository.save(OrderDocument.of("ORD-001", "CUST-1")); + assertThat(saved.id()).isNotNull(); + } + + @Test + void findByOrderNumber_returnsDocument() { + repository.save(OrderDocument.of("ORD-002", "CUST-2")); + Optional found = repository.findByOrderNumber("ORD-002"); + assertThat(found).isPresent(); + assertThat(found.get().customerId()).isEqualTo("CUST-2"); + } +} +``` + +**Bad example:** + +```java +// Bad: mocking @MongoRepository proves nothing about real MongoDB behavior +@MicronautTest +class OrderRepositoryTest { + @MockBean(OrderRepository.class) + OrderRepository mockRepository; + + @Test + void findByOrderNumber() { + when(mockRepository.findByOrderNumber("ORD-001")).thenReturn(Optional.empty()); + // verifies Mockito wiring only — no actual MongoDB interaction + } +} +``` + +## Output Format + +- **ANALYZE** MongoDB code: `@MappedEntity` mapping completeness, `@MongoRepository` query safety, service transaction 
boundaries, error handling specificity, pagination strategy, and DTO vs entity leakage +- **CATEGORIZE** issues by impact (SECURITY for filter injection, CORRECTNESS for missing transactions or generic exception handling, PERFORMANCE for unbounded queries or missing indexes, MAINTAINABILITY for mutable entities or entity leakage at API boundaries) +- **APPLY** Micronaut Data MongoDB–aligned fixes: explicit `@MappedEntity` collection, `@MappedProperty` field mappings, `@MongoRepository` with bound-parameter queries, service-layer `@Transactional`, and typed exception translation +- **IMPLEMENT** changes so document model, repository interfaces, services, and tests stay consistent +- **EXPLAIN** trade-offs (derived finder vs `@MongoFindQuery`, sync driver vs reactive, multi-document transactions vs application idempotency, `Page` vs full list returns) +- **TEST** repository behaviour with `@MicronautTest` + `TestPropertyProvider` + Testcontainers; never mock repositories inside persistence tests meant to verify MongoDB behaviour +- **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes + +## Safeguards + +- **BLOCKING SAFETY CHECK**: Run `./mvnw compile` before ANY MongoDB refactoring — compilation failure is a HARD STOP +- **CRITICAL VALIDATION**: Run `./mvnw clean verify` after changes; confirm repository integration tests pass against a real MongoDB instance before promoting +- **INJECTION SAFETY**: Never concatenate user input into `@MongoFindQuery` filter strings or raw MongoDB JSON — use derived finders or bound parameters exclusively +- **ERROR HANDLING**: Catch duplicate-key write failures and `DataAccessException` at the service boundary; never use a generic `catch (Exception e)` that swallows failures or returns `null` +- **PAGINATION**: Never expose unbounded `findAll()` on large collections — always use `PageableRepository` or `Pageable`-accepting methods with `Page` +- **TRANSACTIONS**: Multi-document `@Transactional` requires 
a MongoDB replica set — verify infrastructure compatibility before relying on it +- **AOP SELF-INVOCATION**: Never call a `@Transactional` method via `this.method()` inside the same Micronaut bean — the AOP interceptor is bypassed; extract to a separate injected bean +- **API BOUNDARIES**: Avoid returning `@MappedEntity` instances directly from HTTP controllers — map to DTOs to keep API contracts stable and prevent internal field leakage +- **INCREMENTAL SAFETY**: Change one entity, repository, or service surface at a time; verify with integration tests between steps \ No newline at end of file From 8ffe5d89b5e94c52ab09bc483ccc4c120473a48f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Bre=C3=B1a=20Moral?= Date: Thu, 7 May 2026 14:51:17 +0200 Subject: [PATCH 2/3] feat(Skills): Adding Kafka & MongoDB support for Spring Boot, Quarkus, Micronaut & Associated Agents --- .../314-frameworks-spring-kafka.xml | 51 ++++++++++++++++- .../315-frameworks-spring-mongodb.xml | 49 ++++++++++++++++- .../414-frameworks-quarkus-kafka.xml | 55 ++++++++++++++++++- .../415-frameworks-quarkus-mongodb.xml | 55 ++++++++++++++++++- .../514-frameworks-micronaut-kafka.xml | 49 ++++++++++++++++- .../515-frameworks-micronaut-mongodb.xml | 49 ++++++++++++++++- 6 files changed, 299 insertions(+), 9 deletions(-) diff --git a/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml b/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml index ace3596a..ab3f58d1 100644 --- a/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml +++ b/skills-generator/src/main/resources/skill-references/314-frameworks-spring-kafka.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Spring Boot Kafka Guidelines + Spring Boot — Kafka messaging Use when you need Kafka with Spring Boot (`spring-kafka`) and want examples for design, implementation, and error handling with retries, dead-letter topics, 
and idempotent consumers. You are a Senior software engineer with extensive experience in Spring Boot and Apache Kafka @@ -172,6 +172,53 @@ class BadOrderEventListener { + + + Testing + Integration tests with Testcontainers Kafka + + + + + + + + + + + + + @@ -180,7 +227,7 @@ class BadOrderEventListener { **APPLY** Spring Kafka-aligned fixes: configure proper error handlers, use typed payloads, ensure idempotency **IMPLEMENT** changes consistently across producer and consumer configurations **EXPLAIN** trade-offs (e.g., at-least-once vs exactly-once delivery, retry backoff strategies) - **TEST** messaging behavior with `@EmbeddedKafka` or Testcontainers + **TEST** messaging behavior with Testcontainers Kafka integration tests (avoid relying only on `@EmbeddedKafka`) **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes diff --git a/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml b/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml index b13d8817..244efa92 100644 --- a/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml +++ b/skills-generator/src/main/resources/skill-references/315-frameworks-spring-mongodb.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Spring Boot MongoDB Guidelines + Spring Boot — MongoDB Use when you need MongoDB with Spring Data MongoDB and want examples for design, implementation, and error handling for robust document persistence. 
You are a Senior software engineer with extensive experience in Spring Boot and MongoDB @@ -175,6 +175,53 @@ class BadOrderService { + + + Testing + Repository tests with MongoDB Testcontainers + + + + + + + + + + + + + diff --git a/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml b/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml index fd3172b2..cfd59b53 100644 --- a/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml +++ b/skills-generator/src/main/resources/skill-references/414-frameworks-quarkus-kafka.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Quarkus Kafka Guidelines + Quarkus — Kafka messaging Use when you need Kafka in Quarkus with SmallRye Reactive Messaging and want examples for design, implementation, and error handling. You are a Senior software engineer with extensive experience in Quarkus and Kafka @@ -141,6 +141,57 @@ class BadOrderConsumer { + + + Testing + @QuarkusTest with Kafka Testcontainers via test resource + + + + + + + start() { + kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.6.1")); + kafka.start(); + return Map.of("kafka.bootstrap.servers", kafka.getBootstrapServers()); + } + @Override + public void stop() { + if (kafka != null) kafka.stop(); + } +} + +@QuarkusTest +@QuarkusTestResource(KafkaTestResource.class) +class OrderKafkaIT { + @Test + void shouldProcessIncomingMessage() { + // publish event and assert side effects + } +}]]> + + + + + + @@ -149,7 +200,7 @@ class BadOrderConsumer { **APPLY** Quarkus Kafka-aligned fixes: configure DLQs, use typed payloads, ensure non-blocking processing **IMPLEMENT** changes consistently across `application.properties` and Java code **EXPLAIN** trade-offs (e.g., DLQ vs ignore, throttled vs latest commit strategies) - **TEST** messaging behavior with `@QuarkusTest` and Dev Services for Kafka + **TEST** messaging behavior with 
`@QuarkusTest` and Testcontainers Kafka (optionally through Dev Services where appropriate) **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes diff --git a/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml b/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml index 5fabdd4f..ef92e1cd 100644 --- a/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml +++ b/skills-generator/src/main/resources/skill-references/415-frameworks-quarkus-mongodb.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Quarkus MongoDB Guidelines + Quarkus — MongoDB Use when you need MongoDB in Quarkus with Mongo Panache and want examples for design, implementation, and error handling. You are a Senior software engineer with extensive experience in Quarkus and MongoDB @@ -170,6 +170,57 @@ public class BadOrderService { + + + Testing + @QuarkusTest with MongoDB Testcontainers via test resource + + + + + + + start() { + mongo = new MongoDBContainer(DockerImageName.parse("mongo:7.0")); + mongo.start(); + return Map.of("quarkus.mongodb.connection-string", mongo.getReplicaSetUrl()); + } + @Override + public void stop() { + if (mongo != null) mongo.stop(); + } +} + +@QuarkusTest +@QuarkusTestResource(MongoTestResource.class) +class OrderMongoIT { + @Test + void shouldPersistAndReadDocument() { + // verify repository round trip + } +}]]> + + + + + + @@ -178,7 +229,7 @@ public class BadOrderService { **APPLY** Quarkus Mongo Panache-aligned fixes: use `@MongoEntity`, `PanacheMongoRepository`, and proper exception handling **IMPLEMENT** changes so schema and queries stay consistent **EXPLAIN** trade-offs (e.g., active record vs repository pattern, embedding vs referencing) - **TEST** repository behavior with `@QuarkusTest` and Dev Services for MongoDB + **TEST** repository behavior with `@QuarkusTest` and MongoDB Testcontainers (optionally through 
Dev Services where appropriate) **VALIDATE** with `./mvnw compile` before and `./mvnw clean verify` after changes diff --git a/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml b/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml index 3ea472f9..d8c05eca 100644 --- a/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml +++ b/skills-generator/src/main/resources/skill-references/514-frameworks-micronaut-kafka.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Micronaut Kafka Guidelines + Micronaut — Kafka messaging Use when you need Kafka in Micronaut and want examples for design, implementation, and error handling with @KafkaClient and @KafkaListener. You are a Senior software engineer with extensive experience in Micronaut and Kafka @@ -149,6 +149,53 @@ class BadBillingListener { + + + Testing + @MicronautTest with Kafka Testcontainers + + + + + + + getProperties() { + return Map.of("kafka.bootstrap.servers", kafka.getBootstrapServers()); + } + + @Test + void shouldConsumeEvent() { + // publish and verify processing + } +}]]> + + + + + + diff --git a/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml b/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml index bdc2b6f1..2b9b40c2 100644 --- a/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml +++ b/skills-generator/src/main/resources/skill-references/515-frameworks-micronaut-mongodb.xml @@ -4,7 +4,7 @@ Juan Antonio Breña Moral 0.15.0-SNAPSHOT Apache-2.0 - Micronaut MongoDB Guidelines + Micronaut — MongoDB Use when you need MongoDB in Micronaut Data and want examples for design, implementation, and error handling. 
You are a Senior software engineer with extensive experience in Micronaut and MongoDB @@ -175,6 +175,53 @@ class BadOrderService { + + + Testing + @MicronautTest with MongoDB Testcontainers + + + + + + + getProperties() { + return Map.of("mongodb.uri", mongo.getReplicaSetUrl()); + } + + @Test + void shouldPersistDocument() { + // verify repository persistence/query behavior + } +}]]> + + + + + + From 4b120baeab30280d73eac8c9d0d39af47e39664d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Bre=C3=B1a=20Moral?= Date: Thu, 7 May 2026 14:53:52 +0200 Subject: [PATCH 3/3] feat(pipeline): Improving main pipeline performance --- .github/workflows/examples-build.yaml | 28 +++++++++++++++++++++++++++ .github/workflows/maven.yaml | 21 -------------------- 2 files changed, 28 insertions(+), 21 deletions(-) create mode 100644 .github/workflows/examples-build.yaml diff --git a/.github/workflows/examples-build.yaml b/.github/workflows/examples-build.yaml new file mode 100644 index 00000000..fe276b82 --- /dev/null +++ b/.github/workflows/examples-build.yaml @@ -0,0 +1,28 @@ +name: Examples Build + +on: + push: + paths: + - "examples/**" + +jobs: + examples: + name: Build Examples + runs-on: ubuntu-latest + strategy: + matrix: + example: + - { name: "Maven", path: "examples/maven-demo", goal: "verify" } + - { name: "Spring Boot Memory Leak", path: "examples/spring-boot-memory-leak-demo", goal: "package" } + - { name: "Spring Boot Performance Bottleneck", path: "examples/spring-boot-performance-bottleneck-demo", goal: "package" } + - { name: "Spring Boot", path: "examples/spring-boot-demo/implementation", goal: "verify -Pjacoco" } + steps: + - uses: actions/checkout@v6 + with: + submodules: true # Fetches all submodules + - uses: actions/setup-java@v5 + with: + distribution: "graalvm" # See 'Supported distributions' for available options + java-version: "25" + - name: Build ${{ matrix.example.name }} + run: cd ${{ matrix.example.path }} && ./mvnw --batch-mode 
--no-transfer-progress ${{ matrix.example.goal }} --file pom.xml diff --git a/.github/workflows/maven.yaml b/.github/workflows/maven.yaml index 671357c5..a4ba9c88 100644 --- a/.github/workflows/maven.yaml +++ b/.github/workflows/maven.yaml @@ -52,27 +52,6 @@ jobs: git log -1 --pretty=%B > /tmp/commit-msg.txt pre-commit run conventional-pre-commit --hook-stage commit-msg --commit-msg-filename /tmp/commit-msg.txt - examples: - name: Build Examples - runs-on: ubuntu-latest - strategy: - matrix: - example: - - { name: "Maven", path: "examples/maven-demo", goal: "verify" } - - { name: "Spring Boot Memory Leak", path: "examples/spring-boot-memory-leak-demo", goal: "package" } - - { name: "Spring Boot Performance Bottleneck", path: "examples/spring-boot-performance-bottleneck-demo", goal: "package" } - - { name: "Spring Boot", path: "examples/spring-boot-demo/implementation", goal: "verify -Pjacoco" } - steps: - - uses: actions/checkout@v6 - with: - submodules: true # Fetches all submodules - - uses: actions/setup-java@v5 - with: - distribution: 'graalvm' # See 'Supported distributions' for available options - java-version: '25' - - name: Build ${{ matrix.example.name }} - run: cd ${{ matrix.example.path }} && ./mvnw --batch-mode --no-transfer-progress ${{ matrix.example.goal }} --file pom.xml - package-agent-artifacts: name: Package Agents and Skills runs-on: ubuntu-latest