From e4990ed3467a20b3e55057876ec0cf1b351e740f Mon Sep 17 00:00:00 2001 From: solfe Date: Mon, 30 Mar 2026 12:31:36 +0900 Subject: [PATCH 01/28] docs: add mongodb logical erd --- docs/architecture/README.md | 1 + docs/architecture/mongodb-logical-erd.dbml | 499 +++++++++++++++++++++ 2 files changed, 500 insertions(+) create mode 100644 docs/architecture/mongodb-logical-erd.dbml diff --git a/docs/architecture/README.md b/docs/architecture/README.md index c89648f..8d3bace 100644 --- a/docs/architecture/README.md +++ b/docs/architecture/README.md @@ -27,3 +27,4 @@ - [Streak 도메인 구조](streak.md) - [Word 도메인 구조](word.md) - [Book 도메인 구조](content-book.md) +- [MongoDB 논리 ERD (dbdiagram.io용 DBML)](mongodb-logical-erd.dbml) diff --git a/docs/architecture/mongodb-logical-erd.dbml b/docs/architecture/mongodb-logical-erd.dbml new file mode 100644 index 0000000..a72df4f --- /dev/null +++ b/docs/architecture/mongodb-logical-erd.dbml @@ -0,0 +1,499 @@ +// llv-api MongoDB logical ERD for dbdiagram.io +// - Embedded arrays/objects/maps are modeled as `json`. +// - Refs are application-level logical relations; MongoDB does not enforce foreign keys. +// - Some polymorphic fields such as `contentId` are kept as notes instead of strict refs. 
+ +Table users { + id varchar [pk] + username varchar [not null, unique] + password varchar + email varchar + displayName varchar + provider varchar + profileImageUrl varchar + role varchar + deleted boolean + createdAt datetime + deletedAt datetime +} + +Table refreshTokens { + id varchar [pk] + tokenId varchar [not null, unique] + userId varchar [not null] + expiresAt datetime [note: 'TTL index ttl_expires_at, expireAfter=0s'] +} + +Table userTickets { + id varchar [pk] + userId varchar [not null, unique] + balance int + version bigint + createdAt datetime + updatedAt datetime +} + +Table ticketTransactions { + id varchar [pk] + userId varchar [not null] + amount int [note: 'positive=grant, negative=use'] + description text + status varchar + reservationId varchar + createdAt datetime + + indexes { + (userId, createdAt) [name: 'userId_createdAt'] + } +} + +Table fcmTokens { + id varchar [pk] + userId varchar [not null] + deviceId varchar [not null] + fcmToken varchar [not null, unique] + platform varchar + countryCode varchar + appVersion varchar + osVersion varchar + createdAt datetime + updatedAt datetime [note: 'TTL index, expireAfter=90d'] + isActive boolean +} + +Table pushLogs { + id varchar [pk] + campaignId varchar [unique] + fcmMessageId varchar + campaignGroup varchar + userId varchar + sentAt datetime + sentSuccess boolean + openedAt datetime + createdAt datetime [note: 'TTL index, expireAfter=180d'] + version bigint +} + +Table appVersion { + id varchar [pk] + latestVersion varchar + minimumVersion varchar + updatedAt datetime +} + +Table contentBanners { + id varchar [pk] + countryCode varchar + contentId varchar [note: 'Polymorphic content ref: book/article/custom/feed depending on contentType'] + contentType varchar + contentTitle varchar + contentAuthor varchar + contentCoverImageUrl varchar + contentReadingTime int + subtitle varchar + title varchar + description text + displayOrder int + isActive boolean + createdAt datetime +} + +Table 
crawlingDsl { + id varchar [pk] + domain varchar [unique] + name varchar + contentType varchar + titleDsl text + contentDsl text + coverImageDsl text + accessUrl varchar + createdAt datetime + updatedAt datetime +} + +Table words { + id varchar [pk] + word varchar [not null] + sourceLanguageCode varchar [not null] + targetLanguageCode varchar [not null] + summary json + meanings json + relatedForms json + isEssential boolean + + indexes { + (word, sourceLanguageCode, targetLanguageCode) [unique, name: 'word_language_pair_idx'] + (word, targetLanguageCode) [name: 'word_target_language_idx'] + } +} + +Table word_variants { + id varchar [pk] + word varchar [not null] + originalForm varchar [not null, note: 'Logical ref to words.word'] + variantTypes json + + indexes { + (word, originalForm) [unique, name: 'word_original_idx'] + } +} + +Table invalidWords { + id varchar [pk] + word varchar [unique] + attemptedAt datetime + attemptCount int +} + +Table wordBookmarks { + id varchar [pk] + userId varchar [not null] + word varchar [not null, note: 'Bookmarked original-form string, not words.id'] + bookmarkedAt datetime + + indexes { + (userId, word) [unique, name: 'userId_word_unique'] + } +} + +Table userStudyReports { + id varchar [pk] + userId varchar [not null, unique] + currentStreak int + longestStreak int + lastCompletionDate date + streakStartDate date + lastLearningTimestamp datetime + availableFreezes int + totalReadingTimeSeconds bigint + completedContentIds json + preferredStudyHour int + preferredStudyHourUpdatedAt datetime + createdAt datetime + updatedAt datetime +} + +Table dailyCompletions { + id varchar [pk] + userId varchar [not null] + completionDate date + firstCompletionCount int + totalCompletionCount int + completedContents json [note: 'Embedded array: type, contentId, chapterId, completedAt, readingTime, category, difficultyLevel, streakStatus'] + streakCount int + streakStatus varchar + createdAt datetime + + indexes { + (userId, completionDate) 
[unique, name: 'idx_userId_completionDate'] + } +} + +Table freezeTransactions { + id varchar [pk] + userId varchar [not null] + amount int + description text + createdAt datetime +} + +Table books { + id varchar [pk] + title varchar + titleTranslations json + author varchar + coverImageUrl varchar + difficultyLevel varchar + chapterCount int + readingTime int + averageRating float + reviewCount int + viewCount int + tags json + createdAt datetime +} + +Table chapters { + id varchar [pk] + bookId varchar [not null] + chapterNumber int + title varchar + chapterImageUrl varchar + description text + readingTime int +} + +Table chunks { + id varchar [pk] + chapterId varchar [not null] + chunkNumber int + difficultyLevel varchar + type varchar + content text + description text + + indexes { + (chapterId, difficultyLevel, chunkNumber) [name: 'chapter_difficulty_chunk_idx'] + } +} + +Table bookProgress { + id varchar [pk] + userId varchar [not null] + bookId varchar [not null] + chapterId varchar + chunkId varchar + currentReadChapterNumber int + maxReadChapterNumber int + normalizedProgress float + maxNormalizedProgress float + currentDifficultyLevel varchar + chapterProgresses json [note: 'Embedded array: chapterNumber, progressPercentage, isCompleted, completedAt'] + isCompleted boolean + completedAt datetime + updatedAt datetime + + indexes { + (userId, bookId) [unique, name: 'idx_user_book_progress'] + } +} + +Table articles { + id varchar [pk] + title varchar + author varchar + coverImageUrl varchar + originUrl varchar + difficultyLevel varchar + readingTime int + averageRating float + reviewCount int + viewCount int + category varchar + tags json + targetLanguageCode json + createdAt datetime +} + +Table articleChunks { + id varchar [pk] + articleId varchar [not null] + chunkNumber int + difficultyLevel varchar + type varchar + content text + description text + + indexes { + (articleId, difficultyLevel, chunkNumber) [name: 'article_difficulty_chunk_idx'] + } +} + 
+Table articleProgress { + id varchar [pk] + userId varchar [not null] + articleId varchar [not null] + chunkId varchar + normalizedProgress float + maxNormalizedProgress float + currentDifficultyLevel varchar + isCompleted boolean + completedAt datetime + updatedAt datetime + + indexes { + (userId, articleId) [unique, name: 'idx_user_article_progress'] + } +} + +Table feedSources { + id varchar [pk] + url varchar [unique] + domain varchar + name varchar + coverImageDsl text + contentType varchar + category varchar + tags json + isActive boolean + createdAt datetime + updatedAt datetime +} + +Table feeds { + id varchar [pk] + contentType varchar + title varchar + url varchar [unique] + thumbnailUrl varchar + author varchar + description text + category varchar + tags json + sourceProvider varchar [note: 'Provider/domain string, not feedSources.id'] + publishedAt datetime + displayOrder int + viewCount int + avgReadTimeSeconds float + createdAt datetime + deleted boolean + deletedAt datetime +} + +Table contentRequests { + id varchar [pk] + userId varchar [not null] + title varchar [not null] + originalText text + contentType varchar + originAuthor varchar + targetDifficultyLevels json + originUrl varchar + originDomain varchar + coverImageUrl varchar + status varchar + progress int + createdAt datetime + completedAt datetime + deletedAt datetime + errorMessage text + resultCustomContentId varchar [note: 'Logical ref to customContents.id after generation'] + updatedAt datetime +} + +Table customContents { + id varchar [pk] + userId varchar [not null] + contentRequestId varchar [not null] + isDeleted boolean + title varchar [not null] + author varchar + coverImageUrl varchar + difficultyLevel varchar + targetDifficultyLevels json + readingTime int + averageRating float + reviewCount int + viewCount int + tags json + originUrl varchar + originDomain varchar + createdAt datetime + updatedAt datetime + deletedAt datetime +} + +Table customContentChunks { + id varchar 
[pk] + customContentId varchar [not null] + userId varchar [not null] + difficultyLevel varchar + chapterNum int + chunkNum int + type varchar + chunkText text + description text + isDeleted boolean + createdAt datetime + updatedAt datetime + deletedAt datetime + + indexes { + (customContentId, difficultyLevel, chapterNum, chunkNum) [name: 'custom_content_difficulty_chapter_chunk_idx'] + (userId, isDeleted, createdAt) [name: 'user_deleted_created_idx'] + } +} + +Table customProgress { + id varchar [pk] + userId varchar [not null] + customId varchar [not null] + chunkId varchar + normalizedProgress float + maxNormalizedProgress float + currentDifficultyLevel varchar + isCompleted boolean + completedAt datetime + updatedAt datetime + + indexes { + (userId, customId) [unique, name: 'idx_user_custom_progress'] + } +} + +Table userCustomContents { + id varchar [pk] + userId varchar [not null] + customContentId varchar [not null] + contentRequestId varchar [not null] + unlockedAt datetime + + indexes { + (userId, customContentId) [unique, name: 'user_content_idx'] + } +} + +Table contentAccessLogs { + id varchar [pk] + userId varchar [not null] + contentId varchar [note: 'Polymorphic content ref: book/article/custom/feed'] + contentType varchar + category varchar + readTimeSeconds int + accessedAt datetime + + indexes { + (userId, accessedAt) [name: 'user_accessed_idx'] + (userId, category) [name: 'user_category_idx'] + (userId, contentType) [name: 'user_content_type_idx'] + } +} + +Table userCategoryPreferences { + id varchar [pk] + userId varchar [not null, unique] + primaryCategory varchar + categoryScores json + rawAccessCounts json + tagScores json + totalAccessCount int + lastUpdatedAt datetime +} + +Ref: refreshTokens.userId > users.id +Ref: userTickets.userId - users.id +Ref: ticketTransactions.userId > users.id +Ref: fcmTokens.userId > users.id +Ref: pushLogs.userId > users.id +Ref: wordBookmarks.userId > users.id +Ref: userStudyReports.userId - users.id +Ref: 
dailyCompletions.userId > users.id +Ref: freezeTransactions.userId > users.id + +Ref: chapters.bookId > books.id +Ref: chunks.chapterId > chapters.id +Ref: bookProgress.userId > users.id +Ref: bookProgress.bookId > books.id +Ref: bookProgress.chapterId > chapters.id +Ref: bookProgress.chunkId > chunks.id + +Ref: articleChunks.articleId > articles.id +Ref: articleProgress.userId > users.id +Ref: articleProgress.articleId > articles.id +Ref: articleProgress.chunkId > articleChunks.id + +Ref: contentRequests.userId > users.id +Ref: contentRequests.resultCustomContentId > customContents.id +Ref: customContents.userId > users.id +Ref: customContents.contentRequestId > contentRequests.id +Ref: customContentChunks.customContentId > customContents.id +Ref: customContentChunks.userId > users.id +Ref: customProgress.userId > users.id +Ref: customProgress.customId > customContents.id +Ref: customProgress.chunkId > customContentChunks.id +Ref: userCustomContents.userId > users.id +Ref: userCustomContents.customContentId > customContents.id +Ref: userCustomContents.contentRequestId > contentRequests.id + +Ref: contentAccessLogs.userId > users.id +Ref: userCategoryPreferences.userId - users.id From 0e0214a22368eef2e5100eb3bdc6eb9b2542bb42 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 31 Mar 2026 16:42:09 +0900 Subject: [PATCH 02/28] Track k6 scripts and seed scaffolding --- .gitignore | 2 - k6/scripts/book-chapters-test.js | 28 ++ k6/scripts/books-performance-test.js | 213 +++++++++ k6/scripts/image-performance-test.js | 150 ++++++ k6/scripts/smoke-test.js | 38 ++ k6/seed/README.md | 64 +++ .../book/seed-books-content.mongosh.js | 447 ++++++++++++++++++ 7 files changed, 940 insertions(+), 2 deletions(-) create mode 100644 k6/scripts/book-chapters-test.js create mode 100644 k6/scripts/books-performance-test.js create mode 100644 k6/scripts/image-performance-test.js create mode 100644 k6/scripts/smoke-test.js create mode 100644 k6/seed/README.md create mode 100644 
k6/seed/content/book/seed-books-content.mongosh.js diff --git a/.gitignore b/.gitignore index 731afaf..06b6796 100644 --- a/.gitignore +++ b/.gitignore @@ -55,6 +55,4 @@ monitoring/prometheus-*.yml monitoring/alertmanager.yml ### K6 data ### -k6/data/** k6/reports/** -k6/scripts/** diff --git a/k6/scripts/book-chapters-test.js b/k6/scripts/book-chapters-test.js new file mode 100644 index 0000000..be7e7d9 --- /dev/null +++ b/k6/scripts/book-chapters-test.js @@ -0,0 +1,28 @@ +import http from 'k6/http'; +import { check, sleep } from 'k6'; + +export const options = { + stages: [ + { duration: '30s', target: 20 }, + { duration: '1m', target: 20 }, + { duration: '10s', target: 0 }, + ], + thresholds: { + 'http_req_duration': ['p(95)<500'], // 95% of requests must complete below 500ms + }, +}; + +export default function () { + const url = 'http://host.docker.internal:8080/api/v1/books/68ee1d08d8f6b741f8b90c08/chapters?page=1&limit=200'; + const params = { + headers: { + 'accept': '*/*', + 'X-Test-Username': '2', + }, + }; + const res = http.get(url, params); + check(res, { + 'is status 200': (r) => r.status === 200, + }); + sleep(1); +} \ No newline at end of file diff --git a/k6/scripts/books-performance-test.js b/k6/scripts/books-performance-test.js new file mode 100644 index 0000000..c238b15 --- /dev/null +++ b/k6/scripts/books-performance-test.js @@ -0,0 +1,213 @@ +import http from 'k6/http'; +import { check, sleep } from 'k6'; +import { Rate, Trend } from 'k6/metrics'; + +const BASE_URL = (__ENV.BASE_URL || 'http://host.docker.internal:8080').replace(/\/$/, ''); +const TEST_USERNAME = __ENV.TEST_USERNAME || ''; +const AUTH_TOKEN = __ENV.AUTH_TOKEN || ''; +const LANGUAGE_CODE = __ENV.LANGUAGE_CODE || 'EN'; +const SORT_BY = __ENV.SORT_BY || 'created_at'; +const PAGE = Number(__ENV.PAGE || 1); +const DEFAULT_LIMIT = Number(__ENV.DEFAULT_LIMIT || 20); +const PAGINATION_LIMITS = (__ENV.PAGINATION_LIMITS || '10,20,50') + .split(',') + .map((value) => Number(value.trim())) 
+ .filter((value) => Number.isFinite(value) && value > 0); +const PROGRESS_FILTERS = (__ENV.PROGRESS_FILTERS || 'NOT_STARTED,IN_PROGRESS') + .split(',') + .map((value) => value.trim()) + .filter(Boolean); +const RUN_MODE = __ENV.RUN_MODE || 'all'; +const TARGET_VUS = Number(__ENV.TARGET_VUS || 20); +const RAMP_UP_DURATION = __ENV.RAMP_UP_DURATION || '30s'; +const STEADY_DURATION = __ENV.STEADY_DURATION || '1m'; +const RAMP_DOWN_DURATION = __ENV.RAMP_DOWN_DURATION || '10s'; +const THINK_TIME = Number(__ENV.THINK_TIME || 1); + +const defaultListSuccess = new Rate('books_default_list_success'); +const defaultListDuration = new Trend('books_default_list_duration', true); +const progressFilterSuccess = new Rate('books_progress_filter_success'); +const progressFilterDuration = new Trend('books_progress_filter_duration', true); +const paginationSuccess = new Rate('books_pagination_success'); +const paginationDuration = new Trend('books_pagination_duration', true); + +function buildScenario(exec, startTime = '0s') { + return { + executor: 'ramping-vus', + exec, + startTime, + stages: [ + { duration: RAMP_UP_DURATION, target: TARGET_VUS }, + { duration: STEADY_DURATION, target: TARGET_VUS }, + { duration: RAMP_DOWN_DURATION, target: 0 }, + ], + gracefulRampDown: '5s', + }; +} + +function buildScenarios() { + switch (RUN_MODE) { + case 'default_list': + return { + default_list: buildScenario('defaultListScenario'), + }; + case 'progress_filter': + return { + progress_filter: buildScenario('progressFilterScenario'), + }; + case 'pagination': + return { + pagination: buildScenario('paginationScenario'), + }; + default: + return { + default_list: buildScenario('defaultListScenario', '0s'), + progress_filter: buildScenario('progressFilterScenario', '2m'), + pagination: buildScenario('paginationScenario', '4m'), + }; + } +} + +export const options = { + scenarios: buildScenarios(), + thresholds: { + http_req_failed: ['rate<0.05'], + http_req_duration: ['p(95)<1000'], + 
books_default_list_success: ['rate>0.95'], + books_progress_filter_success: ['rate>0.95'], + books_pagination_success: ['rate>0.95'], + books_default_list_duration: ['p(95)<1000'], + books_progress_filter_duration: ['p(95)<1000'], + books_pagination_duration: ['p(95)<1000'], + }, +}; + +function buildHeaders() { + const headers = { + Accept: 'application/json', + }; + + if (TEST_USERNAME) { + headers['X-Test-Username'] = TEST_USERNAME; + } + + if (AUTH_TOKEN) { + headers.Authorization = `Bearer ${AUTH_TOKEN}`; + } + + return headers; +} + +function toQueryString(params) { + return Object.entries(params) + .filter(([, value]) => value !== undefined && value !== null && value !== '') + .map(([key, value]) => `${encodeURIComponent(key)}=${encodeURIComponent(value)}`) + .join('&'); +} + +function buildBooksUrl(overrides = {}) { + const query = { + languageCode: LANGUAGE_CODE, + sortBy: SORT_BY, + page: PAGE, + limit: DEFAULT_LIMIT, + ...overrides, + }; + + return `${BASE_URL}/api/v1/books?${toQueryString(query)}`; +} + +function requestBooks(url, variant, successMetric, durationMetric) { + const response = http.get(url, { + headers: buildHeaders(), + tags: { + endpoint: 'books', + variant, + }, + }); + + const success = check(response, { + 'books status is 200': (res) => res.status === 200, + 'books response has items': (res) => { + const body = res.json(); + return Array.isArray(body?.content); + }, + }); + + successMetric.add(success); + durationMetric.add(response.timings.duration); + + sleep(THINK_TIME); +} + +export function defaultListScenario() { + const url = buildBooksUrl(); + requestBooks(url, 'default_list', defaultListSuccess, defaultListDuration); +} + +export function progressFilterScenario() { + const progress = PROGRESS_FILTERS[__ITER % PROGRESS_FILTERS.length]; + const url = buildBooksUrl({ progress }); + requestBooks(url, `progress_${progress.toLowerCase()}`, progressFilterSuccess, progressFilterDuration); +} + +export function paginationScenario() { 
+ const limit = PAGINATION_LIMITS[__ITER % PAGINATION_LIMITS.length]; + const url = buildBooksUrl({ limit }); + requestBooks(url, `pagination_limit_${limit}`, paginationSuccess, paginationDuration); +} + +function metricSnapshot(metric) { + const values = metric?.values || {}; + return { + avg: Math.round(values.avg || 0), + min: Math.round(values.min || 0), + max: Math.round(values.max || 0), + p95: Math.round(values['p(95)'] || 0), + p99: Math.round(values['p(99)'] || 0), + rate: Math.round((values.rate || 0) * 10000) / 100, + count: values.count || 0, + }; +} + +export function handleSummary(data) { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + + const analysis = { + test_info: { + runMode: RUN_MODE, + baseUrl: BASE_URL, + testUsername: TEST_USERNAME || null, + page: PAGE, + defaultLimit: DEFAULT_LIMIT, + paginationLimits: PAGINATION_LIMITS, + progressFilters: PROGRESS_FILTERS, + targetVus: TARGET_VUS, + }, + scenario_metrics: { + default_list: { + success: metricSnapshot(data.metrics.books_default_list_success), + duration: metricSnapshot(data.metrics.books_default_list_duration), + }, + progress_filter: { + success: metricSnapshot(data.metrics.books_progress_filter_success), + duration: metricSnapshot(data.metrics.books_progress_filter_duration), + }, + pagination: { + success: metricSnapshot(data.metrics.books_pagination_success), + duration: metricSnapshot(data.metrics.books_pagination_duration), + }, + }, + transport_metrics: { + http_req_duration: metricSnapshot(data.metrics.http_req_duration), + http_req_failed: metricSnapshot(data.metrics.http_req_failed), + http_reqs: metricSnapshot(data.metrics.http_reqs), + data_received: metricSnapshot(data.metrics.data_received), + }, + }; + + return { + [`/reports/books-test-${timestamp}.json`]: JSON.stringify(data, null, 2), + [`/reports/books-analysis-${timestamp}.json`]: JSON.stringify(analysis, null, 2), + }; +} diff --git a/k6/scripts/image-performance-test.js 
b/k6/scripts/image-performance-test.js new file mode 100644 index 0000000..51ba3f6 --- /dev/null +++ b/k6/scripts/image-performance-test.js @@ -0,0 +1,150 @@ +import http from 'k6/http'; +import { check } from 'k6'; +import { Rate, Trend, Counter } from 'k6/metrics'; + +const imageLoadRate = new Rate('image_load_success'); +const imageLoadTime = new Trend('image_load_duration', true); +const imageSizeMetric = new Trend('image_size_bytes', true); +const cacheHitRate = new Rate('cache_hit_rate'); +const totalRequests = new Counter('total_requests'); + +export let options = { + stages: [ + { duration: '30s', target: 5 }, // 워밍업 + { duration: '30s', target: 20 }, // 부하 증가 + { duration: '1m', target: 50 }, // 최대 부하 유지 + { duration: '30s', target: 0 }, // 종료 + ], + thresholds: { + 'image_load_success': ['rate > 0.95'], + 'image_load_duration': ['p(95) < 5000'], + 'http_req_duration': ['p(95) < 3000'], + 'http_req_failed': ['rate < 0.05'], + }, +}; + +// 테스트할 URL (직접 수정해서 사용) +const IMAGE_URL = 'https://static.linglevel.com/cozy_sofa.jpg.webp'; + +export default function () { + const startTime = new Date(); + + const response = http.get(IMAGE_URL, { + headers: { + 'User-Agent': 'k6-image-performance-test', + 'Accept': 'image/webp,image/jpeg,image/png,image/*,*/*;q=0.8', + 'Accept-Encoding': 'gzip, deflate, br', + }, + timeout: '30s', + }); + + const endTime = new Date(); + const loadTime = endTime - startTime; + const imageSize = response.body ? 
response.body.length : 0; + + // 캐시 상태 확인 (여러 CDN 헤더 지원) + const cacheHeaders = [ + response.headers['X-Cache'], + response.headers['CF-Cache-Status'], + response.headers['X-Amz-Cf-Id'], + response.headers['X-Cache-Status'], + response.headers['Cache-Control'] + ].filter(Boolean); + + const cacheStatus = cacheHeaders.join(', ') || 'no-cache-info'; + const isCacheHit = cacheStatus.toLowerCase().includes('hit') || + cacheStatus.toLowerCase().includes('edge') || + cacheStatus.toLowerCase().includes('cloudfront'); + + const success = check(response, { + 'status is 200': (r) => r.status === 200, + 'content-type is image': (r) => { + const contentType = r.headers['Content-Type'] || ''; + return contentType.includes('image'); + }, + 'response body size > 0': (r) => r.body && r.body.length > 0, + 'load time < 10s': () => loadTime < 10000, + 'image size reasonable': () => imageSize > 1000 && imageSize < 10000000, // 1KB ~ 10MB + }); + + // 메트릭 기록 + imageLoadRate.add(success); + imageLoadTime.add(loadTime); + imageSizeMetric.add(imageSize); + cacheHitRate.add(isCacheHit); + totalRequests.add(1); + + // 상세 로그 + if (response.status !== 200) { + console.error(`❌ Failed: Status ${response.status}, URL: ${IMAGE_URL}`); + } else { + const sizeKB = Math.round(imageSize / 1024); + console.log(`✅ Success: ${loadTime}ms, ${sizeKB}KB, Cache: ${cacheStatus}`); + } + + // 응답 헤더 정보 (첫 번째 요청에서만 출력) + if (__ITER === 0) { + console.log('\n📊 Response Headers Analysis:'); + console.log(`Content-Type: ${response.headers['Content-Type'] || 'N/A'}`); + console.log(`Content-Length: ${response.headers['Content-Length'] || 'N/A'}`); + console.log(`Cache-Control: ${response.headers['Cache-Control'] || 'N/A'}`); + console.log(`Server: ${response.headers['Server'] || 'N/A'}`); + console.log(`X-Cache: ${response.headers['X-Cache'] || 'N/A'}`); + console.log(`CF-Cache-Status: ${response.headers['CF-Cache-Status'] || 'N/A'}`); + console.log(`X-Amz-Cf-Id: ${response.headers['X-Amz-Cf-Id'] || 'N/A'}`); + 
console.log(''); + } +} + +export function handleSummary(data) { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + + // 상세 성능 분석 + const analysis = { + test_info: { + url: IMAGE_URL, + timestamp: timestamp, + total_duration_seconds: Math.round(data.state.testRunDurationMs / 1000), + total_requests: data.metrics.total_requests?.values.count || 0, + }, + performance_metrics: { + success_rate: Math.round((data.metrics.image_load_success?.values.rate || 0) * 100 * 100) / 100, + cache_hit_rate: Math.round((data.metrics.cache_hit_rate?.values.rate || 0) * 100 * 100) / 100, + load_time_ms: { + avg: Math.round(data.metrics.image_load_duration?.values.avg || 0), + min: Math.round(data.metrics.image_load_duration?.values.min || 0), + max: Math.round(data.metrics.image_load_duration?.values.max || 0), + p50: Math.round(data.metrics.image_load_duration?.values.med || 0), + p95: Math.round(data.metrics.image_load_duration?.values['p(95)'] || 0), + p99: Math.round(data.metrics.image_load_duration?.values['p(99)'] || 0), + }, + image_size_kb: { + avg: Math.round((data.metrics.image_size_bytes?.values.avg || 0) / 1024), + min: Math.round((data.metrics.image_size_bytes?.values.min || 0) / 1024), + max: Math.round((data.metrics.image_size_bytes?.values.max || 0) / 1024), + }, + throughput: { + requests_per_second: Math.round(data.metrics.http_reqs?.values.rate || 0), + data_received_mb_per_second: Math.round((data.metrics.data_received?.values.rate || 0) / 1024 / 1024 * 100) / 100, + } + } + }; + + console.log('\n🎯 === PERFORMANCE TEST SUMMARY ==='); + console.log(`URL: ${IMAGE_URL}`); + console.log(`Total Requests: ${analysis.test_info.total_requests}`); + console.log(`Success Rate: ${analysis.performance_metrics.success_rate}%`); + console.log(`Cache Hit Rate: ${analysis.performance_metrics.cache_hit_rate}%`); + console.log(`\n⏱️ Load Time:`); + console.log(` Average: ${analysis.performance_metrics.load_time_ms.avg}ms`); + console.log(` P95: 
${analysis.performance_metrics.load_time_ms.p95}ms`); + console.log(` P99: ${analysis.performance_metrics.load_time_ms.p99}ms`); + console.log(`\n📦 Image Size: ${analysis.performance_metrics.image_size_kb.avg}KB (avg)`); + console.log(`\n🚀 Throughput: ${analysis.performance_metrics.throughput.requests_per_second} RPS`); + console.log(`📥 Data Rate: ${analysis.performance_metrics.throughput.data_received_mb_per_second} MB/s`); + + return { + [`/reports/image-test-${timestamp}.json`]: JSON.stringify(data, null, 2), + [`/reports/analysis-${timestamp}.json`]: JSON.stringify(analysis, null, 2), + }; +} \ No newline at end of file diff --git a/k6/scripts/smoke-test.js b/k6/scripts/smoke-test.js new file mode 100644 index 0000000..4ca91da --- /dev/null +++ b/k6/scripts/smoke-test.js @@ -0,0 +1,38 @@ +import http from 'k6/http'; +import { check } from 'k6'; + +export const options = { + vus: 1, + stages: [ + { duration: "1m", target: 10 }, + { duration: "3m", target: 10 }, + { duration: "1m", target: 50 }, + { duration: "3m", target: 50 }, + { duration: "1m", target: 100 }, + { duration: "3m", target: 100 }, + { duration: "1m", target: 200 }, + { duration: "3m", target: 200 }, + { duration: "1m", target: 300 }, + { duration: "3m", target: 300 }, + { duration: "1m", target: 0 }, + ], + thresholds: { + http_req_duration: ['p(95)<500'], // 95%가 500ms 이하 + http_req_failed: ['rate<0.1'], // 에러율 10% 이하 + }, +}; + +export default function () { + // Health check + const healthRes = http.get('http://host.docker.internal:8080/actuator/health'); + check(healthRes, { + 'health check status is 200': (r) => r.status === 200, + }); + + // API 테스트 예시 + // const apiRes = http.get('http://host.docker.internal:8080/api/some-endpoint'); + // check(apiRes, { + // 'api status is 200': (r) => r.status === 200, + // 'response time < 200ms': (r) => r.timings.duration < 200, + // }); +} \ No newline at end of file diff --git a/k6/seed/README.md b/k6/seed/README.md new file mode 100644 index 
0000000..329a0c9 --- /dev/null +++ b/k6/seed/README.md @@ -0,0 +1,64 @@ +# K6 Seed Scripts + +이 디렉토리에는 로컬 `k6` 성능 실험용 시드 생성 스크립트를 둔다. +도메인 구조가 바로 드러나도록 `k6/seed///...` 형태를 기본 원칙으로 사용한다. + +## 디렉토리 원칙 + +- `k6/seed/content/book/...` +- `k6/seed/streak/...` +- `k6/seed/word/...` + +생성 결과물이나 임시 데이터는 `k6/data`에 두고, 버전 관리 대상은 `k6/seed` 아래에 둔다. + +## 포함된 스크립트 + +- `content/book/seed-books-content.mongosh.js` + - `user`, `books`, `chapters`, `chunks` 컬렉션에 재실행 가능한 업서트 시드를 넣는다. + - 기본 분포는 `short / medium / long` 책 구성을 섞고, 챕터 수와 청크 수를 현실적인 범위로 퍼뜨린다. + +## 실행 + +로컬 MongoDB 컨테이너가 떠 있는 상태에서 실행한다. + +```bash +docker compose exec -T mongo mongosh llv_api_local < k6/seed/content/book/seed-books-content.mongosh.js +``` + +환경변수를 조정해서 규모를 바꿀 수 있다. + +```bash +SEED_PREFIX=k6seed \ +BOOK_COUNT=180 \ +USER_COUNT=4 \ +EXTRA_DIFFICULTY_RATIO=0.25 \ +docker compose exec -T mongo mongosh llv_api_local < k6/seed/content/book/seed-books-content.mongosh.js +``` + +같은 prefix 데이터만 지우고 다시 만들고 싶으면 `RESET_EXISTING=true` 를 함께 준다. + +```bash +RESET_EXISTING=true \ +docker compose exec -T mongo mongosh llv_api_local < k6/seed/content/book/seed-books-content.mongosh.js +``` + +## 기본 특성 + +- 책 수 기본값: `240` +- 사용자 수 기본값: `4` +- 책 길이 분포: + - `short`: 20% + - `medium`: 60% + - `long`: 20% +- 기본 난이도 기준 챕터당 평균 청크 수는 약 `30개`를 목표로 둔다. +- 프로필별 청크 범위: + - `short`: 18~24 + - `medium`: 26~34 + - `long`: 34~42 +- 각 책은 기본 난이도 1개를 갖고, 일부는 인접 난이도 청크를 추가로 가진다. +- 이미지 청크는 소량만 섞어서 응답 shape 를 단조롭게 만들지 않는다. + +## 주의 + +- `NOT_STARTED` 같은 progress 기반 필터 검증까지 하려면 이후 `bookProgress` 시드를 별도로 추가하는 편이 좋다. +- 현재 스크립트는 콘텐츠 그래프만 만드는 용도다. diff --git a/k6/seed/content/book/seed-books-content.mongosh.js b/k6/seed/content/book/seed-books-content.mongosh.js new file mode 100644 index 0000000..4e4d1e6 --- /dev/null +++ b/k6/seed/content/book/seed-books-content.mongosh.js @@ -0,0 +1,447 @@ +/* + * K6 content seed for local MongoDB. 
+ * + * Usage: + * docker compose exec -T mongo mongosh llv_api_local < k6/seed/content/book/seed-books-content.mongosh.js + * + * Optional env vars: + * SEED_PREFIX=k6seed + * BOOK_COUNT=240 + * USER_COUNT=4 + * EXTRA_DIFFICULTY_RATIO=0.3 + * RESET_EXISTING=false + */ + +(function seedBooksContent() { + const env = typeof process !== 'undefined' ? process.env : {}; + + const config = { + seedPrefix: env.SEED_PREFIX || 'k6seed', + bookCount: positiveInt(env.BOOK_COUNT, 240), + userCount: positiveInt(env.USER_COUNT, 4), + extraDifficultyRatio: boundedNumber(env.EXTRA_DIFFICULTY_RATIO, 0.3, 0, 1), + resetExisting: parseBoolean(env.RESET_EXISTING || 'false'), + now: new Date(), + }; + + const random = createRandom(config.seedPrefix); + const databaseName = db.getName(); + + const collections = { + users: db.getCollection('user'), + books: db.getCollection('books'), + chapters: db.getCollection('chapters'), + chunks: db.getCollection('chunks'), + }; + + print(`[seed] Database: ${databaseName}`); + print(`[seed] Prefix: ${config.seedPrefix}`); + print(`[seed] Books: ${config.bookCount}, Users: ${config.userCount}`); + + if (config.resetExisting) { + resetExistingSeed(collections, config.seedPrefix); + } + + const users = buildUsers(config); + const content = buildContentGraph(config, random); + + upsertUsers(collections.users, users); + upsertBooks(collections.books, content.books); + upsertChapters(collections.chapters, content.chapters); + upsertChunks(collections.chunks, content.chunks); + + print('[seed] Completed successfully'); + printjson({ + users: users.length, + books: content.books.length, + chapters: content.chapters.length, + chunks: content.chunks.length, + }); +})(); + +function resetExistingSeed(collections, seedPrefix) { + const idRegex = new RegExp(`^${escapeRegex(seedPrefix)}-`); + const usernameRegex = new RegExp(`^${escapeRegex(seedPrefix)}-user-`); + + const deletedChunks = collections.chunks.deleteMany({ id: idRegex }).deletedCount; + const 
deletedChapters = collections.chapters.deleteMany({ id: idRegex }).deletedCount; + const deletedBooks = collections.books.deleteMany({ id: idRegex }).deletedCount; + const deletedUsers = collections.users.deleteMany({ username: usernameRegex }).deletedCount; + + print(`[seed] Reset existing seed docs: users=${deletedUsers}, books=${deletedBooks}, chapters=${deletedChapters}, chunks=${deletedChunks}`); +} + +function buildUsers(config) { + const users = []; + + for (let index = 1; index <= config.userCount; index += 1) { + const username = `${config.seedPrefix}-user-${pad(index, 2)}`; + + users.push({ + id: `${config.seedPrefix}-user-doc-${pad(index, 2)}`, + username, + email: `${username}@example.local`, + displayName: `K6 Seed User ${index}`, + provider: 'test', + profileImageUrl: `https://static.linglevel.local/profiles/${config.seedPrefix}/${pad(index, 2)}.png`, + role: 'USER', + deleted: false, + createdAt: daysAgo(index * 7), + deletedAt: null, + }); + } + + return users; +} + +function buildContentGraph(config, random) { + const books = []; + const chapters = []; + const chunks = []; + + for (let bookIndex = 1; bookIndex <= config.bookCount; bookIndex += 1) { + const profile = pickBookProfile(random); + const bookId = `${config.seedPrefix}-book-${pad(bookIndex, 4)}`; + const createdAt = daysAgo(randomInt(random, 0, 365)); + const titleSeed = buildTitleSeed(bookIndex); + const primaryDifficulty = pickPrimaryDifficulty(random); + const difficultyLevels = buildDifficultyLevels(primaryDifficulty, config.extraDifficultyRatio, random); + + let totalReadingTime = 0; + const chapterCount = randomInt(random, profile.chapterRange.min, profile.chapterRange.max); + + for (let chapterNumber = 1; chapterNumber <= chapterCount; chapterNumber += 1) { + const chapterId = `${bookId}-chapter-${pad(chapterNumber, 2)}`; + const chunkPlan = buildChunkPlan(profile, chapterNumber, random); + const chapterReadingTime = estimateChapterReadingTime(chunkPlan.primaryChunkCount, 
difficultyLevels.length); + + chapters.push({ + id: chapterId, + bookId, + chapterNumber, + title: `Chapter ${chapterNumber}. ${buildChapterTitle(titleSeed.baseNoun, chapterNumber)}`, + chapterImageUrl: chapterNumber % 5 === 0 + ? `https://static.linglevel.local/books/${bookId}/chapters/${pad(chapterNumber, 2)}.jpg` + : null, + description: `${titleSeed.baseAdjective} events unfold around ${titleSeed.baseNoun.toLowerCase()} in chapter ${chapterNumber}.`, + readingTime: chapterReadingTime, + }); + + totalReadingTime += chapterReadingTime; + + for (let levelIndex = 0; levelIndex < difficultyLevels.length; levelIndex += 1) { + const difficultyLevel = difficultyLevels[levelIndex]; + const chunkCount = adjustChunkCountByLevel(chunkPlan.primaryChunkCount, levelIndex, random); + + for (let chunkNumber = 1; chunkNumber <= chunkCount; chunkNumber += 1) { + const chunkId = `${chapterId}-${difficultyLevel.toLowerCase()}-chunk-${pad(chunkNumber, 2)}`; + const isImage = shouldCreateImageChunk(chunkNumber, chapterNumber, random); + + chunks.push({ + id: chunkId, + chapterId, + chunkNumber, + difficultyLevel, + type: isImage ? 'IMAGE' : 'TEXT', + content: isImage + ? `https://static.linglevel.local/books/${bookId}/chapters/${pad(chapterNumber, 2)}/images/${difficultyLevel.toLowerCase()}-${pad(chunkNumber, 2)}.jpg` + : buildChunkText(titleSeed, chapterNumber, chunkNumber, difficultyLevel), + description: isImage + ? 
`${titleSeed.baseAdjective} illustration for chapter ${chapterNumber}, chunk ${chunkNumber}.` + : null, + }); + } + } + } + + books.push({ + id: bookId, + title: `${titleSeed.baseAdjective} ${titleSeed.baseNoun}`, + titleTranslations: { + ko: `${titleSeed.baseNounKo}의 ${titleSeed.baseAdjectiveKo}`, + ja: `${titleSeed.baseAdjectiveJa} ${titleSeed.baseNounJa}`, + }, + author: buildAuthorName(bookIndex), + coverImageUrl: `https://static.linglevel.local/books/${bookId}/cover-small.jpg`, + difficultyLevel: primaryDifficulty, + chapterCount, + readingTime: totalReadingTime, + averageRating: buildAverageRating(random), + reviewCount: buildReviewCount(profile, random), + viewCount: buildViewCount(profile, random), + tags: buildTags(profile, random), + createdAt, + }); + } + + return { books, chapters, chunks }; +} + +function upsertUsers(collection, users) { + collection.bulkWrite( + users.map((user) => ({ + updateOne: { + filter: { username: user.username }, + update: { $set: user }, + upsert: true, + }, + })), + { ordered: false } + ); +} + +function upsertBooks(collection, books) { + collection.bulkWrite( + books.map((book) => ({ + updateOne: { + filter: { id: book.id }, + update: { $set: book }, + upsert: true, + }, + })), + { ordered: false } + ); +} + +function upsertChapters(collection, chapters) { + collection.bulkWrite( + chapters.map((chapter) => ({ + updateOne: { + filter: { id: chapter.id }, + update: { $set: chapter }, + upsert: true, + }, + })), + { ordered: false } + ); +} + +function upsertChunks(collection, chunks) { + const batchSize = 1000; + + for (let index = 0; index < chunks.length; index += batchSize) { + const batch = chunks.slice(index, index + batchSize); + + collection.bulkWrite( + batch.map((chunk) => ({ + updateOne: { + filter: { id: chunk.id }, + update: { $set: chunk }, + upsert: true, + }, + })), + { ordered: false } + ); + } +} + +function pickBookProfile(random) { + const value = random(); + + if (value < 0.2) { + return { + name: 
'short', + chapterRange: { min: 6, max: 8 }, + chunkRange: { min: 18, max: 24 }, + tags: ['starter', 'dialogue', 'daily-life', 'school'], + }; + } + + if (value < 0.8) { + return { + name: 'medium', + chapterRange: { min: 10, max: 15 }, + chunkRange: { min: 26, max: 34 }, + tags: ['classic', 'growth', 'friendship', 'mystery', 'travel'], + }; + } + + return { + name: 'long', + chapterRange: { min: 20, max: 30 }, + chunkRange: { min: 34, max: 42 }, + tags: ['epic', 'history', 'adventure', 'war', 'politics'], + }; +} + +function buildChunkPlan(profile, chapterNumber, random) { + const primaryChunkCount = randomInt(random, profile.chunkRange.min, profile.chunkRange.max); + const chapterWeight = chapterNumber % 7 === 0 ? 1 : 0; + + return { + primaryChunkCount: primaryChunkCount + chapterWeight, + }; +} + +function pickPrimaryDifficulty(random) { + const levels = ['A2', 'B1', 'B2', 'C1']; + return levels[randomInt(random, 0, levels.length - 1)]; +} + +function buildDifficultyLevels(primaryDifficulty, extraDifficultyRatio, random) { + const order = ['A0', 'A1', 'A2', 'B1', 'B2', 'C1', 'C2']; + const primaryIndex = order.indexOf(primaryDifficulty); + const levels = [primaryDifficulty]; + + if (random() >= extraDifficultyRatio) { + return levels; + } + + const candidates = []; + + if (primaryIndex > 0) { + candidates.push(order[primaryIndex - 1]); + } + if (primaryIndex < order.length - 1) { + candidates.push(order[primaryIndex + 1]); + } + + if (candidates.length > 0) { + levels.push(candidates[randomInt(random, 0, candidates.length - 1)]); + } + + return levels; +} + +function adjustChunkCountByLevel(primaryChunkCount, levelIndex, random) { + if (levelIndex === 0) { + return primaryChunkCount; + } + + return Math.max(3, primaryChunkCount + randomInt(random, -1, 1)); +} + +function shouldCreateImageChunk(chunkNumber, chapterNumber, random) { + if ((chapterNumber + chunkNumber) % 11 === 0) { + return true; + } + + return random() < 0.03; +} + +function 
buildChunkText(titleSeed, chapterNumber, chunkNumber, difficultyLevel) { + const sentence = `${titleSeed.baseAdjective} ${titleSeed.baseNoun.toLowerCase()} moves through chapter ${chapterNumber}, section ${chunkNumber}, at ${difficultyLevel} pace.`; + return [ + sentence, + 'The character observes small details, reacts to change, and keeps the scene moving with clear narrative beats.', + 'This placeholder text is intentionally stable so local k6 comparisons focus on query cost rather than random payload drift.', + ].join(' '); +} + +function buildTitleSeed(bookIndex) { + const adjectives = ['Silent', 'Hidden', 'Burning', 'Golden', 'Fading', 'Northern', 'Restless', 'Last']; + const nouns = ['Garden', 'Harbor', 'Compass', 'Archive', 'Forest', 'Letters', 'Skyline', 'Bridge']; + const adjectivesKo = ['조용한', '숨겨진', '타오르는', '황금빛', '희미한', '북쪽의', '불안한', '마지막']; + const nounsKo = ['정원', '항구', '나침반', '기록보관소', '숲', '편지', '스카이라인', '다리']; + const adjectivesJa = ['静かな', '隠された', '燃える', '黄金の', '薄れる', '北の', '落ち着かない', '最後の']; + const nounsJa = ['庭', '港', '羅針盤', '記録庫', '森', '手紙', 'スカイライン', '橋']; + + const adjectiveIndex = bookIndex % adjectives.length; + const nounIndex = Math.floor(bookIndex / adjectives.length) % nouns.length; + + return { + baseAdjective: adjectives[adjectiveIndex], + baseNoun: nouns[nounIndex], + baseAdjectiveKo: adjectivesKo[adjectiveIndex], + baseNounKo: nounsKo[nounIndex], + baseAdjectiveJa: adjectivesJa[adjectiveIndex], + baseNounJa: nounsJa[nounIndex], + }; +} + +function buildChapterTitle(baseNoun, chapterNumber) { + const patterns = ['Arrival', 'Signal', 'Detour', 'Witness', 'Crossing', 'Turn', 'Distance', 'Echo']; + return `${patterns[(chapterNumber - 1) % patterns.length]} of the ${baseNoun}`; +} + +function buildAuthorName(bookIndex) { + const firstNames = ['Mina', 'Elias', 'Harper', 'Jun', 'Noah', 'Sora', 'Lena', 'Theo']; + const lastNames = ['Park', 'Rivera', 'Bennett', 'Tanaka', 'Kim', 'Silva', 'Walker', 'Ito']; + + return `${firstNames[bookIndex % 
firstNames.length]} ${lastNames[Math.floor(bookIndex / firstNames.length) % lastNames.length]}`; +} + +function buildAverageRating(random) { + return Number((3.4 + random() * 1.4).toFixed(1)); +} + +function buildReviewCount(profile, random) { + const multiplier = profile.name === 'long' ? 1.4 : profile.name === 'short' ? 0.7 : 1; + return Math.round((20 + random() * 180) * multiplier); +} + +function buildViewCount(profile, random) { + const base = profile.name === 'long' ? 600 : profile.name === 'short' ? 80 : 250; + const heavyTail = Math.pow(random(), 0.35); + return Math.round(base + heavyTail * 6000); +} + +function buildTags(profile, random) { + const tags = []; + const candidates = profile.tags.slice(); + const tagCount = randomInt(random, 1, Math.min(3, candidates.length)); + + while (tags.length < tagCount) { + const index = randomInt(random, 0, candidates.length - 1); + tags.push(candidates.splice(index, 1)[0]); + } + + return tags; +} + +function estimateChapterReadingTime(primaryChunkCount, difficultyCount) { + return Math.max(3, Math.round((primaryChunkCount * (difficultyCount > 1 ? 1.15 : 1)) * 1.1)); +} + +function createRandom(seedString) { + let seed = 0; + + for (let index = 0; index < seedString.length; index += 1) { + seed = (seed * 31 + seedString.charCodeAt(index)) >>> 0; + } + + return function next() { + seed = (seed + 0x6D2B79F5) >>> 0; + let value = seed; + value = Math.imul(value ^ (value >>> 15), value | 1); + value ^= value + Math.imul(value ^ (value >>> 7), value | 61); + return ((value ^ (value >>> 14)) >>> 0) / 4294967296; + }; +} + +function randomInt(random, min, max) { + return Math.floor(random() * (max - min + 1)) + min; +} + +function positiveInt(value, fallback) { + const parsed = Number(value); + return Number.isInteger(parsed) && parsed > 0 ? 
parsed : fallback; +} + +function boundedNumber(value, fallback, min, max) { + const parsed = Number(value); + + if (!Number.isFinite(parsed)) { + return fallback; + } + + return Math.min(max, Math.max(min, parsed)); +} + +function parseBoolean(value) { + return ['1', 'true', 'yes', 'on'].includes(String(value).toLowerCase()); +} + +function pad(value, length) { + return String(value).padStart(length, '0'); +} + +function daysAgo(days) { + const date = new Date(); + date.setUTCDate(date.getUTCDate() - days); + return date; +} + +function escapeRegex(value) { + return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} From 25395d45207f618b4aaf8b3faf50c2b703d30ed2 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 31 Mar 2026 16:51:20 +0900 Subject: [PATCH 03/28] Refine k6 README overview --- k6/README.md | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/k6/README.md b/k6/README.md index f70740f..852683b 100644 --- a/k6/README.md +++ b/k6/README.md @@ -1,18 +1,5 @@ # K6 Performance Testing -## 디렉토리 구조 -``` -k6/ -├── docker-compose.yml # K6 실행 환경 -├── scripts/ # 테스트 스크립트 -│ ├── smoke-test.js # 기본 연결 테스트 -│ ├── load-test.js # 부하 테스트 -│ └── stress-test.js # 스트레스 테스트 -├── data/ # 테스트 데이터 파일 -├── reports/ # 테스트 결과 리포트 -└── README.md -``` - ## 테스트 실행 ### 기본 연결 테스트 (Smoke Test) From c88fc4899a2f621c298220149a8a6f4bc6f2eb1d Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 31 Mar 2026 18:21:35 +0900 Subject: [PATCH 04/28] Add book progress to k6 book seed --- k6/seed/README.md | 14 +- .../book/seed-books-content.mongosh.js | 256 +++++++++++++++++- 2 files changed, 263 insertions(+), 7 deletions(-) diff --git a/k6/seed/README.md b/k6/seed/README.md index 329a0c9..449a18e 100644 --- a/k6/seed/README.md +++ b/k6/seed/README.md @@ -14,8 +14,9 @@ ## 포함된 스크립트 - `content/book/seed-books-content.mongosh.js` - - `user`, `books`, `chapters`, `chunks` 컬렉션에 재실행 가능한 업서트 시드를 넣는다. + - `user`, `books`, `chapters`, `chunks`, `bookProgress` 컬렉션에 재실행 가능한 업서트 시드를 넣는다. 
- 기본 분포는 `short / medium / long` 책 구성을 섞고, 챕터 수와 청크 수를 현실적인 범위로 퍼뜨린다. + - 같은 콘텐츠 그래프를 기준으로 `NOT_STARTED`, `IN_PROGRESS`, `COMPLETED` 상태가 사용자별로 자연스럽게 섞인 `bookProgress`를 함께 생성한다. ## 실행 @@ -57,8 +58,15 @@ docker compose exec -T mongo mongosh llv_api_local < k6/seed/content/book/seed-b - `long`: 34~42 - 각 책은 기본 난이도 1개를 갖고, 일부는 인접 난이도 청크를 추가로 가진다. - 이미지 청크는 소량만 섞어서 응답 shape 를 단조롭게 만들지 않는다. +- `bookProgress`는 사용자 프로필별로 분포를 다르게 준다. + - `mostly-unread` + - `balanced` + - `active-reader` + - `completion-heavy` +- `NOT_STARTED`는 progress 문서를 만들지 않는 방식으로 표현한다. +- `IN_PROGRESS`는 `chapterProgresses`와 `maxReadChunkNumber`를 함께 채워서 현재 필터와 V3 응답 계산이 모두 자연스럽게 동작하게 한다. ## 주의 -- `NOT_STARTED` 같은 progress 기반 필터 검증까지 하려면 이후 `bookProgress` 시드를 별도로 추가하는 편이 좋다. -- 현재 스크립트는 콘텐츠 그래프만 만드는 용도다. +- 현재 스크립트는 `books` 콘텐츠 그래프와 `bookProgress`를 함께 만든다. +- 이미 같은 prefix로 넣은 시드를 다시 깔끔하게 만들고 싶으면 `RESET_EXISTING=true`로 재실행한다. diff --git a/k6/seed/content/book/seed-books-content.mongosh.js b/k6/seed/content/book/seed-books-content.mongosh.js index 4e4d1e6..8d151d6 100644 --- a/k6/seed/content/book/seed-books-content.mongosh.js +++ b/k6/seed/content/book/seed-books-content.mongosh.js @@ -12,7 +12,7 @@ * RESET_EXISTING=false */ -(function seedBooksContent() { +function seedBooksContent() { const env = typeof process !== 'undefined' ? 
process.env : {}; const config = { @@ -32,6 +32,7 @@ books: db.getCollection('books'), chapters: db.getCollection('chapters'), chunks: db.getCollection('chunks'), + bookProgresses: db.getCollection('bookProgress'), }; print(`[seed] Database: ${databaseName}`); @@ -44,11 +45,16 @@ const users = buildUsers(config); const content = buildContentGraph(config, random); + const progressSeed = buildBookProgresses(config, users, content.bookCatalog, createRandom(`${config.seedPrefix}:progress`)); + print(`[seed] Prepared bookProgress documents: ${progressSeed.bookProgresses.length}`); upsertUsers(collections.users, users); upsertBooks(collections.books, content.books); upsertChapters(collections.chapters, content.chapters); upsertChunks(collections.chunks, content.chunks); + print('[seed] Chunk upserts completed'); + upsertBookProgresses(collections.bookProgresses, progressSeed.bookProgresses); + print('[seed] Book progress upserts completed'); print('[seed] Completed successfully'); printjson({ @@ -56,19 +62,32 @@ books: content.books.length, chapters: content.chapters.length, chunks: content.chunks.length, + bookProgresses: progressSeed.bookProgresses.length, + progressProfiles: progressSeed.summaryByUser, }); -})(); + + if (typeof quit === 'function') { + quit(0); + } +} function resetExistingSeed(collections, seedPrefix) { const idRegex = new RegExp(`^${escapeRegex(seedPrefix)}-`); const usernameRegex = new RegExp(`^${escapeRegex(seedPrefix)}-user-`); + const deletedProgresses = collections.bookProgresses.deleteMany({ + $or: [ + { id: idRegex }, + { userId: idRegex }, + { bookId: idRegex }, + ], + }).deletedCount; const deletedChunks = collections.chunks.deleteMany({ id: idRegex }).deletedCount; const deletedChapters = collections.chapters.deleteMany({ id: idRegex }).deletedCount; const deletedBooks = collections.books.deleteMany({ id: idRegex }).deletedCount; const deletedUsers = collections.users.deleteMany({ username: usernameRegex }).deletedCount; - print(`[seed] 
Reset existing seed docs: users=${deletedUsers}, books=${deletedBooks}, chapters=${deletedChapters}, chunks=${deletedChunks}`); + print(`[seed] Reset existing seed docs: users=${deletedUsers}, books=${deletedBooks}, chapters=${deletedChapters}, chunks=${deletedChunks}, bookProgresses=${deletedProgresses}`); } function buildUsers(config) { @@ -98,6 +117,7 @@ function buildContentGraph(config, random) { const books = []; const chapters = []; const chunks = []; + const bookCatalog = []; for (let bookIndex = 1; bookIndex <= config.bookCount; bookIndex += 1) { const profile = pickBookProfile(random); @@ -106,6 +126,7 @@ function buildContentGraph(config, random) { const titleSeed = buildTitleSeed(bookIndex); const primaryDifficulty = pickPrimaryDifficulty(random); const difficultyLevels = buildDifficultyLevels(primaryDifficulty, config.extraDifficultyRatio, random); + const bookChapterCatalog = []; let totalReadingTime = 0; const chapterCount = randomInt(random, profile.chapterRange.min, profile.chapterRange.max); @@ -114,6 +135,11 @@ function buildContentGraph(config, random) { const chapterId = `${bookId}-chapter-${pad(chapterNumber, 2)}`; const chunkPlan = buildChunkPlan(profile, chapterNumber, random); const chapterReadingTime = estimateChapterReadingTime(chunkPlan.primaryChunkCount, difficultyLevels.length); + const chapterCatalog = { + id: chapterId, + chapterNumber, + chunkIdsByDifficulty: {}, + }; chapters.push({ id: chapterId, @@ -137,6 +163,11 @@ function buildContentGraph(config, random) { const chunkId = `${chapterId}-${difficultyLevel.toLowerCase()}-chunk-${pad(chunkNumber, 2)}`; const isImage = shouldCreateImageChunk(chunkNumber, chapterNumber, random); + if (!chapterCatalog.chunkIdsByDifficulty[difficultyLevel]) { + chapterCatalog.chunkIdsByDifficulty[difficultyLevel] = []; + } + chapterCatalog.chunkIdsByDifficulty[difficultyLevel].push(chunkId); + chunks.push({ id: chunkId, chapterId, @@ -152,6 +183,8 @@ function buildContentGraph(config, random) { }); } 
} + + bookChapterCatalog.push(chapterCatalog); } books.push({ @@ -172,9 +205,16 @@ function buildContentGraph(config, random) { tags: buildTags(profile, random), createdAt, }); + + bookCatalog.push({ + id: bookId, + primaryDifficulty, + chapterCount, + chapters: bookChapterCatalog, + }); } - return { books, chapters, chunks }; + return { books, chapters, chunks, bookCatalog }; } function upsertUsers(collection, users) { @@ -235,6 +275,208 @@ function upsertChunks(collection, chunks) { } } +function upsertBookProgresses(collection, bookProgresses) { + if (bookProgresses.length === 0) { + return; + } + + collection.bulkWrite( + bookProgresses.map((progress) => ({ + updateOne: { + filter: { userId: progress.userId, bookId: progress.bookId }, + update: { $set: progress }, + upsert: true, + }, + })), + { ordered: false } + ); +} + +function buildBookProgresses(config, users, bookCatalog, random) { + const progressProfiles = [ + { + name: 'mostly-unread', + weights: { NOT_STARTED: 0.72, IN_PROGRESS: 0.18, COMPLETED: 0.10 }, + }, + { + name: 'balanced', + weights: { NOT_STARTED: 0.45, IN_PROGRESS: 0.35, COMPLETED: 0.20 }, + }, + { + name: 'active-reader', + weights: { NOT_STARTED: 0.25, IN_PROGRESS: 0.45, COMPLETED: 0.30 }, + }, + { + name: 'completion-heavy', + weights: { NOT_STARTED: 0.12, IN_PROGRESS: 0.28, COMPLETED: 0.60 }, + }, + ]; + + const bookProgresses = []; + const summaryByUser = []; + + users.forEach((user, userIndex) => { + const profile = progressProfiles[userIndex % progressProfiles.length]; + const counts = { + NOT_STARTED: 0, + IN_PROGRESS: 0, + COMPLETED: 0, + }; + + bookCatalog.forEach((bookEntry, bookIndex) => { + const status = pickWeightedProgressStatus(profile.weights, random); + counts[status] += 1; + + if (status === 'NOT_STARTED') { + return; + } + + const progressId = `${config.seedPrefix}-book-progress-${pad(userIndex + 1, 2)}-${pad(bookIndex + 1, 4)}`; + + if (status === 'COMPLETED') { + 
bookProgresses.push(buildCompletedBookProgress(progressId, user, bookEntry, random)); + return; + } + + bookProgresses.push(buildInProgressBookProgress(progressId, user, bookEntry, random)); + }); + + summaryByUser.push({ + username: user.username, + profile: profile.name, + counts, + }); + }); + + return { bookProgresses, summaryByUser }; +} + +function buildCompletedBookProgress(progressId, user, bookEntry, random) { + const completedAgo = randomInt(random, 7, 90); + const completedAt = daysAgo(completedAgo); + const updatedAt = daysAgo(randomInt(random, 0, completedAgo)); + const lastChapter = bookEntry.chapters[bookEntry.chapters.length - 1]; + const lastChunkIds = getChunkIdsForProgress(lastChapter, bookEntry.primaryDifficulty); + const lastChunkNumber = lastChunkIds.length; + + return { + id: progressId, + userId: user.id, + bookId: bookEntry.id, + chapterId: lastChapter.id, + chunkId: lastChunkIds[lastChunkIds.length - 1], + currentReadChapterNumber: bookEntry.chapterCount, + maxReadChapterNumber: bookEntry.chapterCount, + currentReadChunkNumber: lastChunkNumber, + maxReadChunkNumber: lastChunkNumber, + normalizedProgress: 100, + maxNormalizedProgress: 100, + currentDifficultyLevel: bookEntry.primaryDifficulty, + chapterProgresses: bookEntry.chapters.map((chapter, index) => ({ + chapterNumber: chapter.chapterNumber, + progressPercentage: 100, + isCompleted: true, + completedAt: daysAgo(completedAgo + (bookEntry.chapterCount - index - 1)), + })), + isCompleted: true, + completedAt, + updatedAt, + }; +} + +function buildInProgressBookProgress(progressId, user, bookEntry, random) { + const chapterCount = bookEntry.chapterCount; + const minimumCompletedChapters = chapterCount >= 5 ? 
Math.floor(chapterCount * 0.2) : 0; + const maximumCompletedChapters = Math.max( + minimumCompletedChapters, + Math.min(chapterCount - 1, Math.floor(chapterCount * 0.75)) + ); + const completedChapterCount = randomInt(random, minimumCompletedChapters, maximumCompletedChapters); + const currentChapterNumber = Math.min(chapterCount, completedChapterCount + 1); + const currentChapter = bookEntry.chapters[currentChapterNumber - 1]; + const currentChunkIds = getChunkIdsForProgress(currentChapter, bookEntry.primaryDifficulty); + const minimumChunkNumber = Math.max(1, Math.floor(currentChunkIds.length * 0.25)); + const maximumChunkNumber = Math.max( + minimumChunkNumber, + Math.min(currentChunkIds.length - 1, Math.ceil(currentChunkIds.length * 0.85)) + ); + const currentReadChunkNumber = randomInt(random, minimumChunkNumber, maximumChunkNumber); + const currentChunkId = currentChunkIds[currentReadChunkNumber - 1]; + const currentChapterProgress = roundToOneDecimal((currentReadChunkNumber * 100) / currentChunkIds.length); + const updatedAt = daysAgo(randomInt(random, 0, 21)); + + return { + id: progressId, + userId: user.id, + bookId: bookEntry.id, + chapterId: currentChapter.id, + chunkId: currentChunkId, + currentReadChapterNumber: currentChapterNumber, + maxReadChapterNumber: currentChapterNumber, + currentReadChunkNumber, + maxReadChunkNumber: currentReadChunkNumber, + normalizedProgress: roundToOneDecimal((completedChapterCount * 100) / chapterCount), + maxNormalizedProgress: roundToOneDecimal((completedChapterCount * 100) / chapterCount), + currentDifficultyLevel: bookEntry.primaryDifficulty, + chapterProgresses: buildInProgressChapterProgresses( + bookEntry.chapters, + completedChapterCount, + currentChapterNumber, + currentChapterProgress, + random + ), + isCompleted: false, + completedAt: null, + updatedAt, + }; +} + +function buildInProgressChapterProgresses(chapters, completedChapterCount, currentChapterNumber, currentChapterProgress, random) { + const 
chapterProgresses = []; + + for (let index = 0; index < completedChapterCount; index += 1) { + chapterProgresses.push({ + chapterNumber: chapters[index].chapterNumber, + progressPercentage: 100, + isCompleted: true, + completedAt: daysAgo(randomInt(random, 2, 45)), + }); + } + + chapterProgresses.push({ + chapterNumber: currentChapterNumber, + progressPercentage: currentChapterProgress, + isCompleted: false, + completedAt: null, + }); + + return chapterProgresses; +} + +function pickWeightedProgressStatus(weights, random) { + const value = random(); + + if (value < weights.NOT_STARTED) { + return 'NOT_STARTED'; + } + + if (value < weights.NOT_STARTED + weights.IN_PROGRESS) { + return 'IN_PROGRESS'; + } + + return 'COMPLETED'; +} + +function getChunkIdsForProgress(chapter, difficultyLevel) { + const chunkIds = chapter.chunkIdsByDifficulty[difficultyLevel] || []; + + if (chunkIds.length === 0) { + throw new Error(`No chunks found for chapter=${chapter.id}, difficulty=${difficultyLevel}`); + } + + return chunkIds; +} + function pickBookProfile(random) { const value = random(); @@ -428,6 +670,10 @@ function boundedNumber(value, fallback, min, max) { return Math.min(max, Math.max(min, parsed)); } +function roundToOneDecimal(value) { + return Math.round(value * 10) / 10; +} + function parseBoolean(value) { return ['1', 'true', 'yes', 'on'].includes(String(value).toLowerCase()); } @@ -445,3 +691,5 @@ function daysAgo(days) { function escapeRegex(value) { return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); } + +seedBooksContent(); From 075969974a6fdf6ebef8ab674335a443019f82c3 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 31 Mar 2026 23:23:40 +0900 Subject: [PATCH 05/28] Refactor k6 scripts around load profiles --- k6/README.md | 123 ++++++++++++++-- k6/scripts/baseline.js | 19 +++ k6/scripts/book-chapters-test.js | 28 ---- k6/scripts/books-performance-test.js | 213 --------------------------- k6/scripts/common/endpoints.js | 159 ++++++++++++++++++++ 
k6/scripts/common/http.js | 71 +++++++++ k6/scripts/common/profiles.js | 56 +++++++ k6/scripts/common/summary.js | 47 ++++++ k6/scripts/image-performance-test.js | 150 ------------------- k6/scripts/load.js | 19 +++ k6/scripts/mixed-load.js | 22 +++ k6/scripts/smoke-test.js | 38 ----- k6/scripts/stress.js | 19 +++ 13 files changed, 522 insertions(+), 442 deletions(-) create mode 100644 k6/scripts/baseline.js delete mode 100644 k6/scripts/book-chapters-test.js delete mode 100644 k6/scripts/books-performance-test.js create mode 100644 k6/scripts/common/endpoints.js create mode 100644 k6/scripts/common/http.js create mode 100644 k6/scripts/common/profiles.js create mode 100644 k6/scripts/common/summary.js delete mode 100644 k6/scripts/image-performance-test.js create mode 100644 k6/scripts/load.js create mode 100644 k6/scripts/mixed-load.js delete mode 100644 k6/scripts/smoke-test.js create mode 100644 k6/scripts/stress.js diff --git a/k6/README.md b/k6/README.md index 852683b..f62b6c2 100644 --- a/k6/README.md +++ b/k6/README.md @@ -1,27 +1,124 @@ # K6 Performance Testing -## 테스트 실행 +`k6` 디렉토리는 정량 부하 테스트를 `부하 프로필` 중심으로 관리한다. +요청 이름보다 `baseline`, `load`, `stress`, `mixed-load` 같은 실험 목적이 먼저 보이도록 두고, 실제 엔드포인트는 환경변수로 주입한다. + +## 현재 구조 + +```text +k6/ +├── docker-compose.yml +├── README.md +├── seed/ +│ └── ... +└── scripts/ + ├── baseline.js + ├── load.js + ├── stress.js + ├── mixed-load.js + └── common/ + ├── endpoints.js + ├── http.js + ├── profiles.js + └── summary.js +``` + +## 전제 조건 + +- 애플리케이션이 로컬에서 실행 중이어야 한다. +- MongoDB 에는 seed 데이터가 들어 있어야 한다. +- 테스트용 사용자는 `X-Test-Username` 으로 인증 가능해야 한다. +- 시드 생성은 [README.md](seed/README.md)를 따른다. +- 기본 `BASE_URL` 은 `http://host.docker.internal:8080` 이다. + +## 기본 엔드포인트 이름 + +- `books.default_list` +- `books.progress_filter` +- `books.pagination` + +새 엔드포인트를 추가할 때는 `k6/scripts/common/endpoints.js` 에 등록한다. + +## 실행 예시 + +### Baseline +낮은 부하로 기준 응답시간과 기본 안정성을 확인한다. 
-### 기본 연결 테스트 (Smoke Test) ```bash -docker-compose run --rm k6 run /scripts/smoke-test.js +docker compose -f k6/docker-compose.yml run --rm \ + -e ENDPOINT_NAME=books.default_list \ + -e TEST_USERNAME=k6seed-user-02 \ + k6 run /scripts/baseline.js ``` -### 부하 테스트 (Load Test) +### Sustained Load +일반적인 목표 부하를 일정 시간 유지하면서 지속 성능을 본다. + ```bash -docker-compose run --rm k6 run /scripts/load-test.js +docker compose -f k6/docker-compose.yml run --rm \ + -e ENDPOINT_NAME=books.progress_filter \ + -e TEST_USERNAME=k6seed-user-02 \ + -e TARGET_VUS=20 \ + k6 run /scripts/load.js ``` -### 커스텀 설정으로 실행 +### Stress +일반 부하보다 더 높은 요청량으로 밀어 한계 구간과 급격한 성능 저하 지점을 찾는다. + ```bash -docker-compose run --rm k6 run /scripts/smoke-test.js --vus 10 --duration 1m +docker compose -f k6/docker-compose.yml run --rm \ + -e ENDPOINT_NAME=books.pagination \ + -e TEST_USERNAME=k6seed-user-02 \ + -e TARGET_VUS=20 \ + -e STRESS_TARGET_VUS=80 \ + k6 run /scripts/stress.js ``` +### Mixed Load +여러 요청을 비율대로 섞어서 실제 사용 패턴에 가까운 혼합 부하를 본다. + +```bash +docker compose -f k6/docker-compose.yml run --rm \ + -e ENDPOINT_NAMES=books.default_list,books.progress_filter,books.pagination \ + -e ENDPOINT_WEIGHTS=7,2,1 \ + -e TEST_USERNAME=k6seed-user-02 \ + k6 run /scripts/mixed-load.js +``` + +### Custom Endpoint +등록되지 않은 엔드포인트를 직접 지정해서 같은 부하 프로필로 측정한다. + +```bash +docker compose -f k6/docker-compose.yml run --rm \ + -e ENDPOINT_PATH=/api/v1/books \ + -e ENDPOINT_TAG=books \ + -e ENDPOINT_EXPECTS_ARRAY_AT=content \ + -e TEST_USERNAME=k6seed-user-02 \ + k6 run /scripts/load.js +``` + +## 자주 바꾸는 환경변수 + +- `BASE_URL` +- `TEST_USERNAME` +- `ENDPOINT_NAME` +- `ENDPOINT_NAMES` +- `ENDPOINT_WEIGHTS` +- `TARGET_VUS` +- `STRESS_TARGET_VUS` +- `THINK_TIME` + +부하 프로필별 세부 duration 은 `baseline.js`, `load.js`, `stress.js` 가 참조하는 `k6/scripts/common/profiles.js` 환경변수로 조정할 수 있다. + +## 관리 원칙 + +- 테스트 파일은 요청 중심이 아니라 부하 프로필 중심으로 둔다. +- 엔드포인트는 `ENDPOINT_NAME` 또는 `ENDPOINT_PATH` 로 주입한다. +- 여러 요청을 묶고 싶을 때는 `mixed-load.js` 에서 엔드포인트 모듈을 조합한다. 
+- 대조군 비교 시에는 같은 seed prefix, 같은 사용자, 같은 정렬 기준을 고정한다. + ## 결과 확인 -- 콘솔에서 실시간 확인 -- `/reports` 폴더에 JSON 결과 저장 -- Grafana 대시보드 연동 가능 -## 네트워크 설정 -- `host.docker.internal:8080`로 로컬 API 접근 -- 운영 환경 테스트 시 URL 변경 필요 \ No newline at end of file +- 콘솔 실시간 출력 +- `/reports` 아래 JSON 결과 +- 필요하면 Grafana / InfluxDB 연동 diff --git a/k6/scripts/baseline.js b/k6/scripts/baseline.js new file mode 100644 index 0000000..6f6a9ba --- /dev/null +++ b/k6/scripts/baseline.js @@ -0,0 +1,19 @@ +import { requestEndpoint, getSharedTestInfo } from './common/http.js'; +import { resolveSingleEndpoint } from './common/endpoints.js'; +import { createProfileOptions } from './common/profiles.js'; +import { createMetrics, createSummaryHandler } from './common/summary.js'; + +const endpoint = resolveSingleEndpoint(); +const metrics = createMetrics('baseline'); + +export const options = createProfileOptions('baseline', metrics.prefix); + +export default function () { + requestEndpoint(endpoint, metrics, __ITER); +} + +export const handleSummary = createSummaryHandler(metrics, { + profileName: 'baseline', + endpointName: endpoint.name, + ...getSharedTestInfo(), +}); diff --git a/k6/scripts/book-chapters-test.js b/k6/scripts/book-chapters-test.js deleted file mode 100644 index be7e7d9..0000000 --- a/k6/scripts/book-chapters-test.js +++ /dev/null @@ -1,28 +0,0 @@ -import http from 'k6/http'; -import { check, sleep } from 'k6'; - -export const options = { - stages: [ - { duration: '30s', target: 20 }, - { duration: '1m', target: 20 }, - { duration: '10s', target: 0 }, - ], - thresholds: { - 'http_req_duration': ['p(95)<500'], // 95% of requests must complete below 500ms - }, -}; - -export default function () { - const url = 'http://host.docker.internal:8080/api/v1/books/68ee1d08d8f6b741f8b90c08/chapters?page=1&limit=200'; - const params = { - headers: { - 'accept': '*/*', - 'X-Test-Username': '2', - }, - }; - const res = http.get(url, params); - check(res, { - 'is status 200': (r) => r.status === 200, - }); 
- sleep(1); -} \ No newline at end of file diff --git a/k6/scripts/books-performance-test.js b/k6/scripts/books-performance-test.js deleted file mode 100644 index c238b15..0000000 --- a/k6/scripts/books-performance-test.js +++ /dev/null @@ -1,213 +0,0 @@ -import http from 'k6/http'; -import { check, sleep } from 'k6'; -import { Rate, Trend } from 'k6/metrics'; - -const BASE_URL = (__ENV.BASE_URL || 'http://host.docker.internal:8080').replace(/\/$/, ''); -const TEST_USERNAME = __ENV.TEST_USERNAME || ''; -const AUTH_TOKEN = __ENV.AUTH_TOKEN || ''; -const LANGUAGE_CODE = __ENV.LANGUAGE_CODE || 'EN'; -const SORT_BY = __ENV.SORT_BY || 'created_at'; -const PAGE = Number(__ENV.PAGE || 1); -const DEFAULT_LIMIT = Number(__ENV.DEFAULT_LIMIT || 20); -const PAGINATION_LIMITS = (__ENV.PAGINATION_LIMITS || '10,20,50') - .split(',') - .map((value) => Number(value.trim())) - .filter((value) => Number.isFinite(value) && value > 0); -const PROGRESS_FILTERS = (__ENV.PROGRESS_FILTERS || 'NOT_STARTED,IN_PROGRESS') - .split(',') - .map((value) => value.trim()) - .filter(Boolean); -const RUN_MODE = __ENV.RUN_MODE || 'all'; -const TARGET_VUS = Number(__ENV.TARGET_VUS || 20); -const RAMP_UP_DURATION = __ENV.RAMP_UP_DURATION || '30s'; -const STEADY_DURATION = __ENV.STEADY_DURATION || '1m'; -const RAMP_DOWN_DURATION = __ENV.RAMP_DOWN_DURATION || '10s'; -const THINK_TIME = Number(__ENV.THINK_TIME || 1); - -const defaultListSuccess = new Rate('books_default_list_success'); -const defaultListDuration = new Trend('books_default_list_duration', true); -const progressFilterSuccess = new Rate('books_progress_filter_success'); -const progressFilterDuration = new Trend('books_progress_filter_duration', true); -const paginationSuccess = new Rate('books_pagination_success'); -const paginationDuration = new Trend('books_pagination_duration', true); - -function buildScenario(exec, startTime = '0s') { - return { - executor: 'ramping-vus', - exec, - startTime, - stages: [ - { duration: RAMP_UP_DURATION, 
target: TARGET_VUS }, - { duration: STEADY_DURATION, target: TARGET_VUS }, - { duration: RAMP_DOWN_DURATION, target: 0 }, - ], - gracefulRampDown: '5s', - }; -} - -function buildScenarios() { - switch (RUN_MODE) { - case 'default_list': - return { - default_list: buildScenario('defaultListScenario'), - }; - case 'progress_filter': - return { - progress_filter: buildScenario('progressFilterScenario'), - }; - case 'pagination': - return { - pagination: buildScenario('paginationScenario'), - }; - default: - return { - default_list: buildScenario('defaultListScenario', '0s'), - progress_filter: buildScenario('progressFilterScenario', '2m'), - pagination: buildScenario('paginationScenario', '4m'), - }; - } -} - -export const options = { - scenarios: buildScenarios(), - thresholds: { - http_req_failed: ['rate<0.05'], - http_req_duration: ['p(95)<1000'], - books_default_list_success: ['rate>0.95'], - books_progress_filter_success: ['rate>0.95'], - books_pagination_success: ['rate>0.95'], - books_default_list_duration: ['p(95)<1000'], - books_progress_filter_duration: ['p(95)<1000'], - books_pagination_duration: ['p(95)<1000'], - }, -}; - -function buildHeaders() { - const headers = { - Accept: 'application/json', - }; - - if (TEST_USERNAME) { - headers['X-Test-Username'] = TEST_USERNAME; - } - - if (AUTH_TOKEN) { - headers.Authorization = `Bearer ${AUTH_TOKEN}`; - } - - return headers; -} - -function toQueryString(params) { - return Object.entries(params) - .filter(([, value]) => value !== undefined && value !== null && value !== '') - .map(([key, value]) => `${encodeURIComponent(key)}=${encodeURIComponent(value)}`) - .join('&'); -} - -function buildBooksUrl(overrides = {}) { - const query = { - languageCode: LANGUAGE_CODE, - sortBy: SORT_BY, - page: PAGE, - limit: DEFAULT_LIMIT, - ...overrides, - }; - - return `${BASE_URL}/api/v1/books?${toQueryString(query)}`; -} - -function requestBooks(url, variant, successMetric, durationMetric) { - const response = http.get(url, { - 
headers: buildHeaders(), - tags: { - endpoint: 'books', - variant, - }, - }); - - const success = check(response, { - 'books status is 200': (res) => res.status === 200, - 'books response has items': (res) => { - const body = res.json(); - return Array.isArray(body?.content); - }, - }); - - successMetric.add(success); - durationMetric.add(response.timings.duration); - - sleep(THINK_TIME); -} - -export function defaultListScenario() { - const url = buildBooksUrl(); - requestBooks(url, 'default_list', defaultListSuccess, defaultListDuration); -} - -export function progressFilterScenario() { - const progress = PROGRESS_FILTERS[__ITER % PROGRESS_FILTERS.length]; - const url = buildBooksUrl({ progress }); - requestBooks(url, `progress_${progress.toLowerCase()}`, progressFilterSuccess, progressFilterDuration); -} - -export function paginationScenario() { - const limit = PAGINATION_LIMITS[__ITER % PAGINATION_LIMITS.length]; - const url = buildBooksUrl({ limit }); - requestBooks(url, `pagination_limit_${limit}`, paginationSuccess, paginationDuration); -} - -function metricSnapshot(metric) { - const values = metric?.values || {}; - return { - avg: Math.round(values.avg || 0), - min: Math.round(values.min || 0), - max: Math.round(values.max || 0), - p95: Math.round(values['p(95)'] || 0), - p99: Math.round(values['p(99)'] || 0), - rate: Math.round((values.rate || 0) * 10000) / 100, - count: values.count || 0, - }; -} - -export function handleSummary(data) { - const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); - - const analysis = { - test_info: { - runMode: RUN_MODE, - baseUrl: BASE_URL, - testUsername: TEST_USERNAME || null, - page: PAGE, - defaultLimit: DEFAULT_LIMIT, - paginationLimits: PAGINATION_LIMITS, - progressFilters: PROGRESS_FILTERS, - targetVus: TARGET_VUS, - }, - scenario_metrics: { - default_list: { - success: metricSnapshot(data.metrics.books_default_list_success), - duration: metricSnapshot(data.metrics.books_default_list_duration), - }, - 
progress_filter: { - success: metricSnapshot(data.metrics.books_progress_filter_success), - duration: metricSnapshot(data.metrics.books_progress_filter_duration), - }, - pagination: { - success: metricSnapshot(data.metrics.books_pagination_success), - duration: metricSnapshot(data.metrics.books_pagination_duration), - }, - }, - transport_metrics: { - http_req_duration: metricSnapshot(data.metrics.http_req_duration), - http_req_failed: metricSnapshot(data.metrics.http_req_failed), - http_reqs: metricSnapshot(data.metrics.http_reqs), - data_received: metricSnapshot(data.metrics.data_received), - }, - }; - - return { - [`/reports/books-test-${timestamp}.json`]: JSON.stringify(data, null, 2), - [`/reports/books-analysis-${timestamp}.json`]: JSON.stringify(analysis, null, 2), - }; -} diff --git a/k6/scripts/common/endpoints.js b/k6/scripts/common/endpoints.js new file mode 100644 index 0000000..f6d9fc9 --- /dev/null +++ b/k6/scripts/common/endpoints.js @@ -0,0 +1,159 @@ +function splitCsv(value, fallback) { + return (value || fallback) + .split(',') + .map((item) => item.trim()) + .filter(Boolean); +} + +function splitNumberCsv(value, fallback) { + return splitCsv(value, fallback) + .map((item) => Number(item)) + .filter((item) => Number.isFinite(item) && item > 0); +} + +function getByPath(target, path) { + if (!path) { + return target; + } + + return path.split('.').reduce((current, key) => current?.[key], target); +} + +function validateArrayResponse(response, path = 'content') { + try { + const body = response.json(); + return Array.isArray(getByPath(body, path)); + } catch (error) { + return false; + } +} + +const DEFAULT_LIMIT = Number(__ENV.DEFAULT_LIMIT || 20); +const DEFAULT_LANGUAGE_CODE = __ENV.LANGUAGE_CODE || 'EN'; +const DEFAULT_SORT_BY = __ENV.SORT_BY || 'created_at'; +const DEFAULT_PAGE = Number(__ENV.PAGE || 1); +const PROGRESS_FILTERS = splitCsv(__ENV.PROGRESS_FILTERS, 'NOT_STARTED,IN_PROGRESS,COMPLETED'); +const PAGINATION_LIMITS = 
splitNumberCsv(__ENV.PAGINATION_LIMITS, '10,20,50,100'); + +function buildBooksBaseQuery() { + return { + languageCode: DEFAULT_LANGUAGE_CODE, + sortBy: DEFAULT_SORT_BY, + page: DEFAULT_PAGE, + limit: DEFAULT_LIMIT, + }; +} + +const endpointCatalog = { + 'books.default_list': { + name: 'books.default_list', + tag: 'books', + buildRequest: () => ({ + path: '/api/v1/books', + query: buildBooksBaseQuery(), + variant: 'default_list', + }), + validate: (response) => validateArrayResponse(response, 'content'), + }, + 'books.progress_filter': { + name: 'books.progress_filter', + tag: 'books', + buildRequest: ({ iteration }) => { + const progress = PROGRESS_FILTERS[iteration % PROGRESS_FILTERS.length]; + return { + path: '/api/v1/books', + query: { + ...buildBooksBaseQuery(), + progress, + }, + variant: `progress_${progress.toLowerCase()}`, + }; + }, + validate: (response) => validateArrayResponse(response, 'content'), + }, + 'books.pagination': { + name: 'books.pagination', + tag: 'books', + buildRequest: ({ iteration }) => { + const limit = PAGINATION_LIMITS[iteration % PAGINATION_LIMITS.length]; + return { + path: '/api/v1/books', + query: { + ...buildBooksBaseQuery(), + limit, + }, + variant: `pagination_limit_${limit}`, + }; + }, + validate: (response) => validateArrayResponse(response, 'content'), + }, +}; + +function createCustomEndpoint() { + const path = __ENV.ENDPOINT_PATH; + const tag = __ENV.ENDPOINT_TAG || 'custom'; + const variant = __ENV.ENDPOINT_VARIANT || tag; + const arrayPath = __ENV.ENDPOINT_EXPECTS_ARRAY_AT || ''; + + return { + name: 'custom.endpoint', + tag, + buildRequest: () => ({ + path, + variant, + query: {}, + }), + validate: (response) => (arrayPath ? 
validateArrayResponse(response, arrayPath) : true), + }; +} + +export function resolveEndpointByName(name) { + const endpoint = endpointCatalog[name]; + + if (!endpoint) { + throw new Error(`Unknown endpoint: ${name}`); + } + + return endpoint; +} + +export function resolveSingleEndpoint() { + if (__ENV.ENDPOINT_PATH) { + return createCustomEndpoint(); + } + + return resolveEndpointByName(__ENV.ENDPOINT_NAME || 'books.default_list'); +} + +export function resolveEndpointSet() { + if (__ENV.ENDPOINT_PATH) { + return [createCustomEndpoint()]; + } + + return splitCsv(__ENV.ENDPOINT_NAMES, 'books.default_list,books.progress_filter,books.pagination') + .map(resolveEndpointByName); +} + +export function resolveWeights(count) { + const weights = splitNumberCsv(__ENV.ENDPOINT_WEIGHTS, ''); + + if (weights.length === count) { + return weights; + } + + return Array.from({ length: count }, () => 1); +} + +export function selectWeightedEndpoint(endpoints, weights, iteration) { + const totalWeight = weights.reduce((sum, value) => sum + value, 0); + let cursor = iteration % totalWeight; + + for (let index = 0; index < endpoints.length; index += 1) { + cursor -= weights[index]; + if (cursor < 0) { + return endpoints[index]; + } + } + + return endpoints[endpoints.length - 1]; +} diff --git a/k6/scripts/common/http.js b/k6/scripts/common/http.js new file mode 100644 index 0000000..966a2d9 --- /dev/null +++ b/k6/scripts/common/http.js @@ -0,0 +1,71 @@ +import http from 'k6/http'; +import { check, sleep } from 'k6'; + +const BASE_URL = (__ENV.BASE_URL || 'http://host.docker.internal:8080').replace(/\/$/, ''); +const TEST_USERNAME = __ENV.TEST_USERNAME || ''; +const AUTH_TOKEN = __ENV.AUTH_TOKEN || ''; +const THINK_TIME = Number(__ENV.THINK_TIME || 1); + +export function buildHeaders() { + const headers = { + Accept: 'application/json', + }; + + if (TEST_USERNAME) { + headers['X-Test-Username'] = TEST_USERNAME; + } + + if (AUTH_TOKEN) { + headers.Authorization = `Bearer 
${AUTH_TOKEN}`; + } + + return headers; +} + +export function getSharedTestInfo() { + return { + baseUrl: BASE_URL, + testUsername: TEST_USERNAME || null, + thinkTime: THINK_TIME, + }; +} + +export function buildUrl(path, query = {}) { + const normalizedPath = path.startsWith('http') + ? path + : `${BASE_URL}${path.startsWith('/') ? path : `/${path}`}`; + const queryString = Object.entries(query) + .filter(([, value]) => value !== undefined && value !== null && value !== '') + .map(([key, value]) => `${encodeURIComponent(key)}=${encodeURIComponent(value)}`) + .join('&'); + + if (!queryString) { + return normalizedPath; + } + + return `${normalizedPath}${normalizedPath.includes('?') ? '&' : '?'}${queryString}`; +} + +export function requestEndpoint(endpoint, metrics, iteration) { + const request = endpoint.buildRequest({ iteration }); + const url = buildUrl(request.path, request.query); + const response = http.get(url, { + headers: buildHeaders(), + tags: { + endpoint: endpoint.tag, + variant: request.variant, + }, + }); + + const success = check(response, { + [`${endpoint.tag} status is 200`]: (res) => res.status === 200, + [`${endpoint.tag} response shape is valid`]: (res) => endpoint.validate(res), + }); + + metrics.success.add(success); + metrics.duration.add(response.timings.duration); + + sleep(THINK_TIME); + + return response; +} diff --git a/k6/scripts/common/profiles.js b/k6/scripts/common/profiles.js new file mode 100644 index 0000000..96d30d3 --- /dev/null +++ b/k6/scripts/common/profiles.js @@ -0,0 +1,56 @@ +function positiveNumber(value, fallback) { + const parsed = Number(value); + return Number.isFinite(parsed) && parsed > 0 ? 
parsed : fallback; +} + +function buildRampingScenario(stages, exec = 'default') { + return { + executor: 'ramping-vus', + exec, + stages, + gracefulRampDown: '5s', + }; +} + +const BASELINE_VUS = positiveNumber(__ENV.BASELINE_VUS, 5); +const TARGET_VUS = positiveNumber(__ENV.TARGET_VUS, 20); +const STRESS_TARGET_VUS = positiveNumber(__ENV.STRESS_TARGET_VUS, 60); + +export function createProfileScenario(profileName, exec = 'default') { + switch (profileName) { + case 'baseline': + return buildRampingScenario([ + { duration: __ENV.BASELINE_RAMP_UP_DURATION || '15s', target: BASELINE_VUS }, + { duration: __ENV.BASELINE_STEADY_DURATION || '45s', target: BASELINE_VUS }, + { duration: __ENV.BASELINE_RAMP_DOWN_DURATION || '10s', target: 0 }, + ], exec); + case 'stress': + return buildRampingScenario([ + { duration: __ENV.STRESS_RAMP_UP_DURATION || '20s', target: TARGET_VUS }, + { duration: __ENV.STRESS_STEP_DURATION || '30s', target: STRESS_TARGET_VUS }, + { duration: __ENV.STRESS_STEADY_DURATION || '30s', target: STRESS_TARGET_VUS }, + { duration: __ENV.STRESS_RAMP_DOWN_DURATION || '15s', target: 0 }, + ], exec); + case 'load': + default: + return buildRampingScenario([ + { duration: __ENV.RAMP_UP_DURATION || '30s', target: TARGET_VUS }, + { duration: __ENV.STEADY_DURATION || '1m', target: TARGET_VUS }, + { duration: __ENV.RAMP_DOWN_DURATION || '10s', target: 0 }, + ], exec); + } +} + +export function createProfileOptions(profileName, metricPrefix, exec = 'default') { + return { + scenarios: { + [profileName]: createProfileScenario(profileName, exec), + }, + thresholds: { + http_req_failed: ['rate<0.05'], + http_req_duration: ['p(95)<1000'], + [`${metricPrefix}_success`]: ['rate>0.95'], + [`${metricPrefix}_duration`]: ['p(95)<1000'], + }, + }; +} diff --git a/k6/scripts/common/summary.js b/k6/scripts/common/summary.js new file mode 100644 index 0000000..9408747 --- /dev/null +++ b/k6/scripts/common/summary.js @@ -0,0 +1,47 @@ +import { Rate, Trend } from 'k6/metrics'; + 
+export function createMetrics(metricPrefix) { + return { + prefix: metricPrefix, + success: new Rate(`${metricPrefix}_success`), + duration: new Trend(`${metricPrefix}_duration`, true), + }; +} + +function metricSnapshot(metric) { + const values = metric?.values || {}; + return { + avg: Math.round(values.avg || 0), + min: Math.round(values.min || 0), + max: Math.round(values.max || 0), + p95: Math.round(values['p(95)'] || 0), + p99: Math.round(values['p(99)'] || 0), + rate: Math.round((values.rate || 0) * 10000) / 100, + count: values.count || 0, + }; +} + +export function createSummaryHandler(metrics, metadata) { + return function handleSummary(data) { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const analysis = { + test_info: metadata, + metric_name: metrics.prefix, + scenario_metrics: { + success: metricSnapshot(data.metrics[`${metrics.prefix}_success`]), + duration: metricSnapshot(data.metrics[`${metrics.prefix}_duration`]), + }, + transport_metrics: { + http_req_duration: metricSnapshot(data.metrics.http_req_duration), + http_req_failed: metricSnapshot(data.metrics.http_req_failed), + http_reqs: metricSnapshot(data.metrics.http_reqs), + data_received: metricSnapshot(data.metrics.data_received), + }, + }; + + return { + [`/reports/${metrics.prefix}-test-${timestamp}.json`]: JSON.stringify(data, null, 2), + [`/reports/${metrics.prefix}-analysis-${timestamp}.json`]: JSON.stringify(analysis, null, 2), + }; + }; +} diff --git a/k6/scripts/image-performance-test.js b/k6/scripts/image-performance-test.js deleted file mode 100644 index 51ba3f6..0000000 --- a/k6/scripts/image-performance-test.js +++ /dev/null @@ -1,150 +0,0 @@ -import http from 'k6/http'; -import { check } from 'k6'; -import { Rate, Trend, Counter } from 'k6/metrics'; - -const imageLoadRate = new Rate('image_load_success'); -const imageLoadTime = new Trend('image_load_duration', true); -const imageSizeMetric = new Trend('image_size_bytes', true); -const cacheHitRate = new 
Rate('cache_hit_rate'); -const totalRequests = new Counter('total_requests'); - -export let options = { - stages: [ - { duration: '30s', target: 5 }, // 워밍업 - { duration: '30s', target: 20 }, // 부하 증가 - { duration: '1m', target: 50 }, // 최대 부하 유지 - { duration: '30s', target: 0 }, // 종료 - ], - thresholds: { - 'image_load_success': ['rate > 0.95'], - 'image_load_duration': ['p(95) < 5000'], - 'http_req_duration': ['p(95) < 3000'], - 'http_req_failed': ['rate < 0.05'], - }, -}; - -// 테스트할 URL (직접 수정해서 사용) -const IMAGE_URL = 'https://static.linglevel.com/cozy_sofa.jpg.webp'; - -export default function () { - const startTime = new Date(); - - const response = http.get(IMAGE_URL, { - headers: { - 'User-Agent': 'k6-image-performance-test', - 'Accept': 'image/webp,image/jpeg,image/png,image/*,*/*;q=0.8', - 'Accept-Encoding': 'gzip, deflate, br', - }, - timeout: '30s', - }); - - const endTime = new Date(); - const loadTime = endTime - startTime; - const imageSize = response.body ? response.body.length : 0; - - // 캐시 상태 확인 (여러 CDN 헤더 지원) - const cacheHeaders = [ - response.headers['X-Cache'], - response.headers['CF-Cache-Status'], - response.headers['X-Amz-Cf-Id'], - response.headers['X-Cache-Status'], - response.headers['Cache-Control'] - ].filter(Boolean); - - const cacheStatus = cacheHeaders.join(', ') || 'no-cache-info'; - const isCacheHit = cacheStatus.toLowerCase().includes('hit') || - cacheStatus.toLowerCase().includes('edge') || - cacheStatus.toLowerCase().includes('cloudfront'); - - const success = check(response, { - 'status is 200': (r) => r.status === 200, - 'content-type is image': (r) => { - const contentType = r.headers['Content-Type'] || ''; - return contentType.includes('image'); - }, - 'response body size > 0': (r) => r.body && r.body.length > 0, - 'load time < 10s': () => loadTime < 10000, - 'image size reasonable': () => imageSize > 1000 && imageSize < 10000000, // 1KB ~ 10MB - }); - - // 메트릭 기록 - imageLoadRate.add(success); - imageLoadTime.add(loadTime); 
- imageSizeMetric.add(imageSize); - cacheHitRate.add(isCacheHit); - totalRequests.add(1); - - // 상세 로그 - if (response.status !== 200) { - console.error(`❌ Failed: Status ${response.status}, URL: ${IMAGE_URL}`); - } else { - const sizeKB = Math.round(imageSize / 1024); - console.log(`✅ Success: ${loadTime}ms, ${sizeKB}KB, Cache: ${cacheStatus}`); - } - - // 응답 헤더 정보 (첫 번째 요청에서만 출력) - if (__ITER === 0) { - console.log('\n📊 Response Headers Analysis:'); - console.log(`Content-Type: ${response.headers['Content-Type'] || 'N/A'}`); - console.log(`Content-Length: ${response.headers['Content-Length'] || 'N/A'}`); - console.log(`Cache-Control: ${response.headers['Cache-Control'] || 'N/A'}`); - console.log(`Server: ${response.headers['Server'] || 'N/A'}`); - console.log(`X-Cache: ${response.headers['X-Cache'] || 'N/A'}`); - console.log(`CF-Cache-Status: ${response.headers['CF-Cache-Status'] || 'N/A'}`); - console.log(`X-Amz-Cf-Id: ${response.headers['X-Amz-Cf-Id'] || 'N/A'}`); - console.log(''); - } -} - -export function handleSummary(data) { - const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); - - // 상세 성능 분석 - const analysis = { - test_info: { - url: IMAGE_URL, - timestamp: timestamp, - total_duration_seconds: Math.round(data.state.testRunDurationMs / 1000), - total_requests: data.metrics.total_requests?.values.count || 0, - }, - performance_metrics: { - success_rate: Math.round((data.metrics.image_load_success?.values.rate || 0) * 100 * 100) / 100, - cache_hit_rate: Math.round((data.metrics.cache_hit_rate?.values.rate || 0) * 100 * 100) / 100, - load_time_ms: { - avg: Math.round(data.metrics.image_load_duration?.values.avg || 0), - min: Math.round(data.metrics.image_load_duration?.values.min || 0), - max: Math.round(data.metrics.image_load_duration?.values.max || 0), - p50: Math.round(data.metrics.image_load_duration?.values.med || 0), - p95: Math.round(data.metrics.image_load_duration?.values['p(95)'] || 0), - p99: 
Math.round(data.metrics.image_load_duration?.values['p(99)'] || 0), - }, - image_size_kb: { - avg: Math.round((data.metrics.image_size_bytes?.values.avg || 0) / 1024), - min: Math.round((data.metrics.image_size_bytes?.values.min || 0) / 1024), - max: Math.round((data.metrics.image_size_bytes?.values.max || 0) / 1024), - }, - throughput: { - requests_per_second: Math.round(data.metrics.http_reqs?.values.rate || 0), - data_received_mb_per_second: Math.round((data.metrics.data_received?.values.rate || 0) / 1024 / 1024 * 100) / 100, - } - } - }; - - console.log('\n🎯 === PERFORMANCE TEST SUMMARY ==='); - console.log(`URL: ${IMAGE_URL}`); - console.log(`Total Requests: ${analysis.test_info.total_requests}`); - console.log(`Success Rate: ${analysis.performance_metrics.success_rate}%`); - console.log(`Cache Hit Rate: ${analysis.performance_metrics.cache_hit_rate}%`); - console.log(`\n⏱️ Load Time:`); - console.log(` Average: ${analysis.performance_metrics.load_time_ms.avg}ms`); - console.log(` P95: ${analysis.performance_metrics.load_time_ms.p95}ms`); - console.log(` P99: ${analysis.performance_metrics.load_time_ms.p99}ms`); - console.log(`\n📦 Image Size: ${analysis.performance_metrics.image_size_kb.avg}KB (avg)`); - console.log(`\n🚀 Throughput: ${analysis.performance_metrics.throughput.requests_per_second} RPS`); - console.log(`📥 Data Rate: ${analysis.performance_metrics.throughput.data_received_mb_per_second} MB/s`); - - return { - [`/reports/image-test-${timestamp}.json`]: JSON.stringify(data, null, 2), - [`/reports/analysis-${timestamp}.json`]: JSON.stringify(analysis, null, 2), - }; -} \ No newline at end of file diff --git a/k6/scripts/load.js b/k6/scripts/load.js new file mode 100644 index 0000000..7ca42dd --- /dev/null +++ b/k6/scripts/load.js @@ -0,0 +1,19 @@ +import { requestEndpoint, getSharedTestInfo } from './common/http.js'; +import { resolveSingleEndpoint } from './common/endpoints.js'; +import { createProfileOptions } from './common/profiles.js'; +import { 
createMetrics, createSummaryHandler } from './common/summary.js'; + +const endpoint = resolveSingleEndpoint(); +const metrics = createMetrics('load'); + +export const options = createProfileOptions('load', metrics.prefix); + +export default function () { + requestEndpoint(endpoint, metrics, __ITER); +} + +export const handleSummary = createSummaryHandler(metrics, { + profileName: 'load', + endpointName: endpoint.name, + ...getSharedTestInfo(), +}); diff --git a/k6/scripts/mixed-load.js b/k6/scripts/mixed-load.js new file mode 100644 index 0000000..6241aa9 --- /dev/null +++ b/k6/scripts/mixed-load.js @@ -0,0 +1,22 @@ +import { requestEndpoint, getSharedTestInfo } from './common/http.js'; +import { resolveEndpointSet, resolveWeights, selectWeightedEndpoint } from './common/endpoints.js'; +import { createProfileOptions } from './common/profiles.js'; +import { createMetrics, createSummaryHandler } from './common/summary.js'; + +const endpoints = resolveEndpointSet(); +const weights = resolveWeights(endpoints.length); +const metrics = createMetrics('mixed_load'); + +export const options = createProfileOptions('load', metrics.prefix); + +export default function () { + const endpoint = selectWeightedEndpoint(endpoints, weights, __ITER); + requestEndpoint(endpoint, metrics, __ITER); +} + +export const handleSummary = createSummaryHandler(metrics, { + profileName: 'load', + endpointNames: endpoints.map((endpoint) => endpoint.name), + weights, + ...getSharedTestInfo(), +}); diff --git a/k6/scripts/smoke-test.js b/k6/scripts/smoke-test.js deleted file mode 100644 index 4ca91da..0000000 --- a/k6/scripts/smoke-test.js +++ /dev/null @@ -1,38 +0,0 @@ -import http from 'k6/http'; -import { check } from 'k6'; - -export const options = { - vus: 1, - stages: [ - { duration: "1m", target: 10 }, - { duration: "3m", target: 10 }, - { duration: "1m", target: 50 }, - { duration: "3m", target: 50 }, - { duration: "1m", target: 100 }, - { duration: "3m", target: 100 }, - { duration: "1m", 
target: 200 }, - { duration: "3m", target: 200 }, - { duration: "1m", target: 300 }, - { duration: "3m", target: 300 }, - { duration: "1m", target: 0 }, - ], - thresholds: { - http_req_duration: ['p(95)<500'], // 95%가 500ms 이하 - http_req_failed: ['rate<0.1'], // 에러율 10% 이하 - }, -}; - -export default function () { - // Health check - const healthRes = http.get('http://host.docker.internal:8080/actuator/health'); - check(healthRes, { - 'health check status is 200': (r) => r.status === 200, - }); - - // API 테스트 예시 - // const apiRes = http.get('http://host.docker.internal:8080/api/some-endpoint'); - // check(apiRes, { - // 'api status is 200': (r) => r.status === 200, - // 'response time < 200ms': (r) => r.timings.duration < 200, - // }); -} \ No newline at end of file diff --git a/k6/scripts/stress.js b/k6/scripts/stress.js new file mode 100644 index 0000000..02667a1 --- /dev/null +++ b/k6/scripts/stress.js @@ -0,0 +1,19 @@ +import { requestEndpoint, getSharedTestInfo } from './common/http.js'; +import { resolveSingleEndpoint } from './common/endpoints.js'; +import { createProfileOptions } from './common/profiles.js'; +import { createMetrics, createSummaryHandler } from './common/summary.js'; + +const endpoint = resolveSingleEndpoint(); +const metrics = createMetrics('stress'); + +export const options = createProfileOptions('stress', metrics.prefix); + +export default function () { + requestEndpoint(endpoint, metrics, __ITER); +} + +export const handleSummary = createSummaryHandler(metrics, { + profileName: 'stress', + endpointName: endpoint.name, + ...getSharedTestInfo(), +}); From 7fa2e2ab0d672b9865d2c7d3adcea10c251d544e Mon Sep 17 00:00:00 2001 From: solfe Date: Wed, 1 Apr 2026 22:02:34 +0900 Subject: [PATCH 06/28] Simplify architecture mini-map docs --- docs/architecture/README.md | 28 +++---- docs/architecture/content-book.md | 100 +++++++++++------------ docs/architecture/overview.md | 100 +++++------------------ docs/architecture/streak.md | 85 
++++++-------------- docs/architecture/word.md | 74 +++++------------ docs/templates/architecture-template.md | 102 ++++++++++++------------ 6 files changed, 173 insertions(+), 316 deletions(-) diff --git a/docs/architecture/README.md b/docs/architecture/README.md index 8d3bace..f25965c 100644 --- a/docs/architecture/README.md +++ b/docs/architecture/README.md @@ -1,21 +1,13 @@ # Architecture Documents -이 디렉터리는 프로젝트의 구조와 핵심 흐름을 정리하는 문서 모음이다. +이 디렉터리는 리팩터링이나 성능 개선 전에 빠르게 구조를 파악하기 위한 미니맵 문서 모음이다. -## 포함 대상 +## 문서 원칙 -- 시스템 개요 -- 도메인 관계 -- 대표 요청 흐름 -- 상태 전이 -- 외부 시스템 연결 구조 - -## 작성 기준 - -- 클래스 전체 나열보다 도메인과 흐름을 우선 정리한다. -- Mermaid 다이어그램은 핵심 구조와 흐름만 표현한다. -- 문서는 실제 리팩터링과 성능 개선 판단에 도움이 되는 수준으로 유지한다. -- 구현 상세보다 “어디가 핵심이고 어디가 위험한지”가 먼저 보이게 적는다. +- 목적은 현재 구조를 빠르게 이해하는 것이다. +- 문서는 책임, 외부 의존성, 핵심 기능 흐름만 남긴다. +- 핵심 기능은 중요도와 이해 난이도를 기준으로 2~3개만 고른다. +- 도메인 내부 구현 상세나 세부 설계 판단은 `MISSIONS.md` 또는 `docs/decisions`에서 다룬다. ## 템플릿 @@ -23,8 +15,8 @@ ## 현재 문서 -- [프로젝트 전체 구조 개요](overview.md) -- [Streak 도메인 구조](streak.md) -- [Word 도메인 구조](word.md) -- [Book 도메인 구조](content-book.md) +- [프로젝트 전체 컨텍스트](overview.md) +- [Streak 도메인 미니맵](streak.md) +- [Word 도메인 미니맵](word.md) +- [Book 도메인 미니맵](content-book.md) - [MongoDB 논리 ERD (dbdiagram.io용 DBML)](mongodb-logical-erd.dbml) diff --git a/docs/architecture/content-book.md b/docs/architecture/content-book.md index 8df73e8..dd094ec 100644 --- a/docs/architecture/content-book.md +++ b/docs/architecture/content-book.md @@ -1,33 +1,17 @@ -# Book 도메인 구조 +# Book 도메인 미니맵 -## 목적 +## 현재 시스템의 책임 -이 문서는 책 콘텐츠 도메인이 조회, 진행도, import, 이미지 처리를 어떻게 함께 다루는지 설명한다. +- 책은 여러 개의 챕터를 가지고 챕터는 여러개의 청크를 가진다. +- 사용자 책 읽기 진행도를 계산하고 저장한다. +- 관리자에 의해서 책이 추가로 등록될 수 있다. +- 청크는 이미지와 글자와 같은 타입을 가질 수 있다. 
-## 범위 +## 외부 시스템 의존성 -- `BookService` -- `ChapterService` -- `ProgressService` -- 책 import 및 이미지 처리 - -## 핵심 구성 요소 - -- `BooksController`, `BooksProgressController` -- `BookService` -- `ChapterService` -- `ProgressService` -- `BookRepository`, `ChapterRepository`, `ChunkRepository`, `BookProgressRepository` - -## 구조 요약 - -Book 도메인은 사용자에게 보이는 조회 API와 운영성 있는 import 파이프라인이 함께 들어 있는 구조다. -책 기본 정보와 챕터/청크는 MongoDB에 저장되고, import 시에는 AI 결과 파일 다운로드, 이미지 이동, 썸네일 생성이 같이 수행된다. -사용자 진행도는 책 단위가 아니라 챕터와 청크를 기반으로 계산되며, 읽기 완료는 스트릭 갱신으로 이어진다. - -## Mermaid 다이어그램 - -### 구조 관계 +- MongoDB: 책, 챕터, 청크, 진행도 저장 +- S3 / R2: 표지 이미지와 import 산출물 저장 +- StreakService: 읽기 완료 이후 스트릭 반영 ```mermaid flowchart TD @@ -58,7 +42,37 @@ flowchart TD ProgressService --> Streak ``` -### 대표 흐름: 진행도 업데이트와 스트릭 연결 +## 핵심 기능 + +- 책 목록 조회 +- 진행도 업데이트 +- 책 import 및 이미지 처리 + +## 핵심 기능 흐름 + +### 책 목록 조회 + +```mermaid +sequenceDiagram + participant Client + participant BooksController + participant BookService + participant BookRepository + participant BookProgressRepository + participant Mongo + + Client->>BooksController: GET /api/v1/books + BooksController->>BookService: getBooks(...) + BookService->>BookRepository: find books + BookService->>BookProgressRepository: load user progress + BookRepository->>Mongo: query books + BookProgressRepository->>Mongo: query bookProgress + Mongo-->>BookService: books + progress + BookService-->>BooksController: BookResponse list + BooksController-->>Client: response +``` + +### 진행도 업데이트와 스트릭 연결 ```mermaid sequenceDiagram @@ -81,35 +95,11 @@ sequenceDiagram ProgressService-->>Client: ProgressResponse ``` -## 주요 흐름 설명 - -1. `BookService`는 책 조회와 import를 함께 담당한다. -2. 조회 시에는 사용자 진행도를 합쳐 `BookResponse`를 만들고, import 시에는 AI 결과 파일 다운로드와 이미지 후처리까지 수행한다. -3. `ChapterService`는 챕터 목록 조회와 탐색을 맡고, 챕터별 청크 수와 사용자 진행도를 조합해 응답을 만든다. -4. `ProgressService`는 청크 단위 요청을 챕터 단위 진행도와 책 완료 상태로 변환하고, 마지막 청크를 읽은 경우 `StreakService`를 호출한다. 
- -## 핵심 데이터 - -- `Book` - - 책 메타데이터, 표지 이미지, 난이도, 챕터 수 -- `Chapter` - - 챕터 번호, 설명, 읽기 시간 -- `Chunk` - - 난이도별 세부 텍스트 조각 -- `BookProgress` - - 사용자별 현재 청크, 챕터별 진행도, 완료 여부 - -## 이 도메인의 특징 - -- 조회 응답이 단순 조회가 아니라 사용자 진행도와 이미지 URL 조합을 포함한다. -- import와 조회가 가까운 서비스에 있어 운영 기능과 사용자 API가 한 도메인 아래 모여 있다. -- 챕터 완료 판단은 청크 수 기준으로 계산되고, 책 완료 여부는 챕터 진행도 배열을 기반으로 계산된다. - -## 개선 포인트 +## 핵심 기능 선정 기준 -- `BookService`는 import, 이미지 처리, 조회 응답 조립이 함께 있어 책임 분리가 가능하다. -- `ProgressService`는 검증, 진행도 계산, 읽기 완료 처리, 스트릭 연계를 한 번에 수행한다. -- `ChapterService`는 조회 응답 조립과 view count 증가, backward compatibility 로직이 같이 들어 있다. +1. 책 목록 조회는 사용자 트래픽과 성능 이슈가 가장 자주 모이는 진입점이다. +2. 진행도 업데이트는 `book`, `chapter`, `chunk`, `streak`를 함께 이해해야 한다. +3. import는 운영 기능이지만 파일 저장과 후처리가 함께 묶여 있어 읽기 진입점으로 가치가 있다. ## 참고 코드 diff --git a/docs/architecture/overview.md b/docs/architecture/overview.md index 1d8630a..c997b55 100644 --- a/docs/architecture/overview.md +++ b/docs/architecture/overview.md @@ -1,31 +1,27 @@ -# 프로젝트 전체 구조 개요 +# 프로젝트 전체 컨텍스트 -## 목적 +## 현재 시스템의 책임 -이 문서는 `llv-api`의 상위 구조를 한 장으로 설명하기 위한 문서다. -세부 구현보다 어떤 도메인이 핵심이고, 어떤 저장소와 외부 시스템이 붙어 있는지 빠르게 파악하는 데 초점을 둔다. +- 모바일 학습 앱의 API를 제공한다. +- 책 읽기, 단어 조회, 스트릭 유지 같은 핵심 학습 기능을 한 애플리케이션에서 처리한다. +- 추천, 알림, 크롤링, 파일 처리 같은 보조 기능도 함께 운영한다. -## 범위 +## 외부 시스템 의존성 -- 주요 사용자 요청 경로 -- 핵심 도메인 묶음 -- 공통 인프라와 외부 시스템 -- 우선 문서화 대상 도메인 +- MongoDB: 주요 도메인 데이터와 로그 저장 +- Redis: 읽기 세션과 짧은 상태 관리 +- S3 / R2: 이미지와 파일 저장 +- AI Model: 단어 분석과 생성 요청 +- FCM: 푸시 알림 발송 +- External Content Sites: 크롤링 대상 -## 핵심 구성 요소 +## 핵심 기능 -- API 진입점: `controller` -- 핵심 도메인: `streak`, `word`, `content/book` -- 보조 도메인: `content/recommendation`, `fcm`, `crawling` -- 공통 인프라: MongoDB, Redis, S3/R2, Spring AI, FCM +- 책 읽기와 진행도 반영 +- 단어 조회와 AI 기반 보완 +- 스트릭 계산과 보상/알림 처리 -## 구조 요약 - -이 프로젝트는 하나의 Spring Boot 애플리케이션 안에 학습 콘텐츠, 단어 분석, 스트릭, 추천, 알림, 크롤링, 파일 처리 기능을 함께 두고 있다. -데이터 저장은 MongoDB를 중심으로 하고, Redis는 읽기 세션과 rate limit 같은 짧은 상태 관리에 사용한다. -외부 연동은 AI 모델, FCM, S3/R2, 크롤링 대상 사이트가 중심이며, 도메인 서비스가 이 인프라를 직접 조합하는 구조가 많다. 
- -## Mermaid 다이어그램 +## 핵심 기능 흐름 ```mermaid flowchart TD @@ -73,62 +69,8 @@ flowchart TD Crawl --> External ``` -## 주요 흐름 설명 - -1. `book`, `word`, `streak` 요청은 각각 전용 서비스로 들어가지만, 실제 사용자 학습 흐름에서는 서로 연결된다. -2. `content/book`의 읽기 완료는 `streak` 갱신과 이어지고, 읽기 로그는 `content/recommendation`에서 선호도 집계에 사용된다. -3. `word`는 MongoDB 캐시와 AI 호출을 조합해 결과를 만들고, `streak`는 Redis 읽기 세션과 MongoDB 리포트를 함께 사용한다. -4. `crawling`과 `feed`는 외부 사이트 구조 변화에 영향을 많이 받는 별도 리스크 영역이다. - -## 핵심 도메인 - -### `streak` - -- 사용자 학습 연속성, 프리즈, 보상, 알림을 담당한다. -- Redis 기반 읽기 세션과 MongoDB 기반 누적 리포트를 함께 사용한다. -- 스케줄러와 알림이 얽혀 있어 구조적으로 가장 복잡한 영역 중 하나다. - -### `word` - -- 단어 조회, 원형/변형 매핑, AI 분석, 유효하지 않은 단어 차단을 담당한다. -- 캐시와 AI 호출, 응답 검증이 한 흐름에 들어가 있어 비용과 안정성 측면에서 중요하다. - -### `content/book` - -- 책 조회, 챕터/청크, 진행도, 이미지 처리, 가져오기(import)까지 맡는다. -- 조회 성능과 진행도 계산, 다른 도메인과의 연결 지점이 함께 모여 있다. - -## 공통 인프라 - -### MongoDB - -- 주요 도메인 엔티티와 로그, 추천 데이터를 저장한다. -- 도메인 서비스는 Mongo 문서 구조를 직접 전제로 동작하는 경우가 많다. - -### Redis - -- `streak` 읽기 세션과 `common/ratelimit` 같은 짧은 상태 관리에 사용된다. - -### S3 / R2 - -- 책 이미지와 AI 생성 결과 파일 처리를 담당한다. -- `content/book`는 import 이후 이미지 이동과 썸네일 생성까지 이어진다. - -### AI / FCM / External Sites - -- AI는 `word` 분석의 핵심 의존성이다. -- FCM은 `streak`, `notification` 쪽에서 사용된다. -- 외부 사이트는 `crawling`, `feed` 영역의 가장 큰 불안정 요소다. - -## 현재 문서화 우선순위 - -- [Streak 도메인 구조](streak.md) -- [Word 도메인 구조](word.md) -- [Book 도메인 구조](content-book.md) - -## 개선 포인트 +## 핵심 기능 선정 기준 -- `streak`는 상태 계산, 보상, 통계, 알림 관련 책임이 큰 서비스에 집중돼 있다. -- `word`는 캐시 정책과 AI 실패 처리, 응답 검증이 서비스 흐름 안에 함께 들어가 있다. -- `content/book`는 조회, import, 이미지 처리, 진행도 계산이 서로 가까이 있어 변경 영향 범위가 넓다. -- 외부 의존성이 큰 `crawling`, `feed`는 이후 안정성 문서에서 별도로 다루는 편이 맞다. +1. 실제 사용자 요청이 자주 통과하는 기능이다. +2. 외부 의존성이나 도메인 결합이 있어 이해 난이도가 높다. +3. 리팩터링이나 성능 개선 시 영향 범위가 큰 영역이다. 
diff --git a/docs/architecture/streak.md b/docs/architecture/streak.md index 5d5fce5..0a405dd 100644 --- a/docs/architecture/streak.md +++ b/docs/architecture/streak.md @@ -1,33 +1,18 @@ -# Streak 도메인 구조 +# Streak 도메인 미니맵 -## 목적 +## 현재 시스템의 책임 -이 문서는 스트릭 도메인이 어떻게 학습 완료, 보상, 프리즈, 알림을 처리하는지 설명한다. +- 사용자 학습 연속일 수를 계산한다. +- 읽기 세션과 학습 시간을 관리한다. +- 프리즈, 보상, 완료 기록을 갱신한다. +- 보호 알림과 관련 스케줄 작업을 수행한다. -## 범위 +## 외부 시스템 의존성 -- `StreakService` -- `ReadingSessionService` -- 스트릭 관련 스케줄러 -- `UserStudyReport`, `DailyCompletion`, `FreezeTransaction` - -## 핵심 구성 요소 - -- `StreakController` -- `StreakService` -- `ReadingSessionService` -- `StreakProtectionScheduler` -- `UserStudyReportRepository`, `DailyCompletionRepository`, `FreezeTransactionRepository` - -## 구조 요약 - -스트릭 도메인은 사용자의 학습 연속성을 계산하는 핵심 서비스다. -읽기 세션은 Redis에 짧게 저장하고, 실제 누적 리포트와 완료 기록은 MongoDB에 저장한다. -책 읽기 완료나 다른 콘텐츠 완료 흐름에서 `StreakService`를 호출해 스트릭을 갱신하고, 스케줄러는 밤 시간대에 보호 알림을 보낸다. - -## Mermaid 다이어그램 - -### 구조 관계 +- MongoDB: 누적 리포트와 완료 기록 저장 +- Redis: 읽기 세션과 짧은 상태 저장 +- FCM: 보호 알림 발송 +- content/book: 읽기 완료 이벤트가 유입되는 주요 호출 지점 ```mermaid flowchart TD @@ -52,7 +37,15 @@ flowchart TD Scheduler --> FCM ``` -### 대표 흐름: 읽기 완료 후 스트릭 갱신 +## 핵심 기능 + +- 읽기 완료 후 스트릭 갱신 +- 읽기 세션 관리 +- 보호 알림 스케줄링 + +## 핵심 기능 흐름 + +### 읽기 완료 후 스트릭 갱신 ```mermaid sequenceDiagram @@ -72,41 +65,11 @@ sequenceDiagram ProgressService-->>Client: progress response ``` -### 상태 관점 - -```mermaid -stateDiagram-v2 - [*] --> Active - Active --> CompletedToday: 오늘 학습 완료 - Active --> AtRisk: 학습 없이 하루 종료 - AtRisk --> Protected: freeze로 스트릭 보호 - AtRisk --> Reset: 보호 수단 없음 - Protected --> Active: 다음 학습일에 연속 유지 - CompletedToday --> Active: 다음 날짜로 이동 - Reset --> Active: 새 스트릭 시작 -``` - -## 주요 흐름 설명 - -1. 사용자가 학습을 시작하면 `ReadingSessionService`가 Redis에 읽기 세션을 저장한다. -2. 읽기 완료 시 `ProgressService` 같은 상위 도메인이 `StreakService`를 호출해 학습 시간, 스트릭, 완료 콘텐츠를 갱신한다. -3. `StreakService`는 오늘/어제 상태, 누락 일수, 프리즈 사용 여부를 계산하고 보상 지급 여부도 함께 판단한다. -4. 
`StreakProtectionScheduler`는 밤 9시에 오늘 미완료 사용자를 찾아 FCM 보호 알림을 보낸다. - -## 핵심 데이터 - -- `UserStudyReport` - - 현재 스트릭, 최장 스트릭, 사용 가능 프리즈, 총 학습 시간 등 누적 상태 -- `DailyCompletion` - - 일자별 완료 상태 -- `FreezeTransaction` - - 프리즈 지급/사용 내역 - -## 개선 포인트 +## 핵심 기능 선정 기준 -- `StreakService`에 상태 계산, 보상 지급, 통계 응답 조립이 많이 모여 있어 분리 여지가 크다. -- Redis 세션 검증, 읽기 시간 계산, 콘텐츠 완료 처리 경계가 다른 도메인과 섞여 있다. -- 스케줄러 알림 정책과 도메인 규칙이 점점 가까워지면 테스트 경계가 흐려질 수 있다. +1. 스트릭 갱신은 다른 학습 도메인에서 공통으로 호출하는 핵심 교차 지점이다. +2. Redis, MongoDB, FCM이 함께 등장해 의존성 파악 가치가 크다. +3. 세션, 누적 상태, 스케줄러가 모두 연결돼 있어 처음 읽는 난이도가 높다. ## 참고 코드 diff --git a/docs/architecture/word.md b/docs/architecture/word.md index 132f364..20324ea 100644 --- a/docs/architecture/word.md +++ b/docs/architecture/word.md @@ -1,32 +1,16 @@ -# Word 도메인 구조 +# Word 도메인 미니맵 -## 목적 +## 현재 시스템의 책임 -이 문서는 단어 조회와 AI 분석 흐름이 어떻게 결합돼 있는지 설명한다. +- 단어 조회 API를 제공한다. +- 입력 단어와 원형 단어의 관계를 관리한다. +- 단어 데이터가 없을 때 AI 분석으로 보완한다. +- 실패한 단어를 차단 캐시로 관리한다. -## 범위 +## 외부 시스템 의존성 -- `WordService` -- `WordAiService` -- `WordVariant`, `InvalidWord`, `Word` -- 단어 조회 및 생성 흐름 - -## 핵심 구성 요소 - -- `WordsController` -- `WordService` -- `WordAiService` -- `WordVariantRepository`, `WordRepository`, `InvalidWordRepository` - -## 구조 요약 - -Word 도메인은 사용자가 입력한 단어를 바로 조회하지 않고, 먼저 원형/변형 관계를 확인한 뒤 필요한 경우 AI 분석으로 보완한다. -MongoDB에는 단어 본문, 변형 형태, 실패 캐시를 따로 저장하고, AI 결과는 검증과 필터링을 거친 뒤 저장한다. -즉 이 도메인은 조회 API처럼 보이지만 실제로는 캐시, 분석, 검증, 저장이 한 흐름에 묶인 구조다. - -## Mermaid 다이어그램 - -### 구조 관계 +- MongoDB: 단어 본문, variant, invalid cache 저장 +- AI Model: 새 단어 분석과 생성 요청 ```mermaid flowchart TD @@ -52,7 +36,15 @@ flowchart TD InvalidRepo --> Mongo ``` -### 대표 흐름: 단어 조회 및 생성 +## 핵심 기능 + +- 단어 조회 +- variant 기반 원형 매핑 +- AI 기반 신규 단어 생성 + +## 핵심 기능 흐름 + +### 단어 조회 및 생성 ```mermaid sequenceDiagram @@ -80,33 +72,11 @@ sequenceDiagram WordService-->>Client: WordSearchResponse ``` -## 주요 흐름 설명 - -1. 먼저 `WordVariantRepository`에서 입력 단어가 이미 다른 원형에 연결된 변형인지 확인한다. -2. 데이터가 없으면 `InvalidWordRepository`를 확인해 반복 실패 단어를 빠르게 차단한다. -3. 
AI 호출이 필요하면 `WordAiService`가 강한 프롬프트, Bean schema, validation, enum 필터링, homograph 병합을 적용한다. -4. 성공 결과는 `Word`와 `WordVariant`로 나눠 저장하고, 이후 요청에서는 캐시처럼 재사용한다. - -## 핵심 데이터 - -- `Word` - - 원형 단어, 번역, 의미, 활용형 정보 -- `WordVariant` - - 입력 단어와 원형 단어 연결 -- `InvalidWord` - - 반복 실패한 단어에 대한 차단 캐시 - -## 이 도메인의 특징 - -- AI 응답을 그대로 신뢰하지 않고 validation과 enum 정리를 한 번 더 거친다. -- 같은 원형으로 합쳐야 하는 homograph/variant 처리를 서비스 쪽에서 보정한다. -- 실패한 단어를 `InvalidWord`로 캐시해 불필요한 재호출을 줄인다. - -## 개선 포인트 +## 핵심 기능 선정 기준 -- `WordService`가 캐시 판단, 예외 전략, 저장 규칙까지 많이 알고 있어 책임이 크다. -- `WordAiService`는 프롬프트, 비용 로깅, 응답 검증을 함께 갖고 있어 분리 후보가 될 수 있다. -- AI 실패 정책과 사용자 응답 정책을 더 명확히 나누면 테스트가 쉬워질 수 있다. +1. 조회처럼 보이지만 캐시, 저장, AI 호출이 함께 묶여 있어 흐름이 길다. +2. variant와 invalid cache를 함께 이해해야 실제 동작을 읽을 수 있다. +3. 외부 AI 의존성이 있어 실패 경로까지 같이 파악해야 한다. ## 참고 코드 diff --git a/docs/templates/architecture-template.md b/docs/templates/architecture-template.md index 44dc6d1..87c82df 100644 --- a/docs/templates/architecture-template.md +++ b/docs/templates/architecture-template.md @@ -4,45 +4,45 @@ 짧고 명확한 문서 제목 -## 목적 +## 현재 시스템의 책임 -이 문서가 어떤 구조나 흐름을 설명하기 위한 것인지 적는다. +- 이 도메인이나 시스템이 담당하는 핵심 책임 2~4개 +- 조회, 저장, 계산, 외부 연동 중 무엇이 중심인지 +- 다른 도메인과 구분되는 역할이 무엇인지 -## 범위 +## 외부 시스템 의존성 -어떤 도메인, 기능, 요청 흐름을 다루는지 적는다. +- 사용하는 저장소, 캐시, 메시징, 외부 API +- 의존 시스템이 없다면 생략 가능 +- 가능하면 "왜 붙는지"를 한 줄로 적는다 +- 거시적인 관점에서 architecture context diagram을 포함한다. -## 핵심 구성 요소 +예시: -- 구성 요소 1 -- 구성 요소 2 -- 구성 요소 3 +- MongoDB: 핵심 도메인 데이터 저장 +- Redis: 짧은 상태 또는 세션 저장 +- S3 / R2: 파일 저장 +- FCM: 알림 발송 +- AI Model: 분석 또는 생성 요청 -## 구조 요약 +## 핵심 기능 -현재 구조를 짧게 설명한다. +- 핵심 기능은 2~3개만 고른다 +- 각 기능은 중요도 + 복잡도/이해 난이도를 기준으로 선정한다 +- 이름만 봐도 읽기 시작점을 알 수 있게 쓴다 -## Mermaid 다이어그램 +예시: -필요한 경우 아래 예시 중 하나를 복사해서 사용한다. 
+- 책 목록 조회 +- 진행도 업데이트 +- 단어 조회 및 생성 -### 시스템/도메인 관계 예시 +## 핵심 기능 흐름 -```mermaid -flowchart TD - Client[Client] - Api[Spring API] - Mongo[MongoDB] - Redis[Redis] - External[External Services] - - Client --> Api - Api --> Mongo - Api --> Redis - Api --> External -``` +핵심 기능마다 시퀀스 다이어그램 또는 데이터 흐름 중 하나만 둔다. +기능당 다이어그램 1개면 충분하다. -### 요청 흐름 예시 +### 기능 흐름 예시 ```mermaid sequenceDiagram @@ -50,41 +50,41 @@ sequenceDiagram participant Controller participant Service participant Repository - participant Mongo + participant External Client->>Controller: Request - Controller->>Service: Call use case - Service->>Repository: Query or save - Repository->>Mongo: Access data - Mongo-->>Repository: Result - Repository-->>Service: Result - Service-->>Controller: Response DTO - Controller-->>Client: HTTP Response + Controller->>Service: Use case call + Service->>Repository: Load or save + Service->>External: Optional dependency call + Repository-->>Service: Data + Service-->>Controller: Result + Controller-->>Client: Response ``` -### 상태 전이 예시 +### 데이터 흐름 예시 ```mermaid -stateDiagram-v2 - [*] --> Pending - Pending --> InProgress - InProgress --> Completed - InProgress --> Failed +flowchart TD + Client[Client] + Controller[Controller] + Service[Service] + Mongo[(MongoDB)] + External[External System] + + Client --> Controller + Controller --> Service + Service --> Mongo + Service --> External ``` -## 주요 흐름 설명 +## 핵심 기능 선정 기준 -다이어그램만으로 부족한 핵심 흐름을 짧게 설명한다. +각 기능을 왜 핵심으로 봤는지 짧게 적는다. 1. 요청이 어디서 시작되는가 -2. 어떤 서비스가 핵심 규칙을 담당하는가 -3. 어떤 저장소나 외부 시스템에 의존하는가 - -## 개선 포인트 - -- 현재 구조의 문제 -- 리팩터링 후보 -- 성능 또는 안정성 리스크 +2. 어느 서비스나 도메인이 중심 책임을 갖는가 +3. 어떤 저장소나 외부 시스템과 결합되는가 +4. 
왜 중요하거나 복잡한가 ## 참고 코드 From 767ac3d47f31cff7c32e5793075a53a5a51693c4 Mon Sep 17 00:00:00 2001 From: solfe Date: Wed, 1 Apr 2026 23:06:11 +0900 Subject: [PATCH 07/28] Rename context diagram terminology --- docs/architecture/README.md | 2 +- docs/architecture/overview.md | 4 ++-- docs/templates/architecture-template.md | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/architecture/README.md b/docs/architecture/README.md index f25965c..aa0e40b 100644 --- a/docs/architecture/README.md +++ b/docs/architecture/README.md @@ -15,7 +15,7 @@ ## 현재 문서 -- [프로젝트 전체 컨텍스트](overview.md) +- [시스템 컨텍스트 다이어그램](overview.md) - [Streak 도메인 미니맵](streak.md) - [Word 도메인 미니맵](word.md) - [Book 도메인 미니맵](content-book.md) diff --git a/docs/architecture/overview.md b/docs/architecture/overview.md index c997b55..0b5a4ab 100644 --- a/docs/architecture/overview.md +++ b/docs/architecture/overview.md @@ -1,4 +1,4 @@ -# 프로젝트 전체 컨텍스트 +# 시스템 컨텍스트 다이어그램 ## 현재 시스템의 책임 @@ -21,7 +21,7 @@ - 단어 조회와 AI 기반 보완 - 스트릭 계산과 보상/알림 처리 -## 핵심 기능 흐름 +## 시스템 컨텍스트 다이어그램 ```mermaid flowchart TD diff --git a/docs/templates/architecture-template.md b/docs/templates/architecture-template.md index 87c82df..f7120ce 100644 --- a/docs/templates/architecture-template.md +++ b/docs/templates/architecture-template.md @@ -15,7 +15,7 @@ - 사용하는 저장소, 캐시, 메시징, 외부 API - 의존 시스템이 없다면 생략 가능 - 가능하면 "왜 붙는지"를 한 줄로 적는다 -- 거시적인 관점에서 architecture context diagram을 포함한다. +- 거시적인 관점에서 system context diagram을 포함한다. 
예시: From 934f3a51706c1fd6f16236709fc86775819e3dc0 Mon Sep 17 00:00:00 2001 From: solfe Date: Thu, 2 Apr 2026 09:58:37 +0900 Subject: [PATCH 08/28] Align domain docs with mini-map structure --- docs/architecture/content-book.md | 13 ++++++++----- docs/architecture/streak.md | 11 +++++++++-- docs/architecture/word.md | 9 ++++++++- docs/templates/architecture-template.md | 11 +++++++++-- 4 files changed, 34 insertions(+), 10 deletions(-) diff --git a/docs/architecture/content-book.md b/docs/architecture/content-book.md index dd094ec..5160b61 100644 --- a/docs/architecture/content-book.md +++ b/docs/architecture/content-book.md @@ -1,11 +1,14 @@ # Book 도메인 미니맵 ## 현재 시스템의 책임 - -- 책은 여러 개의 챕터를 가지고 챕터는 여러개의 청크를 가진다. -- 사용자 책 읽기 진행도를 계산하고 저장한다. -- 관리자에 의해서 책이 추가로 등록될 수 있다. -- 청크는 이미지와 글자와 같은 타입을 가질 수 있다. +- 책, 챕터, 청크로 구성된 콘텐츠를 관리한다. +- 사용자에게 책 콘텐츠를 제공하고, 읽기 진행도를 계산·저장한다. +- 관리자가 새로운 책과 콘텐츠를 등록할 수 있도록 한다. + +## 도메인 구조 +- 책은 여러 개의 챕터로 구성된다. +- 챕터는 여러 개의 청크로 구성된다. +- 청크는 텍스트, 이미지 등의 타입을 가질 수 있다. ## 외부 시스템 의존성 diff --git a/docs/architecture/streak.md b/docs/architecture/streak.md index 0a405dd..468d40c 100644 --- a/docs/architecture/streak.md +++ b/docs/architecture/streak.md @@ -7,6 +7,12 @@ - 프리즈, 보상, 완료 기록을 갱신한다. - 보호 알림과 관련 스케줄 작업을 수행한다. +## 도메인 구조 + +- 스트릭은 사용자별 누적 리포트와 일자별 완료 기록으로 상태를 계산한다. +- 읽기 세션은 Redis에 짧게 저장되고, 확정된 상태는 MongoDB에 반영된다. +- 프리즈와 보상 기록은 별도 트랜잭션/이력 데이터로 관리된다. + ## 외부 시스템 의존성 - MongoDB: 누적 리포트와 완료 기록 저장 @@ -68,8 +74,9 @@ sequenceDiagram ## 핵심 기능 선정 기준 1. 스트릭 갱신은 다른 학습 도메인에서 공통으로 호출하는 핵심 교차 지점이다. -2. Redis, MongoDB, FCM이 함께 등장해 의존성 파악 가치가 크다. -3. 세션, 누적 상태, 스케줄러가 모두 연결돼 있어 처음 읽는 난이도가 높다. +2. `리딩 세션 -> 누적 상태 -> 알림`으로 이어지는 도메인 구조를 같이 이해해야 한다. +3. Redis, MongoDB, FCM이 함께 등장해 의존성 파악 가치가 크다. +4. 세션, 누적 상태, 스케줄러가 모두 연결돼 있어 처음 읽는 난이도가 높다. ## 참고 코드 diff --git a/docs/architecture/word.md b/docs/architecture/word.md index 20324ea..e719ec0 100644 --- a/docs/architecture/word.md +++ b/docs/architecture/word.md @@ -7,6 +7,12 @@ - 단어 데이터가 없을 때 AI 분석으로 보완한다. 
- 실패한 단어를 차단 캐시로 관리한다. +## 도메인 구조 + +- 원형 단어 본문은 `Word`로 저장된다. +- 입력 단어와 원형 단어의 연결은 `WordVariant`로 관리된다. +- 반복 실패 단어는 `InvalidWord`에 저장해 재시도를 줄인다. + ## 외부 시스템 의존성 - MongoDB: 단어 본문, variant, invalid cache 저장 @@ -75,8 +81,9 @@ sequenceDiagram ## 핵심 기능 선정 기준 1. 조회처럼 보이지만 캐시, 저장, AI 호출이 함께 묶여 있어 흐름이 길다. -2. variant와 invalid cache를 함께 이해해야 실제 동작을 읽을 수 있다. +2. `Word`, `WordVariant`, `InvalidWord`의 역할을 같이 이해해야 실제 동작을 읽을 수 있다. 3. 외부 AI 의존성이 있어 실패 경로까지 같이 파악해야 한다. +4. variant와 invalid cache가 조회 흐름 초반에 분기점 역할을 한다. ## 참고 코드 diff --git a/docs/templates/architecture-template.md b/docs/templates/architecture-template.md index f7120ce..c730c4d 100644 --- a/docs/templates/architecture-template.md +++ b/docs/templates/architecture-template.md @@ -10,6 +10,12 @@ - 조회, 저장, 계산, 외부 연동 중 무엇이 중심인지 - 다른 도메인과 구분되는 역할이 무엇인지 +## 도메인 구조 + +- 이 도메인을 구성하는 핵심 개체나 하위 구조를 짧게 적는다 +- 1:N 관계나 상위/하위 개념이 있다면 여기서 먼저 정리한다 +- 읽는 사람이 도메인 내부 shape를 빠르게 잡을 수 있으면 충분하다 + ## 외부 시스템 의존성 - 사용하는 저장소, 캐시, 메시징, 외부 API @@ -83,8 +89,9 @@ flowchart TD 1. 요청이 어디서 시작되는가 2. 어느 서비스나 도메인이 중심 책임을 갖는가 -3. 어떤 저장소나 외부 시스템과 결합되는가 -4. 왜 중요하거나 복잡한가 +3. 도메인 구조를 이해해야 읽을 수 있는 포인트가 있는가 +4. 어떤 저장소나 외부 시스템과 결합되는가 +5. 
왜 중요하거나 복잡한가 ## 참고 코드 From 7ad6bcc01722af4656ba8154c77eca51d787ba01 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 00:50:15 +0900 Subject: [PATCH 09/28] test: add BookImportService coverage --- .../book/service/BookImportServiceTest.java | 276 ++++++++++++++++++ 1 file changed, 276 insertions(+) create mode 100644 src/test/java/com/linglevel/api/content/book/service/BookImportServiceTest.java diff --git a/src/test/java/com/linglevel/api/content/book/service/BookImportServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookImportServiceTest.java new file mode 100644 index 0000000..2978398 --- /dev/null +++ b/src/test/java/com/linglevel/api/content/book/service/BookImportServiceTest.java @@ -0,0 +1,276 @@ +package com.linglevel.api.content.book.service; + +import com.linglevel.api.content.book.dto.BookImportData; +import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.book.entity.Chunk; +import com.linglevel.api.content.book.repository.ChapterRepository; +import com.linglevel.api.content.book.repository.ChunkRepository; +import com.linglevel.api.content.common.ChunkType; +import com.linglevel.api.content.common.DifficultyLevel; +import com.linglevel.api.s3.service.S3UrlService; +import com.linglevel.api.s3.strategy.BookPathStrategy; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.List; +import java.util.stream.StreamSupport; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class 
BookImportServiceTest { + @Mock + private ChapterRepository chapterRepository; + + @Mock + private ChunkRepository chunkRepository; + + @Mock + private S3UrlService s3UrlService; + + @Mock + private BookPathStrategy bookPathStrategy; + + @InjectMocks + private BookImportService bookImportService; + + private BookImportData bookImportData; + private BookImportData.ChapterMetadata chapterMetadata; + + @BeforeEach + void setUp() { + bookImportData = new BookImportData(); + chapterMetadata = new BookImportData.ChapterMetadata(); + BookImportData.TextLevelData textLevelData = new BookImportData.TextLevelData(); + BookImportData.ChapterData chapterData = new BookImportData.ChapterData(); + BookImportData.ChunkData textChunkData = new BookImportData.ChunkData(); + BookImportData.ChunkData imageChunkData = new BookImportData.ChunkData(); + + textChunkData.setChunkNum(1); + textChunkData.setChunkText("내용"); + textChunkData.setIsImage(false); + imageChunkData.setChunkNum(2); + imageChunkData.setChunkText("주소"); + imageChunkData.setIsImage(true); + imageChunkData.setDescription("이미지 설명"); + + chapterData.setChapterNum(1); + chapterData.setChunks(List.of(textChunkData, imageChunkData)); + + textLevelData.setTextLevel("a1"); + textLevelData.setChapters(List.of(chapterData)); + + chapterMetadata.setChapterNum(1); + chapterMetadata.setTitle("제목"); + chapterMetadata.setSummary("요약"); + + bookImportData.setChapterMetadata(List.of(chapterMetadata)); + bookImportData.setLeveledResults(List.of(textLevelData)); + } + + @Test + @DisplayName("chapter metadata를 Chapter 엔티티로 변환해 저장한다.") + void importChapters() { + // given + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass((Class) Iterable.class); + + when(chapterRepository.saveAll(ArgumentMatchers.anyList())).thenAnswer(invocation -> invocation.getArgument(0)); + + // when + List chapters = bookImportService.createChaptersFromMetadata(bookImportData, "bookId"); + + // then + 
verify(chapterRepository).saveAll(captor.capture()); + + List savedChapters = StreamSupport.stream(captor.getValue().spliterator(), false).toList(); + + assertEquals(1, savedChapters.size()); + + Chapter savedChapter = savedChapters.get(0); + assertEquals("bookId", savedChapter.getBookId()); + assertEquals(1, savedChapter.getChapterNumber()); + assertEquals("제목", savedChapter.getTitle()); + assertEquals("요약", savedChapter.getDescription()); + assertEquals(0, savedChapter.getReadingTime()); + assertEquals(savedChapters, chapters); + } + + @Test + @DisplayName("leveled results를 텍스트와 이미지 Chunk 엔티티로 변환해 저장한다.") + void importChunks() { + // given + Chapter savedChapter = new Chapter(); + savedChapter.setId("chapter-1"); + List chapters = List.of(savedChapter); + + when(s3UrlService.buildImageUrl("bookId", "주소", bookPathStrategy)) + .thenReturn("https://cdn.example.com/image.png"); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = + ArgumentCaptor.forClass((Class) Iterable.class); + + // when + bookImportService.createChunksFromLeveledResults(bookImportData, chapters, "bookId"); + + // then + verify(chunkRepository).saveAll(captor.capture()); + + List savedChunks = + StreamSupport.stream(captor.getValue().spliterator(), false).toList(); + + assertEquals(2, savedChunks.size()); + + Chunk textChunk = savedChunks.get(0); + assertEquals("chapter-1", textChunk.getChapterId()); + assertEquals(1, textChunk.getChunkNumber()); + assertEquals(DifficultyLevel.A1, textChunk.getDifficultyLevel()); + assertEquals(ChunkType.TEXT, textChunk.getType()); + assertEquals("내용", textChunk.getContent()); + assertNull(textChunk.getDescription()); + + Chunk imageChunk = savedChunks.get(1); + assertEquals("chapter-1", imageChunk.getChapterId()); + assertEquals(2, imageChunk.getChunkNumber()); + assertEquals(DifficultyLevel.A1, imageChunk.getDifficultyLevel()); + assertEquals(ChunkType.IMAGE, imageChunk.getType()); + assertEquals("https://cdn.example.com/image.png", 
imageChunk.getContent()); + assertEquals("이미지 설명", imageChunk.getDescription()); + + verify(s3UrlService).buildImageUrl("bookId", "주소", bookPathStrategy); + } + + @Test + @DisplayName("여러 chapter metadata가 주어지면 chapterNumber를 1부터 순차 증가시켜 저장한다.") + void importChapters_assignSequentialChapterNumbers() { + // given + BookImportData.ChapterMetadata secondMetadata = new BookImportData.ChapterMetadata(); + secondMetadata.setChapterNum(2); + secondMetadata.setTitle("두번째 제목"); + secondMetadata.setSummary("두번째 요약"); + bookImportData.setChapterMetadata(List.of(chapterMetadata, secondMetadata)); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass((Class) Iterable.class); + + when(chapterRepository.saveAll(ArgumentMatchers.anyList())).thenAnswer(invocation -> invocation.getArgument(0)); + + // when + bookImportService.createChaptersFromMetadata(bookImportData, "bookId"); + + // then + verify(chapterRepository).saveAll(captor.capture()); + + List savedChapters = StreamSupport.stream(captor.getValue().spliterator(), false).toList(); + + assertEquals(2, savedChapters.size()); + assertEquals(1, savedChapters.get(0).getChapterNumber()); + assertEquals("제목", savedChapters.get(0).getTitle()); + assertEquals(2, savedChapters.get(1).getChapterNumber()); + assertEquals("두번째 제목", savedChapters.get(1).getTitle()); + } + + @Test + @DisplayName("여러 챕터를 저장할 때 각 챕터의 chunkNumber는 1부터 다시 시작한다.") + void importChunks_resetsChunkNumberPerChapter() { + // given + Chapter firstChapter = new Chapter(); + firstChapter.setId("chapter-1"); + Chapter secondChapter = new Chapter(); + secondChapter.setId("chapter-2"); + List chapters = List.of(firstChapter, secondChapter); + + BookImportData.ChunkData firstTextChunk = createChunkData("첫 챕터 1", false, null); + BookImportData.ChunkData firstImageChunk = createChunkData("first.png", true, "첫 이미지"); + BookImportData.ChunkData secondTextChunk = createChunkData("둘째 챕터 1", false, null); + + BookImportData.ChapterData 
firstChapterData = createChapterData(List.of(firstTextChunk, firstImageChunk)); + BookImportData.ChapterData secondChapterData = createChapterData(List.of(secondTextChunk)); + + BookImportData.TextLevelData textLevelData = new BookImportData.TextLevelData(); + textLevelData.setTextLevel("a1"); + textLevelData.setChapters(List.of(firstChapterData, secondChapterData)); + bookImportData.setLeveledResults(List.of(textLevelData)); + + when(s3UrlService.buildImageUrl("bookId", "first.png", bookPathStrategy)) + .thenReturn("https://cdn.example.com/first.png"); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass((Class) Iterable.class); + + // when + bookImportService.createChunksFromLeveledResults(bookImportData, chapters, "bookId"); + + // then + verify(chunkRepository).saveAll(captor.capture()); + + List savedChunks = StreamSupport.stream(captor.getValue().spliterator(), false).toList(); + + assertEquals(3, savedChunks.size()); + assertEquals("chapter-1", savedChunks.get(0).getChapterId()); + assertEquals(1, savedChunks.get(0).getChunkNumber()); + assertEquals("chapter-1", savedChunks.get(1).getChapterId()); + assertEquals(2, savedChunks.get(1).getChunkNumber()); + assertEquals("chapter-2", savedChunks.get(2).getChapterId()); + assertEquals(1, savedChunks.get(2).getChunkNumber()); + } + + @Test + @DisplayName("AI chapter 수가 savedChapters보다 적으면 남은 챕터는 건너뛴다.") + void importChunks_skipsRemainingSavedChaptersWhenAiChaptersAreShorter() { + // given + Chapter firstChapter = new Chapter(); + firstChapter.setId("chapter-1"); + Chapter secondChapter = new Chapter(); + secondChapter.setId("chapter-2"); + List chapters = List.of(firstChapter, secondChapter); + + BookImportData.ChunkData onlyChunk = createChunkData("첫 챕터만 저장", false, null); + BookImportData.ChapterData onlyChapterData = createChapterData(List.of(onlyChunk)); + + BookImportData.TextLevelData textLevelData = new BookImportData.TextLevelData(); + textLevelData.setTextLevel("a1"); + 
textLevelData.setChapters(List.of(onlyChapterData)); + bookImportData.setLeveledResults(List.of(textLevelData)); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass((Class) Iterable.class); + + // when + bookImportService.createChunksFromLeveledResults(bookImportData, chapters, "bookId"); + + // then + verify(chunkRepository).saveAll(captor.capture()); + + List savedChunks = StreamSupport.stream(captor.getValue().spliterator(), false).toList(); + + assertEquals(1, savedChunks.size()); + assertEquals("chapter-1", savedChunks.get(0).getChapterId()); + assertEquals("첫 챕터만 저장", savedChunks.get(0).getContent()); + } + + private BookImportData.ChunkData createChunkData(String chunkText, boolean isImage, String description) { + BookImportData.ChunkData chunkData = new BookImportData.ChunkData(); + chunkData.setChunkText(chunkText); + chunkData.setIsImage(isImage); + chunkData.setDescription(description); + return chunkData; + } + + private BookImportData.ChapterData createChapterData(List chunks) { + BookImportData.ChapterData chapterData = new BookImportData.ChapterData(); + chapterData.setChunks(chunks); + return chapterData; + } +} From 1dab8961904f4633d6ad3c73f0fd80df4fc644ac Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 01:42:01 +0900 Subject: [PATCH 10/28] test: add BookReadingTimeService coverage --- .../service/BookReadingTimeServiceTest.java | 153 ++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 src/test/java/com/linglevel/api/content/book/service/BookReadingTimeServiceTest.java diff --git a/src/test/java/com/linglevel/api/content/book/service/BookReadingTimeServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookReadingTimeServiceTest.java new file mode 100644 index 0000000..35dde96 --- /dev/null +++ b/src/test/java/com/linglevel/api/content/book/service/BookReadingTimeServiceTest.java @@ -0,0 +1,153 @@ +package com.linglevel.api.content.book.service; + +import 
com.linglevel.api.content.book.dto.BookImportData; +import com.linglevel.api.content.book.entity.Book; +import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.book.exception.BooksErrorCode; +import com.linglevel.api.content.book.exception.BooksException; +import com.linglevel.api.content.book.repository.BookRepository; +import com.linglevel.api.content.book.repository.ChapterRepository; +import com.linglevel.api.content.common.DifficultyLevel; +import com.linglevel.api.content.common.service.ReadingTimeService; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.List; +import java.util.Optional; +import java.util.stream.StreamSupport; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class BookReadingTimeServiceTest { + + @Mock + private BookRepository bookRepository; + + @Mock + private ChapterRepository chapterRepository; + + @Mock + private ReadingTimeService readingTimeService; + + @InjectMocks + private BookReadingTimeService bookReadingTimeService; + + @Test + @DisplayName("책이 없으면 BOOK_NOT_FOUND 예외를 던진다.") + void updateReadingTimes_throwsWhenBookNotFound() { + // given + BookImportData importData = new BookImportData(); + when(bookRepository.findById("missing-book")).thenReturn(Optional.empty()); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> bookReadingTimeService.updateReadingTimes("missing-book", importData) + ); + + // then + assertEquals(BooksErrorCode.BOOK_NOT_FOUND.getMessage(), 
exception.getMessage()); + verify(chapterRepository, never()).findByBookIdOrderByChapterNumber("missing-book"); + verify(chapterRepository, never()).saveAll(org.mockito.ArgumentMatchers.anyList()); + verify(bookRepository, never()).save(org.mockito.ArgumentMatchers.any(Book.class)); + } + + @Test + @DisplayName("책 난이도와 일치하는 leveled results를 사용해 chapter와 book readingTime을 저장한다.") + void updateReadingTimes_updatesChapterAndBookReadingTimes() { + // given + Book book = new Book(); + book.setId("book-1"); + book.setDifficultyLevel(DifficultyLevel.A1); + + Chapter firstChapter = new Chapter(); + firstChapter.setId("chapter-1"); + firstChapter.setBookId("book-1"); + firstChapter.setChapterNumber(1); + + Chapter secondChapter = new Chapter(); + secondChapter.setId("chapter-2"); + secondChapter.setBookId("book-1"); + secondChapter.setChapterNumber(2); + + BookImportData importData = createImportData(); + + when(bookRepository.findById("book-1")).thenReturn(Optional.of(book)); + when(chapterRepository.findByBookIdOrderByChapterNumber("book-1")) + .thenReturn(List.of(firstChapter, secondChapter)); + when(readingTimeService.calculateReadingTimeFromCharacters(5)).thenReturn(3); + when(readingTimeService.calculateReadingTimeFromCharacters(4)).thenReturn(2); + + @SuppressWarnings("unchecked") + ArgumentCaptor> chaptersCaptor = + ArgumentCaptor.forClass((Class) Iterable.class); + ArgumentCaptor bookCaptor = ArgumentCaptor.forClass(Book.class); + + // when + bookReadingTimeService.updateReadingTimes("book-1", importData); + + // then + verify(chapterRepository).saveAll(chaptersCaptor.capture()); + verify(bookRepository).save(bookCaptor.capture()); + + List savedChapters = + StreamSupport.stream(chaptersCaptor.getValue().spliterator(), false).toList(); + Book savedBook = bookCaptor.getValue(); + + assertEquals(2, savedChapters.size()); + assertEquals(3, savedChapters.get(0).getReadingTime()); + assertEquals(2, savedChapters.get(1).getReadingTime()); + assertEquals(5, 
savedBook.getReadingTime()); + + verify(readingTimeService).calculateReadingTimeFromCharacters(5); + verify(readingTimeService).calculateReadingTimeFromCharacters(4); + } + + private BookImportData createImportData() { + BookImportData.ChunkData firstA1Chunk = createChunkData("abc"); + BookImportData.ChunkData secondA1Chunk = createChunkData("de"); + BookImportData.ChunkData chapterTwoA1Chunk = createChunkData("wxyz"); + BookImportData.ChunkData ignoredB1Chunk = createChunkData("ignored-text"); + + BookImportData.ChapterData firstA1Chapter = createChapterData(1, List.of(firstA1Chunk, secondA1Chunk)); + BookImportData.ChapterData secondA1Chapter = createChapterData(2, List.of(chapterTwoA1Chunk)); + BookImportData.ChapterData ignoredB1Chapter = createChapterData(1, List.of(ignoredB1Chunk)); + + BookImportData.TextLevelData a1Level = createTextLevelData("a1", List.of(firstA1Chapter, secondA1Chapter)); + BookImportData.TextLevelData b1Level = createTextLevelData("b1", List.of(ignoredB1Chapter)); + + BookImportData importData = new BookImportData(); + importData.setLeveledResults(List.of(a1Level, b1Level)); + return importData; + } + + private BookImportData.TextLevelData createTextLevelData(String textLevel, List chapters) { + BookImportData.TextLevelData levelData = new BookImportData.TextLevelData(); + levelData.setTextLevel(textLevel); + levelData.setChapters(chapters); + return levelData; + } + + private BookImportData.ChapterData createChapterData(int chapterNum, List chunks) { + BookImportData.ChapterData chapterData = new BookImportData.ChapterData(); + chapterData.setChapterNum(chapterNum); + chapterData.setChunks(chunks); + return chapterData; + } + + private BookImportData.ChunkData createChunkData(String chunkText) { + BookImportData.ChunkData chunkData = new BookImportData.ChunkData(); + chunkData.setChunkText(chunkText); + return chunkData; + } +} From 502aeb976bbf8488d0761d3116c62b67e0eaa344 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 
13:33:15 +0900 Subject: [PATCH 11/28] test: add ChunkService coverage --- .../book/service/ChunkServiceTest.java | 228 ++++++++++++++++++ 1 file changed, 228 insertions(+) create mode 100644 src/test/java/com/linglevel/api/content/book/service/ChunkServiceTest.java diff --git a/src/test/java/com/linglevel/api/content/book/service/ChunkServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ChunkServiceTest.java new file mode 100644 index 0000000..ac86b83 --- /dev/null +++ b/src/test/java/com/linglevel/api/content/book/service/ChunkServiceTest.java @@ -0,0 +1,228 @@ +package com.linglevel.api.content.book.service; + +import com.linglevel.api.common.dto.PageResponse; +import com.linglevel.api.content.book.dto.ChunkResponse; +import com.linglevel.api.content.book.dto.GetChunksRequest; +import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.book.entity.Chunk; +import com.linglevel.api.content.book.exception.BooksErrorCode; +import com.linglevel.api.content.book.exception.BooksException; +import com.linglevel.api.content.book.repository.ChapterRepository; +import com.linglevel.api.content.book.repository.ChunkRepository; +import com.linglevel.api.content.common.ChunkType; +import com.linglevel.api.content.common.DifficultyLevel; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; + +import java.util.List; +import java.util.Optional; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.verify; +import static 
org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class ChunkServiceTest { + + @Mock + private ChunkRepository chunkRepository; + + @Mock + private ChapterRepository chapterRepository; + + @Mock + private BookService bookService; + + @InjectMocks + private ChunkService chunkService; + + @Test + @DisplayName("청크 목록 조회 시 페이지 정보와 ChunkResponse 매핑을 반환한다.") + void getChunks_returnsPagedChunkResponses() { + // given + GetChunksRequest request = GetChunksRequest.builder() + .difficultyLevel(DifficultyLevel.A1) + .page(1) + .limit(300) + .build(); + + Chapter chapter = createChapter("chapter-1", "book-1"); + Chunk firstChunk = createChunk("chunk-1", "chapter-1", 1, ChunkType.TEXT, "first", null); + Chunk secondChunk = createChunk("chunk-2", "chapter-1", 2, ChunkType.IMAGE, "https://cdn/image.png", "image"); + Page chunkPage = new PageImpl<>(List.of(firstChunk, secondChunk)); + + when(bookService.existsById("book-1")).thenReturn(true); + when(chapterRepository.findById("chapter-1")).thenReturn(Optional.of(chapter)); + + ArgumentCaptor pageableCaptor = ArgumentCaptor.forClass(Pageable.class); + when(chunkRepository.findByChapterIdAndDifficultyLevel( + ArgumentMatchers.eq("chapter-1"), + ArgumentMatchers.eq(DifficultyLevel.A1), + pageableCaptor.capture() + )).thenReturn(chunkPage); + + // when + PageResponse response = chunkService.getChunks("book-1", "chapter-1", request, "user-1"); + + // then + assertEquals(2, response.getData().size()); + assertEquals("chunk-1", response.getData().get(0).getId()); + assertEquals(ChunkType.TEXT, response.getData().get(0).getType()); + assertEquals("https://cdn/image.png", response.getData().get(1).getContent()); + assertEquals("image", response.getData().get(1).getDescription()); + + assertEquals(0, pageableCaptor.getValue().getPageNumber()); + assertEquals(200, pageableCaptor.getValue().getPageSize()); + } + + @Test + @DisplayName("책이 없으면 BOOK_NOT_FOUND 예외를 던진다.") + void getChunks_throwsWhenBookNotFound() { + // given + 
GetChunksRequest request = GetChunksRequest.builder() + .difficultyLevel(DifficultyLevel.A1) + .build(); + when(bookService.existsById("missing-book")).thenReturn(false); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> chunkService.getChunks("missing-book", "chapter-1", request, "user-1") + ); + + // then + assertEquals(BooksErrorCode.BOOK_NOT_FOUND.getMessage(), exception.getMessage()); + } + + @Test + @DisplayName("챕터가 다른 책에 속하면 CHAPTER_NOT_FOUND_IN_BOOK 예외를 던진다.") + void getChunks_throwsWhenChapterDoesNotBelongToBook() { + // given + GetChunksRequest request = GetChunksRequest.builder() + .difficultyLevel(DifficultyLevel.A1) + .build(); + + when(bookService.existsById("book-1")).thenReturn(true); + when(chapterRepository.findById("chapter-1")) + .thenReturn(Optional.of(createChapter("chapter-1", "another-book"))); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> chunkService.getChunks("book-1", "chapter-1", request, "user-1") + ); + + // then + assertEquals(BooksErrorCode.CHAPTER_NOT_FOUND_IN_BOOK.getMessage(), exception.getMessage()); + } + + @Test + @DisplayName("단일 청크 조회 시 ChunkResponse로 변환해 반환한다.") + void getChunk_returnsChunkResponse() { + // given + Chapter chapter = createChapter("chapter-1", "book-1"); + Chunk chunk = createChunk("chunk-1", "chapter-1", 3, ChunkType.TEXT, "body", null); + + when(bookService.existsById("book-1")).thenReturn(true); + when(chapterRepository.findById("chapter-1")).thenReturn(Optional.of(chapter)); + when(chunkRepository.findById("chunk-1")).thenReturn(Optional.of(chunk)); + + // when + ChunkResponse response = chunkService.getChunk("book-1", "chapter-1", "chunk-1"); + + // then + assertEquals("chunk-1", response.getId()); + assertEquals(3, response.getChunkNumber()); + assertEquals(ChunkType.TEXT, response.getType()); + assertEquals("body", response.getContent()); + } + + @Test + @DisplayName("청크가 다른 챕터에 속하면 CHUNK_NOT_FOUND 예외를 던진다.") + void 
getChunk_throwsWhenChunkDoesNotBelongToChapter() { + // given + Chapter chapter = createChapter("chapter-1", "book-1"); + Chunk chunk = createChunk("chunk-1", "chapter-2", 1, ChunkType.TEXT, "body", null); + + when(bookService.existsById("book-1")).thenReturn(true); + when(chapterRepository.findById("chapter-1")).thenReturn(Optional.of(chapter)); + when(chunkRepository.findById("chunk-1")).thenReturn(Optional.of(chunk)); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> chunkService.getChunk("book-1", "chapter-1", "chunk-1") + ); + + // then + assertEquals(BooksErrorCode.CHUNK_NOT_FOUND.getMessage(), exception.getMessage()); + } + + @Test + @DisplayName("findById는 청크가 없으면 CHUNK_NOT_FOUND 예외를 던진다.") + void findById_throwsWhenChunkNotFound() { + // given + when(chunkRepository.findById("missing-chunk")).thenReturn(Optional.empty()); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> chunkService.findById("missing-chunk") + ); + + // then + assertEquals(BooksErrorCode.CHUNK_NOT_FOUND.getMessage(), exception.getMessage()); + } + + @Test + @DisplayName("findFirstByChapterId는 첫 번째 청크를 반환한다.") + void findFirstByChapterId_returnsFirstChunk() { + // given + Chunk chunk = createChunk("chunk-1", "chapter-1", 1, ChunkType.TEXT, "body", null); + when(chunkRepository.findFirstByChapterIdOrderByChunkNumberAsc("chapter-1")) + .thenReturn(Optional.of(chunk)); + + // when + Chunk result = chunkService.findFirstByChapterId("chapter-1"); + + // then + assertEquals("chunk-1", result.getId()); + assertEquals(1, result.getChunkNumber()); + } + + private Chapter createChapter(String chapterId, String bookId) { + Chapter chapter = new Chapter(); + chapter.setId(chapterId); + chapter.setBookId(bookId); + return chapter; + } + + private Chunk createChunk( + String chunkId, + String chapterId, + int chunkNumber, + ChunkType type, + String content, + String description + ) { + Chunk chunk = new Chunk(); + 
chunk.setId(chunkId); + chunk.setChapterId(chapterId); + chunk.setChunkNumber(chunkNumber); + chunk.setDifficultyLevel(DifficultyLevel.A1); + chunk.setType(type); + chunk.setContent(content); + chunk.setDescription(description); + return chunk; + } +} From aae6e96589168aad8d598b62be45e4e075ca9060 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 13:48:16 +0900 Subject: [PATCH 12/28] test: add importBook service coverage --- .../content/book/service/BookServiceTest.java | 169 +++++++++++++++++- 1 file changed, 165 insertions(+), 4 deletions(-) diff --git a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java index 559bc68..994d669 100644 --- a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java @@ -1,20 +1,27 @@ package com.linglevel.api.content.book.service; import com.linglevel.api.common.dto.PageResponse; -import com.linglevel.api.content.book.dto.BookResponse; -import com.linglevel.api.content.book.dto.GetBooksRequest; +import com.linglevel.api.content.book.dto.*; import com.linglevel.api.content.book.entity.Book; import com.linglevel.api.content.book.entity.BookProgress; +import com.linglevel.api.content.book.entity.Chapter; import com.linglevel.api.content.book.repository.BookProgressRepository; import com.linglevel.api.content.book.repository.BookRepository; import com.linglevel.api.content.common.DifficultyLevel; import com.linglevel.api.content.common.ProgressStatus; +import com.linglevel.api.content.common.TitleTranslations; +import com.linglevel.api.s3.service.ImageResizeService; +import com.linglevel.api.s3.service.S3AiService; +import com.linglevel.api.s3.service.S3TransferService; +import com.linglevel.api.s3.service.S3UrlService; +import com.linglevel.api.s3.strategy.BookPathStrategy; import com.linglevel.api.user.entity.User; import 
com.linglevel.api.user.entity.UserRole; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @@ -28,7 +35,9 @@ import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) @@ -40,6 +49,27 @@ class BookServiceTest { @Mock private BookProgressRepository bookProgressRepository; + @Mock + private S3AiService s3AiService; + + @Mock + private S3TransferService s3TransferService; + + @Mock + private S3UrlService s3UrlService; + + @Mock + private BookPathStrategy bookPathStrategy; + + @Mock + private ImageResizeService imageResizeService; + + @Mock + private BookReadingTimeService bookReadingTimeService; + + @Mock + private BookImportService bookImportService; + @InjectMocks private BookService bookService; @@ -56,6 +86,110 @@ void setUp() { testUser.setCreatedAt(LocalDateTime.now()); } + @Test + @DisplayName("importBook는 책 저장, 이미지 처리, 챕터/청크 import, reading time 갱신을 순서대로 수행한다") + void importBook_orchestratesImportFlow() { + // given + BookImportRequest request = new BookImportRequest(); + request.setId("request-1"); + + BookImportData importData = createImportData(); + Chapter savedChapter = new Chapter(); + savedChapter.setId("chapter-1"); + List savedChapters = List.of(savedChapter); + + when(s3AiService.downloadJsonFile("request-1", BookImportData.class, bookPathStrategy)) + .thenReturn(importData); + when(s3UrlService.getCoverImageUrl("request-1", bookPathStrategy)) + .thenReturn("https://cdn/request-cover.jpg"); + 
when(s3UrlService.getCoverImageUrl("saved-book-id", bookPathStrategy)) + .thenReturn("https://cdn/original-cover.jpg"); + when(bookPathStrategy.generateCoverImagePath("saved-book-id")) + .thenReturn("literature/saved-book-id/images/cover.jpg"); + when(imageResizeService.createSmallImage("literature/saved-book-id/images/cover.jpg")) + .thenReturn("https://cdn/small-cover.webp"); + when(bookRepository.save(any(Book.class))) + .thenAnswer(invocation -> { + Book book = invocation.getArgument(0); + if (book.getId() == null) { + book.setId("saved-book-id"); + } + return book; + }); + when(bookImportService.createChaptersFromMetadata(importData, "saved-book-id")) + .thenReturn(savedChapters); + + ArgumentCaptor bookCaptor = ArgumentCaptor.forClass(Book.class); + + // when + BookImportResponse response = bookService.importBook(request); + + // then + verify(bookRepository, org.mockito.Mockito.times(2)).save(bookCaptor.capture()); + List savedBooks = bookCaptor.getAllValues(); + Book finalSavedBook = savedBooks.get(savedBooks.size() - 1); + + assertThat(response.getId()).isEqualTo("saved-book-id"); + assertThat(finalSavedBook.getId()).isEqualTo("saved-book-id"); + assertThat(finalSavedBook.getTitle()).isEqualTo("Imported title"); + assertThat(finalSavedBook.getDifficultyLevel()).isEqualTo(DifficultyLevel.A1); + assertThat(finalSavedBook.getChapterCount()).isEqualTo(2); + assertThat(finalSavedBook.getCoverImageUrl()).isEqualTo("https://cdn/small-cover.webp"); + + verify(s3TransferService).transferImagesFromAiToStatic("request-1", "saved-book-id", bookPathStrategy); + verify(bookImportService).createChaptersFromMetadata(importData, "saved-book-id"); + verify(bookImportService).createChunksFromLeveledResults(importData, savedChapters, "saved-book-id"); + verify(bookReadingTimeService).updateReadingTimes("saved-book-id", importData); + } + + @Test + @DisplayName("cover image 리사이즈가 실패하면 원본 cover URL을 유지한다") + void importBook_keepsOriginalCoverUrlWhenResizeFails() { + // given + 
BookImportRequest request = new BookImportRequest(); + request.setId("request-1"); + + BookImportData importData = createImportData(); + List savedChapters = List.of(new Chapter()); + + when(s3AiService.downloadJsonFile("request-1", BookImportData.class, bookPathStrategy)) + .thenReturn(importData); + when(s3UrlService.getCoverImageUrl("request-1", bookPathStrategy)) + .thenReturn("https://cdn/request-cover.jpg"); + when(s3UrlService.getCoverImageUrl("saved-book-id", bookPathStrategy)) + .thenReturn("https://cdn/original-cover.jpg"); + when(bookPathStrategy.generateCoverImagePath("saved-book-id")) + .thenReturn("literature/saved-book-id/images/cover.jpg"); + when(imageResizeService.createSmallImage("literature/saved-book-id/images/cover.jpg")) + .thenThrow(new RuntimeException("resize failed")); + when(bookRepository.save(any(Book.class))) + .thenAnswer(invocation -> { + Book book = invocation.getArgument(0); + if (book.getId() == null) { + book.setId("saved-book-id"); + } + return book; + }); + when(bookImportService.createChaptersFromMetadata(importData, "saved-book-id")) + .thenReturn(savedChapters); + + ArgumentCaptor bookCaptor = ArgumentCaptor.forClass(Book.class); + + // when + BookImportResponse response = bookService.importBook(request); + + // then + verify(bookRepository, org.mockito.Mockito.times(2)).save(bookCaptor.capture()); + List savedBooks = bookCaptor.getAllValues(); + Book finalSavedBook = savedBooks.get(savedBooks.size() - 1); + + assertThat(response.getId()).isEqualTo("saved-book-id"); + assertThat(finalSavedBook.getCoverImageUrl()).isEqualTo("https://cdn/original-cover.jpg"); + + verify(bookImportService).createChunksFromLeveledResults(importData, savedChapters, "saved-book-id"); + verify(bookReadingTimeService).updateReadingTimes("saved-book-id", importData); + } + @Test @DisplayName("진도 필터링과 페이지네이션이 함께 동작할 때 - IN_PROGRESS 필터") void testProgressFilterWithPagination_InProgress() { @@ -300,4 +434,31 @@ private BookProgress 
createBookProgress(String userId, String bookId, boolean is progress.setUpdatedAt(Instant.now()); return progress; } -} \ No newline at end of file + + private BookImportData createImportData() { + BookImportData importData = new BookImportData(); + importData.setTitle("Imported title"); + importData.setTitleTranslations(new TitleTranslations("가져온 제목", "Imported title")); + importData.setAuthor("Imported author"); + importData.setOriginalTextLevel("a1"); + importData.setLeveledResults(List.of( + createTextLevelData("a1", 2), + createTextLevelData("b1", 1) + )); + return importData; + } + + private BookImportData.TextLevelData createTextLevelData(String level, int chapterCount) { + BookImportData.TextLevelData textLevelData = new BookImportData.TextLevelData(); + textLevelData.setTextLevel(level); + List chapters = new java.util.ArrayList<>(); + for (int i = 1; i <= chapterCount; i++) { + BookImportData.ChapterData chapterData = new BookImportData.ChapterData(); + chapterData.setChapterNum(i); + chapterData.setChunks(List.of()); + chapters.add(chapterData); + } + textLevelData.setChapters(chapters); + return textLevelData; + } +} From bd93ed01889ab8e4f7b6863bf92bc8ba2fdc4702 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 14:45:30 +0900 Subject: [PATCH 13/28] test: add book progress and repository coverage --- .../book/repository/BookRepositoryImpl.java | 2 + .../repository/ChapterRepositoryImpl.java | 8 +- .../repository/BookRepositoryImplTest.java | 132 ++++++++++++++ .../repository/ChapterRepositoryImplTest.java | 140 +++++++++++++++ .../book/service/ChapterServiceTest.java | 113 +++++++++++- .../book/service/ProgressServiceTest.java | 167 ++++++++++++++++++ 6 files changed, 555 insertions(+), 7 deletions(-) create mode 100644 src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java create mode 100644 src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java diff --git 
a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java index a3a6425..865d024 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java +++ b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java @@ -94,6 +94,8 @@ private void applyProgressFilter(Query query, ProgressStatus progress, String us List bookIds = getBookIdsByProgress(userId, progress); if (!bookIds.isEmpty()) { query.addCriteria(Criteria.where("id").in(bookIds)); + } else { + query.addCriteria(Criteria.where("_id").is(null)); } } diff --git a/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java b/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java index 1ab49f9..7d340ee 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java +++ b/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java @@ -65,7 +65,7 @@ private void applyProgressFilter(Query query, ProgressStatus progress, String bo BookProgress bookProgress = bookProgressRepository.findByUserIdAndBookId(userId, bookId) .orElse(null); - List chapterNumbers = getChapterNumbersByProgress(bookProgress, progress); + List chapterNumbers = getChapterNumbersByProgress(bookId, bookProgress, progress); if (chapterNumbers == null) { // null이면 필터링하지 않음 (모든 챕터 반환) @@ -83,10 +83,10 @@ private void applyProgressFilter(Query query, ProgressStatus progress, String bo /** * 진도 상태별 챕터 번호 목록 조회 */ - private List getChapterNumbersByProgress(BookProgress bookProgress, ProgressStatus progressStatus) { + private List getChapterNumbersByProgress(String bookId, BookProgress bookProgress, ProgressStatus progressStatus) { // 모든 챕터 번호 조회 List allChapters = mongoTemplate.find( - Query.query(Criteria.where("bookId").is(bookProgress.getBookId())), + 
Query.query(Criteria.where("bookId").is(bookId)), Chapter.class ); List allChapterNumbers = allChapters.stream().map(Chapter::getChapterNumber).toList(); @@ -129,4 +129,4 @@ private List getChapterNumbersByProgress(BookProgress bookProgress, Pro }) .toList(); } -} \ No newline at end of file +} diff --git a/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java new file mode 100644 index 0000000..75812a9 --- /dev/null +++ b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java @@ -0,0 +1,132 @@ +package com.linglevel.api.content.book.repository; + +import com.linglevel.api.common.AbstractDatabaseTest; +import com.linglevel.api.content.book.dto.GetBooksRequest; +import com.linglevel.api.content.book.entity.Book; +import com.linglevel.api.content.common.DifficultyLevel; +import com.linglevel.api.content.common.ProgressStatus; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.data.mongo.DataMongoTest; +import org.springframework.context.annotation.Import; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoTemplate; + +import java.time.Instant; +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; + +@DataMongoTest +@Import(BookRepositoryImpl.class) +class BookRepositoryImplTest extends AbstractDatabaseTest { + + @Autowired + private BookRepository bookRepository; + + @Autowired + private BookProgressRepository bookProgressRepository; + + @Autowired + private MongoTemplate mongoTemplate; + + 
private static final String USER_ID = "user-1"; + + @BeforeEach + void setUp() { + bookProgressRepository.deleteAll(); + bookRepository.deleteAll(); + + bookRepository.saveAll(List.of( + createBook("book-1", "Alpha", Instant.parse("2026-01-01T00:00:00Z")), + createBook("book-2", "Beta", Instant.parse("2026-01-02T00:00:00Z")), + createBook("book-3", "Gamma", Instant.parse("2026-01-03T00:00:00Z")) + )); + + mongoTemplate.insert(createProgressDocument("book-2", false, 5), "bookProgress"); + mongoTemplate.insert(createProgressDocument("book-3", true, 10), "bookProgress"); + } + + @Test + @DisplayName("NOT_STARTED 필터는 progress 문서가 없는 책만 반환한다") + void findBooksWithFilters_returnsNotStartedBooks() { + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Book::getId).containsExactly("book-1"); + assertThat(result.getTotalElements()).isEqualTo(1); + } + + @Test + @DisplayName("IN_PROGRESS 필터는 완료되지 않았고 읽기 기록이 있는 책만 반환한다") + void findBooksWithFilters_returnsInProgressBooks() { + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.IN_PROGRESS) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Book::getId).containsExactly("book-2"); + assertThat(result.getTotalElements()).isEqualTo(1); + } + + @Test + @DisplayName("COMPLETED 필터는 완료된 책만 반환한다") + void findBooksWithFilters_returnsCompletedBooks() { + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.COMPLETED) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Book::getId).containsExactly("book-3"); + assertThat(result.getTotalElements()).isEqualTo(1); + } + + @Test + @DisplayName("조건에 맞는 
progress가 없으면 빈 페이지를 반환한다") + void findBooksWithFilters_returnsEmptyPageWhenNoProgressMatch() { + bookProgressRepository.deleteAll(); + mongoTemplate.insert(createProgressDocument("book-1", false, 0), "bookProgress"); + + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.IN_PROGRESS) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).isEmpty(); + assertThat(result.getTotalElements()).isZero(); + } + + private Pageable defaultPageable() { + return PageRequest.of(0, 10, Sort.by(Sort.Direction.ASC, "createdAt")); + } + + private Book createBook(String id, String title, Instant createdAt) { + Book book = new Book(); + book.setId(id); + book.setTitle(title); + book.setAuthor("Author"); + book.setDifficultyLevel(DifficultyLevel.A1); + book.setChapterCount(10); + book.setCreatedAt(createdAt); + return book; + } + + private Document createProgressDocument(String bookId, boolean isCompleted, int maxReadChunkNumber) { + return new Document("userId", USER_ID) + .append("bookId", bookId) + .append("isCompleted", isCompleted) + .append("maxReadChunkNumber", maxReadChunkNumber); + } +} diff --git a/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java b/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java new file mode 100644 index 0000000..4b72e07 --- /dev/null +++ b/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java @@ -0,0 +1,140 @@ +package com.linglevel.api.content.book.repository; + +import com.linglevel.api.common.AbstractDatabaseTest; +import com.linglevel.api.content.book.dto.GetChaptersRequest; +import com.linglevel.api.content.book.entity.BookProgress; +import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.common.ProgressStatus; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; 
+import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.data.mongo.DataMongoTest; +import org.springframework.context.annotation.Import; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; + +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; + +@DataMongoTest +@Import(ChapterRepositoryImpl.class) +class ChapterRepositoryImplTest extends AbstractDatabaseTest { + + @Autowired + private ChapterRepository chapterRepository; + + @Autowired + private BookProgressRepository bookProgressRepository; + + private static final String BOOK_ID = "book-1"; + private static final String USER_ID = "user-1"; + + @BeforeEach + void setUp() { + bookProgressRepository.deleteAll(); + chapterRepository.deleteAll(); + + chapterRepository.saveAll(List.of( + createChapter(1, "Chapter 1"), + createChapter(2, "Chapter 2"), + createChapter(3, "Chapter 3") + )); + } + + @Test + @DisplayName("진도 정보가 없으면 NOT_STARTED 필터는 모든 챕터를 반환한다") + void findChaptersWithFilters_returnsAllChaptersWhenNoProgress() { + GetChaptersRequest request = GetChaptersRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page result = chapterRepository.findChaptersWithFilters(BOOK_ID, request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Chapter::getChapterNumber).containsExactly(1, 2, 3); + assertThat(result.getTotalElements()).isEqualTo(3); + } + + @Test + @DisplayName("V3 chapterProgresses 기준으로 IN_PROGRESS와 COMPLETED를 구분한다") + void findChaptersWithFilters_usesV3ChapterProgresses() { + BookProgress progress = new BookProgress(); + progress.setUserId(USER_ID); + progress.setBookId(BOOK_ID); + progress.setChapterProgresses(List.of( + BookProgress.ChapterProgressInfo.builder() + .chapterNumber(1) + 
.progressPercentage(100.0) + .isCompleted(true) + .build(), + BookProgress.ChapterProgressInfo.builder() + .chapterNumber(2) + .progressPercentage(50.0) + .isCompleted(false) + .build() + )); + bookProgressRepository.save(progress); + + GetChaptersRequest inProgressRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.IN_PROGRESS) + .build(); + GetChaptersRequest completedRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.COMPLETED) + .build(); + GetChaptersRequest notStartedRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page inProgress = chapterRepository.findChaptersWithFilters(BOOK_ID, inProgressRequest, USER_ID, defaultPageable()); + Page completed = chapterRepository.findChaptersWithFilters(BOOK_ID, completedRequest, USER_ID, defaultPageable()); + Page notStarted = chapterRepository.findChaptersWithFilters(BOOK_ID, notStartedRequest, USER_ID, defaultPageable()); + + assertThat(inProgress.getContent()).extracting(Chapter::getChapterNumber).containsExactly(2); + assertThat(completed.getContent()).extracting(Chapter::getChapterNumber).containsExactly(1); + assertThat(notStarted.getContent()).extracting(Chapter::getChapterNumber).containsExactly(3); + } + + @Test + @DisplayName("fallback 데이터에서는 currentReadChapterNumber 기준으로 챕터 상태를 구분한다") + void findChaptersWithFilters_usesFallbackProgressData() { + BookProgress progress = new BookProgress(); + progress.setUserId(USER_ID); + progress.setBookId(BOOK_ID); + progress.setCurrentReadChapterNumber(2); + bookProgressRepository.save(progress); + + GetChaptersRequest completedRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.COMPLETED) + .build(); + GetChaptersRequest inProgressRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.IN_PROGRESS) + .build(); + GetChaptersRequest notStartedRequest = GetChaptersRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page completed = 
chapterRepository.findChaptersWithFilters(BOOK_ID, completedRequest, USER_ID, defaultPageable()); + Page inProgress = chapterRepository.findChaptersWithFilters(BOOK_ID, inProgressRequest, USER_ID, defaultPageable()); + Page notStarted = chapterRepository.findChaptersWithFilters(BOOK_ID, notStartedRequest, USER_ID, defaultPageable()); + + assertThat(completed.getContent()).extracting(Chapter::getChapterNumber).containsExactly(1); + assertThat(inProgress.getContent()).extracting(Chapter::getChapterNumber).containsExactly(2); + assertThat(notStarted.getContent()).extracting(Chapter::getChapterNumber).containsExactly(3); + } + + private Pageable defaultPageable() { + return PageRequest.of(0, 10, Sort.by(Sort.Direction.ASC, "chapterNumber")); + } + + private Chapter createChapter(int chapterNumber, String title) { + Chapter chapter = new Chapter(); + chapter.setId("chapter-" + chapterNumber); + chapter.setBookId(BOOK_ID); + chapter.setChapterNumber(chapterNumber); + chapter.setTitle(title); + return chapter; + } +} diff --git a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java index 6837178..2495715 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java @@ -2,10 +2,14 @@ import com.linglevel.api.common.dto.PageResponse; import com.linglevel.api.content.book.dto.ChapterResponse; +import com.linglevel.api.content.book.dto.ChapterNavigationResponse; +import com.linglevel.api.content.book.dto.ChunkCountByLevelDto; import com.linglevel.api.content.book.dto.GetChaptersRequest; import com.linglevel.api.content.book.entity.Book; import com.linglevel.api.content.book.entity.BookProgress; import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.book.exception.BooksErrorCode; +import 
com.linglevel.api.content.book.exception.BooksException; import com.linglevel.api.content.book.repository.BookProgressRepository; import com.linglevel.api.content.book.repository.BookRepository; import com.linglevel.api.content.book.repository.ChapterRepository; @@ -32,7 +36,10 @@ import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) @@ -77,10 +84,10 @@ void setUp() { testBook.setChapterCount(10); testBook.setCreatedAt(Instant.now()); - when(bookService.findById(anyString())).thenReturn(testBook); + lenient().when(bookService.findById(anyString())).thenReturn(testBook); // Add stubs for the new repository methods called during refactoring - when(chunkRepository.findChunkCountsByChapterIds(anyList())).thenReturn(Collections.emptyList()); + lenient().when(chunkRepository.findChunkCountsByChapterIds(anyList())).thenReturn(Collections.emptyList()); } @Test @@ -236,6 +243,106 @@ void testNoProgress_NotStarted() { assertThat(response.getTotalCount()).isEqualTo(10); } + @Test + @DisplayName("단일 챕터 조회 시 V3 chapterProgresses 정보를 기준으로 응답을 계산한다") + void getChapter_usesV3ChapterProgressInfo() { + // given + Chapter chapter = createChapter(testBook.getId(), 2, "Chapter 2"); + + BookProgress progress = new BookProgress(); + progress.setUserId(testUser.getId()); + progress.setBookId(testBook.getId()); + progress.setCurrentDifficultyLevel(DifficultyLevel.B1); + progress.setChapterProgresses(List.of( + BookProgress.ChapterProgressInfo.builder() + .chapterNumber(2) + .progressPercentage(37.5) + .isCompleted(false) + .build() + )); + + when(chapterRepository.findById(chapter.getId())).thenReturn(Optional.of(chapter)); + when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), 
testBook.getId())) + .thenReturn(Optional.of(progress)); + when(chunkRepository.findChunkCountsByChapterIds(List.of(chapter.getId()))) + .thenReturn(List.of(new ChunkCountByLevelDto(chapter.getId(), DifficultyLevel.B1, 8L))); + + // when + ChapterResponse response = chapterService.getChapter(testBook.getId(), chapter.getId(), testUser.getId()); + + // then + assertThat(response.getId()).isEqualTo(chapter.getId()); + assertThat(response.getCurrentDifficultyLevel()).isEqualTo(DifficultyLevel.B1); + assertThat(response.getChunkCount()).isEqualTo(8); + assertThat(response.getProgressPercentage()).isEqualTo(37.5); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(3); + assertThat(response.getIsCompleted()).isFalse(); + } + + @Test + @DisplayName("챕터가 다른 책에 속하면 CHAPTER_NOT_FOUND_IN_BOOK 예외를 던진다") + void getChapter_throwsWhenChapterDoesNotBelongToBook() { + // given + Chapter anotherBookChapter = createChapter("another-book", 1, "Wrong Chapter"); + when(chapterRepository.findById(anotherBookChapter.getId())).thenReturn(Optional.of(anotherBookChapter)); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> chapterService.getChapter(testBook.getId(), anotherBookChapter.getId(), testUser.getId()) + ); + + // then + assertThat(exception.getMessage()).isEqualTo(BooksErrorCode.CHAPTER_NOT_FOUND_IN_BOOK.getMessage()); + } + + @Test + @DisplayName("챕터 네비게이션 조회 시 이전/다음 챕터 정보를 반환한다") + void getChapterNavigation_returnsPreviousAndNextChapter() { + // given + Chapter currentChapter = createChapter(testBook.getId(), 2, "Chapter 2"); + Chapter previousChapter = createChapter(testBook.getId(), 1, "Chapter 1"); + Chapter nextChapter = createChapter(testBook.getId(), 3, "Chapter 3"); + + when(bookService.existsById(testBook.getId())).thenReturn(true); + when(chapterRepository.findById(currentChapter.getId())).thenReturn(Optional.of(currentChapter)); + when(chapterRepository.findByBookIdAndChapterNumber(testBook.getId(), 
1)).thenReturn(Optional.of(previousChapter)); + when(chapterRepository.findByBookIdAndChapterNumber(testBook.getId(), 3)).thenReturn(Optional.of(nextChapter)); + + // when + ChapterNavigationResponse response = chapterService.getChapterNavigation(testBook.getId(), currentChapter.getId()); + + // then + assertThat(response.getCurrentChapterId()).isEqualTo(currentChapter.getId()); + assertThat(response.getCurrentChapterNumber()).isEqualTo(2); + assertThat(response.getHasPreviousChapter()).isTrue(); + assertThat(response.getPreviousChapterId()).isEqualTo(previousChapter.getId()); + assertThat(response.getHasNextChapter()).isTrue(); + assertThat(response.getNextChapterId()).isEqualTo(nextChapter.getId()); + } + + @Test + @DisplayName("챕터 목록 조회 시 viewCount를 증가시킨다") + void getChapters_incrementsBookViewCount() { + // given + GetChaptersRequest request = GetChaptersRequest.builder() + .page(1) + .limit(2) + .build(); + + List chapters = createChapters(1, testBook.getId(), 1, "Chapter"); + Page chapterPage = new PageImpl<>(chapters, PageRequest.of(0, 2), 1); + + when(chapterRepository.findChaptersWithFilters(anyString(), any(), any(), any())) + .thenReturn(chapterPage); + + // when + chapterService.getChapters(testBook.getId(), request, testUser.getId()); + + // then + verify(bookRepository).incrementViewCount(testBook.getId()); + } + private List createChapters(int count, String bookId, int startNumber, String titlePrefix) { List chapters = new java.util.ArrayList<>(); for (int i = 0; i < count; i++) { @@ -254,4 +361,4 @@ private Chapter createChapter(String bookId, Integer chapterNumber, String title chapter.setReadingTime(30); return chapter; } -} \ No newline at end of file +} diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java index 4f8b79d..fc3c5e0 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java +++ 
b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java @@ -1,12 +1,16 @@ package com.linglevel.api.content.book.service; import com.linglevel.api.content.book.dto.ProgressUpdateRequest; +import com.linglevel.api.content.book.dto.ProgressResponse; import com.linglevel.api.content.book.entity.BookProgress; import com.linglevel.api.content.book.entity.Chapter; import com.linglevel.api.content.book.entity.Chunk; +import com.linglevel.api.content.book.exception.BooksErrorCode; +import com.linglevel.api.content.book.exception.BooksException; import com.linglevel.api.content.book.repository.BookProgressRepository; import com.linglevel.api.content.book.repository.ChapterRepository; import com.linglevel.api.content.book.repository.ChunkRepository; +import com.linglevel.api.content.common.DifficultyLevel; import com.linglevel.api.content.common.service.ProgressCalculationService; import com.linglevel.api.content.common.service.ReadingCompletionService; import com.linglevel.api.streak.service.StreakService; @@ -23,7 +27,10 @@ import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -114,4 +121,164 @@ void updateProgress_shouldLazyMigrate_forOldBookProgress() { assertThat(savedProgress.getChapterProgresses()).hasSize(1); assertThat(savedProgress.getChapterProgresses().get(0).getChapterNumber()).isEqualTo(1); } + + @Test + @DisplayName("진도 정보가 없으면 첫 챕터와 첫 청크 기준으로 초기 진도를 생성해 반환한다") + void getProgress_initializesProgressWhenMissing() { + // given + String userId = "user-1"; + String bookId = "book-1"; + + Chapter firstChapter = new Chapter(); + firstChapter.setId("chapter-1"); + firstChapter.setChapterNumber(1); + + Chunk firstChunk = new Chunk(); 
+ firstChunk.setId("chunk-1"); + firstChunk.setChapterId("chapter-1"); + firstChunk.setChunkNumber(1); + firstChunk.setDifficultyLevel(DifficultyLevel.A1); + + BookProgress savedProgress = new BookProgress(); + savedProgress.setId("progress-1"); + savedProgress.setUserId(userId); + savedProgress.setBookId(bookId); + savedProgress.setChapterId("chapter-1"); + savedProgress.setChunkId("chunk-1"); + savedProgress.setCurrentReadChapterNumber(1); + savedProgress.setMaxReadChapterNumber(1); + savedProgress.setCurrentDifficultyLevel(DifficultyLevel.A1); + savedProgress.setNormalizedProgress(12.5); + savedProgress.setMaxNormalizedProgress(12.5); + + when(bookService.existsById(bookId)).thenReturn(true); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.empty()); + when(chapterService.findFirstByBookId(bookId)).thenReturn(firstChapter); + when(chunkService.findFirstByChapterId("chapter-1")).thenReturn(firstChunk); + when(chunkRepository.countByChapterIdAndDifficultyLevel("chapter-1", DifficultyLevel.A1)).thenReturn(8L); + when(progressCalculationService.calculateNormalizedProgress(1, 8L)).thenReturn(12.5); + when(bookProgressRepository.save(any(BookProgress.class))).thenReturn(savedProgress); + when(chunkService.findById("chunk-1")).thenReturn(firstChunk); + + // when + ProgressResponse response = progressService.getProgress(bookId, userId); + + // then + verify(bookProgressRepository).save(bookProgressCaptor.capture()); + BookProgress initialized = bookProgressCaptor.getValue(); + + assertThat(initialized.getUserId()).isEqualTo(userId); + assertThat(initialized.getBookId()).isEqualTo(bookId); + assertThat(initialized.getChapterId()).isEqualTo("chapter-1"); + assertThat(initialized.getChunkId()).isEqualTo("chunk-1"); + assertThat(initialized.getCurrentReadChapterNumber()).isEqualTo(1); + assertThat(initialized.getMaxReadChapterNumber()).isEqualTo(1); + assertThat(initialized.getCurrentDifficultyLevel()).isEqualTo(DifficultyLevel.A1); + 
+ assertThat(response.getId()).isEqualTo("progress-1"); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(1); + assertThat(response.getNormalizedProgress()).isEqualTo(12.5); + assertThat(response.getStreakUpdated()).isFalse(); + } + + @Test + @DisplayName("기존 챕터 진행률이 있으면 같은 챕터 항목을 업데이트하고 중복 추가하지 않는다") + void updateProgress_updatesExistingChapterProgressEntry() { + // given + String userId = "user-1"; + String bookId = "book-1"; + String chunkId = "chunk-3"; + String chapterId = "chapter-1"; + + BookProgress progress = new BookProgress(); + progress.setId("progress-1"); + progress.setUserId(userId); + progress.setBookId(bookId); + progress.setChapterProgresses(new ArrayList<>()); + progress.getChapterProgresses().add(BookProgress.ChapterProgressInfo.builder() + .chapterNumber(1) + .progressPercentage(20.0) + .isCompleted(false) + .build()); + + Chunk chunk = new Chunk(); + chunk.setId(chunkId); + chunk.setChapterId(chapterId); + chunk.setChunkNumber(3); + chunk.setDifficultyLevel(DifficultyLevel.A1); + + Chapter chapter = new Chapter(); + chapter.setId(chapterId); + chapter.setBookId(bookId); + chapter.setChapterNumber(1); + + ProgressUpdateRequest request = new ProgressUpdateRequest(); + request.setChunkId(chunkId); + + when(bookService.existsById(bookId)).thenReturn(true); + when(chunkService.findById(chunkId)).thenReturn(chunk); + when(chapterService.findById(chapterId)).thenReturn(chapter); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.of(progress)); + when(chunkRepository.countByChapterIdAndDifficultyLevel(chapterId, DifficultyLevel.A1)).thenReturn(5L); + when(chapterRepository.countByBookId(bookId)).thenReturn(10); + when(readingCompletionService.processReadingCompletion(userId, com.linglevel.api.content.common.ContentType.BOOK, chapterId, null)) + .thenReturn(null); + + // when + ProgressResponse response = progressService.updateProgress(bookId, request, userId); + + // then + 
verify(bookProgressRepository).save(bookProgressCaptor.capture()); + BookProgress saved = bookProgressCaptor.getValue(); + + assertThat(saved.getChapterProgresses()).hasSize(1); + assertThat(saved.getChapterProgresses().get(0).getChapterNumber()).isEqualTo(1); + assertThat(saved.getChapterProgresses().get(0).getProgressPercentage()).isEqualTo(60.0); + assertThat(saved.getChapterProgresses().get(0).getIsCompleted()).isFalse(); + assertThat(saved.getCurrentReadChapterNumber()).isEqualTo(1); + assertThat(saved.getChunkId()).isEqualTo(chunkId); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(3); + assertThat(response.getStreakUpdated()).isFalse(); + } + + @Test + @DisplayName("deleteProgress는 기존 진도 정보를 삭제한다") + void deleteProgress_deletesExistingProgress() { + // given + String userId = "user-1"; + String bookId = "book-1"; + + BookProgress progress = new BookProgress(); + progress.setId("progress-1"); + + when(bookService.existsById(bookId)).thenReturn(true); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.of(progress)); + + // when + progressService.deleteProgress(bookId, userId); + + // then + verify(bookProgressRepository).delete(progress); + } + + @Test + @DisplayName("deleteProgress는 진도 정보가 없으면 PROGRESS_NOT_FOUND 예외를 던진다") + void deleteProgress_throwsWhenProgressMissing() { + // given + String userId = "user-1"; + String bookId = "book-1"; + + when(bookService.existsById(bookId)).thenReturn(true); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.empty()); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> progressService.deleteProgress(bookId, userId) + ); + + // then + assertThat(exception.getMessage()).isEqualTo(BooksErrorCode.PROGRESS_NOT_FOUND.getMessage()); + verify(bookProgressRepository, never()).delete(any(BookProgress.class)); + } } From 0b39a2917cbd5968f351910a85058455e6de8a99 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 
Apr 2026 15:23:32 +0900 Subject: [PATCH 14/28] test: cover book service edge cases --- .../content/book/service/BookServiceTest.java | 70 +++++++++++++++++++ .../book/service/ChapterServiceTest.java | 37 ++++++++++ .../ProgressServiceIntegrationTest.java | 28 ++++---- .../book/service/ProgressServiceTest.java | 63 +++++++++++++++++ 4 files changed, 186 insertions(+), 12 deletions(-) diff --git a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java index 994d669..0210c13 100644 --- a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java @@ -5,11 +5,14 @@ import com.linglevel.api.content.book.entity.Book; import com.linglevel.api.content.book.entity.BookProgress; import com.linglevel.api.content.book.entity.Chapter; +import com.linglevel.api.content.book.exception.BooksErrorCode; +import com.linglevel.api.content.book.exception.BooksException; import com.linglevel.api.content.book.repository.BookProgressRepository; import com.linglevel.api.content.book.repository.BookRepository; import com.linglevel.api.content.common.DifficultyLevel; import com.linglevel.api.content.common.ProgressStatus; import com.linglevel.api.content.common.TitleTranslations; +import com.linglevel.api.i18n.LanguageCode; import com.linglevel.api.s3.service.ImageResizeService; import com.linglevel.api.s3.service.S3AiService; import com.linglevel.api.s3.service.S3TransferService; @@ -35,6 +38,7 @@ import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; @@ -190,6 +194,72 @@ void importBook_keepsOriginalCoverUrlWhenResizeFails() { 
verify(bookReadingTimeService).updateReadingTimes("saved-book-id", importData); } + @Test + @DisplayName("단일 책 조회 시 요청 언어에 맞는 번역 제목을 우선 사용한다") + void getBook_selectsTranslatedTitleByLanguage() { + // given + Book book = createBook("Original title", "Author", List.of("tag1")); + book.setTitleTranslations(new TitleTranslations("번역 제목", "Original title")); + when(bookRepository.findById(book.getId())).thenReturn(Optional.of(book)); + + // when + BookResponse response = bookService.getBook(book.getId(), testUser.getId(), LanguageCode.KO); + + // then + assertThat(response.getTitle()).isEqualTo("번역 제목"); + } + + @Test + @DisplayName("번역 제목이 비어 있으면 원본 제목으로 fallback 한다") + void getBook_fallsBackToOriginalTitleWhenTranslationMissing() { + // given + Book book = createBook("Original title", "Author", List.of("tag1")); + book.setTitleTranslations(new TitleTranslations(null, "Original title")); + when(bookRepository.findById(book.getId())).thenReturn(Optional.of(book)); + + // when + BookResponse response = bookService.getBook(book.getId(), testUser.getId(), LanguageCode.KO); + + // then + assertThat(response.getTitle()).isEqualTo("Original title"); + } + + @Test + @DisplayName("단일 책 조회 시 책이 없으면 BOOK_NOT_FOUND 예외를 던진다") + void getBook_throwsWhenBookMissing() { + // given + when(bookRepository.findById("missing-book")).thenReturn(Optional.empty()); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> bookService.getBook("missing-book", testUser.getId(), LanguageCode.EN) + ); + + // then + assertThat(exception.getMessage()).isEqualTo(BooksErrorCode.BOOK_NOT_FOUND.getMessage()); + } + + @Test + @DisplayName("지원하지 않는 sortBy 값이면 INVALID_SORT_BY 예외를 던진다") + void getBooks_throwsWhenSortByInvalid() { + // given + GetBooksRequest request = GetBooksRequest.builder() + .sortBy("unknown-sort") + .page(1) + .limit(10) + .build(); + + // when + BooksException exception = assertThrows( + BooksException.class, + () -> bookService.getBooks(request, 
testUser.getId()) + ); + + // then + assertThat(exception.getMessage()).isEqualTo(BooksErrorCode.INVALID_SORT_BY.getMessage()); + } + @Test @DisplayName("진도 필터링과 페이지네이션이 함께 동작할 때 - IN_PROGRESS 필터") void testProgressFilterWithPagination_InProgress() { diff --git a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java index 2495715..f224059 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java @@ -279,6 +279,43 @@ void getChapter_usesV3ChapterProgressInfo() { assertThat(response.getIsCompleted()).isFalse(); } + @Test + @DisplayName("단일 챕터 조회 시 V3 데이터가 없으면 fallback progress 정보로 응답을 계산한다") + void getChapter_usesFallbackProgressInfoWhenV3DataMissing() { + // given + Chapter chapter = createChapter(testBook.getId(), 2, "Chapter 2"); + + BookProgress progress = new BookProgress(); + progress.setUserId(testUser.getId()); + progress.setBookId(testBook.getId()); + progress.setChunkId("progress-chunk"); + progress.setCurrentReadChapterNumber(2); + progress.setCurrentDifficultyLevel(DifficultyLevel.B1); + progress.setChapterProgresses(null); + + com.linglevel.api.content.book.entity.Chunk progressChunk = new com.linglevel.api.content.book.entity.Chunk(); + progressChunk.setId("progress-chunk"); + progressChunk.setChunkNumber(3); + + when(chapterRepository.findById(chapter.getId())).thenReturn(Optional.of(chapter)); + when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), testBook.getId())) + .thenReturn(Optional.of(progress)); + when(chunkRepository.findById("progress-chunk")).thenReturn(Optional.of(progressChunk)); + when(chunkRepository.findChunkCountsByChapterIds(List.of(chapter.getId()))) + .thenReturn(List.of(new ChunkCountByLevelDto(chapter.getId(), DifficultyLevel.B1, 8L))); + + // when + ChapterResponse response = 
chapterService.getChapter(testBook.getId(), chapter.getId(), testUser.getId()); + + // then + assertThat(response.getId()).isEqualTo(chapter.getId()); + assertThat(response.getCurrentDifficultyLevel()).isEqualTo(DifficultyLevel.B1); + assertThat(response.getChunkCount()).isEqualTo(8); + assertThat(response.getProgressPercentage()).isEqualTo(37.5); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(3); + assertThat(response.getIsCompleted()).isFalse(); + } + @Test @DisplayName("챕터가 다른 책에 속하면 CHAPTER_NOT_FOUND_IN_BOOK 예외를 던진다") void getChapter_throwsWhenChapterDoesNotBelongToBook() { diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceIntegrationTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceIntegrationTest.java index 747afe2..881d1fe 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceIntegrationTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceIntegrationTest.java @@ -107,6 +107,8 @@ void updateProgress_ChapterCompletion_CallsBothMethods() { .thenReturn(10); // 총 10개 챕터 when(readingCompletionService.processReadingCompletion(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID, null)) .thenReturn(120L); + when(streakService.updateStreak(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID)) + .thenReturn(true); // when progressService.updateProgress(TEST_BOOK_ID, request, TEST_USER_ID); @@ -114,12 +116,12 @@ void updateProgress_ChapterCompletion_CallsBothMethods() { // then - 세 가지 메서드가 순서대로 호출됨 verify(streakService).addStudyTime(TEST_USER_ID, 120L); verify(streakService).updateStreak(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID); - verify(streakService).addCompletedContent(eq(TEST_USER_ID), eq(ContentType.BOOK), eq(TEST_CHAPTER_ID), anyBoolean()); + verify(streakService).addCompletedContent(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID, true); } @Test - @DisplayName("같은 날 두 번째 챕터 완료 시 - 스트릭은 false, 완료 기록은 정상") - void 
updateProgress_SecondChapterSameDay_OnlyCompletionRecorded() { + @DisplayName("updateStreak가 false를 반환하면 완료 기록에도 false를 전달한다") + void updateProgress_passesFalseWhenStreakIsNotUpdated() { // given ProgressUpdateRequest request = new ProgressUpdateRequest(); request.setChunkId(TEST_CHUNK_ID); @@ -135,19 +137,21 @@ void updateProgress_SecondChapterSameDay_OnlyCompletionRecorded() { .thenReturn(10); when(readingCompletionService.processReadingCompletion(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID, null)) .thenReturn(120L); + when(streakService.updateStreak(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID)) + .thenReturn(false); // when progressService.updateProgress(TEST_BOOK_ID, request, TEST_USER_ID); - // then - addCompletedContent는 여전히 호출됨 + // then verify(streakService).addStudyTime(TEST_USER_ID, 120L); verify(streakService).updateStreak(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID); - verify(streakService).addCompletedContent(eq(TEST_USER_ID), eq(ContentType.BOOK), eq(TEST_CHAPTER_ID), anyBoolean()); + verify(streakService).addCompletedContent(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID, false); } @Test - @DisplayName("세션 유효하지 않아도 학습 시간과 완료 기록은 정상 처리") - void updateProgress_InvalidSession_StudyTimeAndCompletionStillRecorded() { + @DisplayName("마지막 청크여도 읽기 시간이 30초 미만이면 스트릭 관련 메서드를 호출하지 않는다") + void updateProgress_shortReadTime_skipsStreakUpdates() { // given ProgressUpdateRequest request = new ProgressUpdateRequest(); request.setChunkId(TEST_CHUNK_ID); @@ -162,15 +166,15 @@ void updateProgress_InvalidSession_StudyTimeAndCompletionStillRecorded() { when(chapterRepository.countByBookId(TEST_BOOK_ID)) .thenReturn(10); when(readingCompletionService.processReadingCompletion(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID, null)) - .thenReturn(30L); // 짧은 시간 + .thenReturn(29L); // when progressService.updateProgress(TEST_BOOK_ID, request, TEST_USER_ID); - // then - 학습 시간과 완료 기록은 정상 처리됨 - verify(streakService).addStudyTime(TEST_USER_ID, 30L); - 
verify(streakService).updateStreak(TEST_USER_ID, ContentType.BOOK, TEST_CHAPTER_ID); - verify(streakService).addCompletedContent(eq(TEST_USER_ID), eq(ContentType.BOOK), eq(TEST_CHAPTER_ID), anyBoolean()); + // then + verify(streakService, never()).addStudyTime(any(), anyLong()); + verify(streakService, never()).updateStreak(any(), any(), any()); + verify(streakService, never()).addCompletedContent(any(), any(), any(), anyBoolean()); } @Test diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java index fc3c5e0..7d7375d 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java @@ -241,6 +241,69 @@ void updateProgress_updatesExistingChapterProgressEntry() { assertThat(response.getStreakUpdated()).isFalse(); } + @Test + @DisplayName("마지막 남은 챕터를 완료하면 책 전체를 완료 상태로 저장하고 streakUpdated를 반영한다") + void updateProgress_marksBookCompletedWhenLastRemainingChapterFinishes() { + // given + String userId = "user-1"; + String bookId = "book-1"; + String chunkId = "chunk-4"; + String chapterId = "chapter-2"; + + BookProgress progress = new BookProgress(); + progress.setId("progress-1"); + progress.setUserId(userId); + progress.setBookId(bookId); + progress.setIsCompleted(false); + progress.setChapterProgresses(new ArrayList<>()); + progress.getChapterProgresses().add(BookProgress.ChapterProgressInfo.builder() + .chapterNumber(1) + .progressPercentage(100.0) + .isCompleted(true) + .build()); + + Chunk chunk = new Chunk(); + chunk.setId(chunkId); + chunk.setChapterId(chapterId); + chunk.setChunkNumber(4); + chunk.setDifficultyLevel(DifficultyLevel.A1); + + Chapter chapter = new Chapter(); + chapter.setId(chapterId); + chapter.setBookId(bookId); + chapter.setChapterNumber(2); + + ProgressUpdateRequest request = new ProgressUpdateRequest(); + 
request.setChunkId(chunkId); + + when(bookService.existsById(bookId)).thenReturn(true); + when(chunkService.findById(chunkId)).thenReturn(chunk); + when(chapterService.findById(chapterId)).thenReturn(chapter); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.of(progress)); + when(chunkRepository.countByChapterIdAndDifficultyLevel(chapterId, DifficultyLevel.A1)).thenReturn(4L); + when(chapterRepository.countByBookId(bookId)).thenReturn(2); + when(readingCompletionService.processReadingCompletion(userId, com.linglevel.api.content.common.ContentType.BOOK, chapterId, null)) + .thenReturn(45L); + when(streakService.updateStreak(userId, com.linglevel.api.content.common.ContentType.BOOK, chapterId)) + .thenReturn(true); + + // when + ProgressResponse response = progressService.updateProgress(bookId, request, userId); + + // then + verify(bookProgressRepository).save(bookProgressCaptor.capture()); + BookProgress saved = bookProgressCaptor.getValue(); + + assertThat(saved.getChapterProgresses()).hasSize(2); + assertThat(saved.getChapterProgresses().get(1).getChapterNumber()).isEqualTo(2); + assertThat(saved.getChapterProgresses().get(1).getProgressPercentage()).isEqualTo(100.0); + assertThat(saved.getChapterProgresses().get(1).getIsCompleted()).isTrue(); + assertThat(saved.getIsCompleted()).isTrue(); + assertThat(saved.getCompletedAt()).isNotNull(); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(4); + assertThat(response.getStreakUpdated()).isTrue(); + } + @Test @DisplayName("deleteProgress는 기존 진도 정보를 삭제한다") void deleteProgress_deletesExistingProgress() { From c6d531bfba34eb06cdc1b31dbc496d5d971eb790 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 15:24:52 +0900 Subject: [PATCH 15/28] fix: correct k6 book response validation and ids --- k6/scripts/common/endpoints.js | 6 +- .../book/seed-books-content.mongosh.js | 55 ++++++++++++++----- 2 files changed, 44 insertions(+), 17 deletions(-) diff --git 
a/k6/scripts/common/endpoints.js b/k6/scripts/common/endpoints.js index f6d9fc9..9566131 100644 --- a/k6/scripts/common/endpoints.js +++ b/k6/scripts/common/endpoints.js @@ -53,7 +53,7 @@ const endpointCatalog = { query: buildBooksBaseQuery(), variant: 'default_list', }), - validate: (response) => validateArrayResponse(response, 'content'), + validate: (response) => validateArrayResponse(response, 'data'), }, 'books.progress_filter': { name: 'books.progress_filter', @@ -69,7 +69,7 @@ const endpointCatalog = { variant: `progress_${progress.toLowerCase()}`, }; }, - validate: (response) => validateArrayResponse(response, 'content'), + validate: (response) => validateArrayResponse(response, 'data'), }, 'books.pagination': { name: 'books.pagination', @@ -85,7 +85,7 @@ const endpointCatalog = { variant: `pagination_limit_${limit}`, }; }, - validate: (response) => validateArrayResponse(response, 'content'), + validate: (response) => validateArrayResponse(response, 'data'), }, }; diff --git a/k6/seed/content/book/seed-books-content.mongosh.js b/k6/seed/content/book/seed-books-content.mongosh.js index 8d151d6..92fcf21 100644 --- a/k6/seed/content/book/seed-books-content.mongosh.js +++ b/k6/seed/content/book/seed-books-content.mongosh.js @@ -77,15 +77,36 @@ function resetExistingSeed(collections, seedPrefix) { const deletedProgresses = collections.bookProgresses.deleteMany({ $or: [ + { _id: idRegex }, { id: idRegex }, { userId: idRegex }, { bookId: idRegex }, ], }).deletedCount; - const deletedChunks = collections.chunks.deleteMany({ id: idRegex }).deletedCount; - const deletedChapters = collections.chapters.deleteMany({ id: idRegex }).deletedCount; - const deletedBooks = collections.books.deleteMany({ id: idRegex }).deletedCount; - const deletedUsers = collections.users.deleteMany({ username: usernameRegex }).deletedCount; + const deletedChunks = collections.chunks.deleteMany({ + $or: [ + { _id: idRegex }, + { id: idRegex }, + ], + }).deletedCount; + const deletedChapters = 
collections.chapters.deleteMany({ + $or: [ + { _id: idRegex }, + { id: idRegex }, + ], + }).deletedCount; + const deletedBooks = collections.books.deleteMany({ + $or: [ + { _id: idRegex }, + { id: idRegex }, + ], + }).deletedCount; + const deletedUsers = collections.users.deleteMany({ + $or: [ + { _id: idRegex }, + { username: usernameRegex }, + ], + }).deletedCount; print(`[seed] Reset existing seed docs: users=${deletedUsers}, books=${deletedBooks}, chapters=${deletedChapters}, chunks=${deletedChunks}, bookProgresses=${deletedProgresses}`); } @@ -221,8 +242,8 @@ function upsertUsers(collection, users) { collection.bulkWrite( users.map((user) => ({ updateOne: { - filter: { username: user.username }, - update: { $set: user }, + filter: { _id: user.id }, + update: { $set: toPersistedDocument(user) }, upsert: true, }, })), @@ -234,8 +255,8 @@ function upsertBooks(collection, books) { collection.bulkWrite( books.map((book) => ({ updateOne: { - filter: { id: book.id }, - update: { $set: book }, + filter: { _id: book.id }, + update: { $set: toPersistedDocument(book) }, upsert: true, }, })), @@ -247,8 +268,8 @@ function upsertChapters(collection, chapters) { collection.bulkWrite( chapters.map((chapter) => ({ updateOne: { - filter: { id: chapter.id }, - update: { $set: chapter }, + filter: { _id: chapter.id }, + update: { $set: toPersistedDocument(chapter) }, upsert: true, }, })), @@ -265,8 +286,8 @@ function upsertChunks(collection, chunks) { collection.bulkWrite( batch.map((chunk) => ({ updateOne: { - filter: { id: chunk.id }, - update: { $set: chunk }, + filter: { _id: chunk.id }, + update: { $set: toPersistedDocument(chunk) }, upsert: true, }, })), @@ -283,8 +304,8 @@ function upsertBookProgresses(collection, bookProgresses) { collection.bulkWrite( bookProgresses.map((progress) => ({ updateOne: { - filter: { userId: progress.userId, bookId: progress.bookId }, - update: { $set: progress }, + filter: { _id: progress.id }, + update: { $set: toPersistedDocument(progress) 
}, upsert: true, }, })), @@ -351,6 +372,12 @@ function buildBookProgresses(config, users, bookCatalog, random) { return { bookProgresses, summaryByUser }; } +function toPersistedDocument(entity) { + const document = Object.assign({}, entity); + delete document.id; + return document; +} + function buildCompletedBookProgress(progressId, user, bookEntry, random) { const completedAgo = randomInt(random, 7, 90); const completedAt = daysAgo(completedAgo); From 28d4723e3a940b76f28a3d6c23031cc4a3a02a33 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 7 Apr 2026 21:48:25 +0900 Subject: [PATCH 16/28] chore: align monitoring stack and simplify k6 setup --- k6/README.md | 7 ++- k6/docker-compose.yml | 18 +------ .../docker-compose.monitoring-local.yml | 54 ++++++++++++++++--- monitoring/docker-compose.monitoring-prod.yml | 31 +++++++++-- monitoring/loki-config.yml | 6 ++- monitoring/prometheus.yml | 15 ++++++ monitoring/promtail-config.yml | 4 +- 7 files changed, 103 insertions(+), 32 deletions(-) create mode 100644 monitoring/prometheus.yml diff --git a/k6/README.md b/k6/README.md index f62b6c2..141a82a 100644 --- a/k6/README.md +++ b/k6/README.md @@ -30,6 +30,7 @@ k6/ - 테스트용 사용자는 `X-Test-Username` 으로 인증 가능해야 한다. - 시드 생성은 [README.md](/Users/solfe/Desktop/WORK/llv/llv-api/k6/seed/README.md)를 따른다. - 기본 `BASE_URL` 은 `http://host.docker.internal:8080` 이다. +- 시계열 결과 저장이 필요하면 먼저 `influxdb` 컨테이너를 실행한다. ## 기본 엔드포인트 이름 @@ -41,6 +42,10 @@ k6/ ## 실행 예시 +```bash +docker compose -f k6/docker-compose.yml up -d influxdb +``` + ### Baseline 낮은 부하로 기준 응답시간과 기본 안정성을 확인한다. 
@@ -121,4 +126,4 @@ docker compose -f k6/docker-compose.yml run --rm \ - 콘솔 실시간 출력 - `/reports` 아래 JSON 결과 -- 필요하면 Grafana / InfluxDB 연동 +- 필요하면 InfluxDB 데이터를 monitoring Grafana에서 조회 diff --git a/k6/docker-compose.yml b/k6/docker-compose.yml index 12f64d8..508db60 100644 --- a/k6/docker-compose.yml +++ b/k6/docker-compose.yml @@ -10,18 +10,6 @@ services: volumes: - influxdb-data:/var/lib/influxdb - grafana: - image: grafana/grafana:latest - ports: - - "3000:3000" - environment: - - GF_SECURITY_ADMIN_PASSWORD=admin - - GF_SECURITY_ADMIN_USER=admin - volumes: - - grafana-data:/var/lib/grafana - depends_on: - - influxdb - k6: image: grafana/k6:latest volumes: @@ -35,9 +23,7 @@ services: volumes: influxdb-data: - grafana-data: # 사용법: -# docker-compose up -d influxdb grafana # 모니터링 스택 시작 -# docker-compose run --rm k6 run /scripts/smoke-test.js # 테스트 실행 -# Grafana: http://localhost:3000 (admin/admin) \ No newline at end of file +# docker compose -f k6/docker-compose.yml up -d influxdb # k6 결과 저장소 시작 +# docker compose -f k6/docker-compose.yml run --rm k6 run /scripts/baseline.js diff --git a/monitoring/docker-compose.monitoring-local.yml b/monitoring/docker-compose.monitoring-local.yml index 839cefe..78ee96c 100644 --- a/monitoring/docker-compose.monitoring-local.yml +++ b/monitoring/docker-compose.monitoring-local.yml @@ -3,24 +3,64 @@ services: image: prom/prometheus:latest container_name: prometheus-local ports: - - "9090:9090" + - "${PROMETHEUS_PORT:-9090}:9090" env_file: - - path: .env.monitoring.local + - ../.env.local volumes: - - ./prometheus-local.yml:/etc/prometheus/prometheus.yml + - ./prometheus.yml:/etc/prometheus/prometheus.yml.tmpl:ro + - ./alert-rules.yml:/etc/prometheus/alert-rules.yml:ro - ./data/prometheus-data-local:/prometheus + entrypoint: /bin/sh + command: + - -ec + - | + TARGET="$${APP_METRICS_TARGET:-host.docker.internal:8080}"; + KEY="$${IMPORT_API_KEY:-}"; + ESC_TARGET="$$(printf '%s' "$$TARGET" | sed 's/[&|]/\\&/g')"; + ESC_KEY="$$(printf '%s' 
"$$KEY" | sed 's/[&|]/\\&/g')"; + sed -e "s|__APP_METRICS_TARGET__|$${ESC_TARGET}|g" \ + -e "s|__IMPORT_API_KEY__|$${ESC_KEY}|g" \ + /etc/prometheus/prometheus.yml.tmpl > /tmp/prometheus.yml; + exec prometheus --config.file=/tmp/prometheus.yml grafana: image: grafana/grafana:latest container_name: grafana-local ports: - - "3000:3000" - env_file: - - path: .env.monitoring.local + - "${MONITORING_GRAFANA_PORT:-3000}:3000" environment: - GF_SECURITY_ADMIN_USER=admin - GF_SECURITY_ADMIN_PASSWORD=password volumes: - ./data/grafana-data-local:/var/lib/grafana depends_on: - - prometheus \ No newline at end of file + - prometheus + + loki: + profiles: ["logs"] + image: grafana/loki:latest + container_name: loki-local + ports: + - "${LOKI_PORT:-3100}:3100" + volumes: + - ./loki-config.yml:/etc/loki/local-config.yaml:ro + - ./data/loki-data-local:/loki + command: + - "-config.file=/etc/loki/local-config.yaml" + + promtail: + profiles: ["logs"] + image: grafana/promtail:latest + container_name: promtail-local + environment: + - LOKI_PUSH_URL=http://loki:3100/loki/api/v1/push + volumes: + - ./promtail-config.yml:/etc/promtail/config.yml:ro + - /var/log:/var/log:ro + - /var/lib/docker/containers:/var/lib/docker/containers:ro + - /var/run/docker.sock:/var/run/docker.sock + command: + - "-config.file=/etc/promtail/config.yml" + - "-config.expand-env=true" + depends_on: + - loki diff --git a/monitoring/docker-compose.monitoring-prod.yml b/monitoring/docker-compose.monitoring-prod.yml index 9ccf67b..f221ead 100644 --- a/monitoring/docker-compose.monitoring-prod.yml +++ b/monitoring/docker-compose.monitoring-prod.yml @@ -11,10 +11,23 @@ services: - "9090" env_file: - path: .env.monitoring.prod + required: false volumes: - - ./prometheus-prod.yml:/etc/prometheus/prometheus.yml - - ./alert-rules.yml:/etc/prometheus/alert-rules.yml + - ./prometheus.yml:/etc/prometheus/prometheus.yml.tmpl:ro + - ./alert-rules.yml:/etc/prometheus/alert-rules.yml:ro - 
./data/prometheus-data-prod:/prometheus + entrypoint: /bin/sh + command: + - -ec + - | + TARGET="$${APP_METRICS_TARGET:-app:8080}"; + KEY="$${IMPORT_API_KEY:-}"; + ESC_TARGET="$$(printf '%s' "$$TARGET" | sed 's/[&|]/\\&/g')"; + ESC_KEY="$$(printf '%s' "$$KEY" | sed 's/[&|]/\\&/g')"; + sed -e "s|__APP_METRICS_TARGET__|$${ESC_TARGET}|g" \ + -e "s|__IMPORT_API_KEY__|$${ESC_KEY}|g" \ + /etc/prometheus/prometheus.yml.tmpl > /tmp/prometheus.yml; + exec prometheus --config.file=/tmp/prometheus.yml alertmanager: image: prom/alertmanager:latest @@ -23,6 +36,7 @@ services: - "9093" env_file: - path: .env.monitoring.prod + required: false volumes: - ./alertmanager.yml:/etc/alertmanager/alertmanager.yml - ./data/alertmanager-data-prod:/alertmanager @@ -34,6 +48,7 @@ services: - "3000" env_file: - path: .env.monitoring.prod + required: false environment: - GF_SECURITY_ADMIN_USER=admin - GF_SECURITY_ADMIN_PASSWORD=password @@ -50,6 +65,7 @@ services: - "3100" env_file: - path: .env.monitoring.prod + required: false volumes: - ./loki-config.yml:/etc/loki/local-config.yaml - ./data/loki-data-prod:/loki @@ -60,11 +76,16 @@ services: container_name: promtail-prod env_file: - path: .env.monitoring.prod + required: false + environment: + - LOKI_PUSH_URL=http://loki:3100/loki/api/v1/push volumes: - - ./promtail-config.yml:/etc/promtail/config.yml + - ./promtail-config.yml:/etc/promtail/config.yml:ro - /var/log:/var/log:ro - /var/lib/docker/containers:/var/lib/docker/containers:ro - /var/run/docker.sock:/var/run/docker.sock - command: -config.file=/etc/promtail/config.yml + command: + - "-config.file=/etc/promtail/config.yml" + - "-config.expand-env=true" depends_on: - - loki \ No newline at end of file + - loki diff --git a/monitoring/loki-config.yml b/monitoring/loki-config.yml index a6e8d69..6862275 100644 --- a/monitoring/loki-config.yml +++ b/monitoring/loki-config.yml @@ -22,4 +22,8 @@ schema_config: schema: v13 index: prefix: index_ - period: 24h \ No newline at end of file + 
period: 24h + +limits_config: + ingestion_rate_mb: 16 + ingestion_burst_size_mb: 32 diff --git a/monitoring/prometheus.yml b/monitoring/prometheus.yml new file mode 100644 index 0000000..8c25c55 --- /dev/null +++ b/monitoring/prometheus.yml @@ -0,0 +1,15 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +rule_files: + - /etc/prometheus/alert-rules.yml + +scrape_configs: + - job_name: "llv-api" + metrics_path: /actuator/prometheus + static_configs: + - targets: ["__APP_METRICS_TARGET__"] + authorization: + type: llvk + credentials: "__IMPORT_API_KEY__" diff --git a/monitoring/promtail-config.yml b/monitoring/promtail-config.yml index ff0367f..5b8d8c2 100644 --- a/monitoring/promtail-config.yml +++ b/monitoring/promtail-config.yml @@ -6,7 +6,7 @@ positions: filename: /tmp/positions.yaml clients: - - url: http://loki-prod:3100/loki/api/v1/push + - url: ${LOKI_PUSH_URL} scrape_configs: # Docker containers logs @@ -76,4 +76,4 @@ scrape_configs: source: timestamp - labels: level: - logger: \ No newline at end of file + logger: From d00422da455cdf0c9f26374b9489c2639529b26d Mon Sep 17 00:00:00 2001 From: solfe Date: Wed, 8 Apr 2026 16:45:42 +0900 Subject: [PATCH 17/28] docs: add glossary and concise decision log to architecture docs --- docs/architecture/README.md | 2 ++ docs/architecture/content-book.md | 17 +++++++++++++++++ docs/architecture/overview.md | 15 +++++++++++++++ docs/architecture/streak.md | 15 +++++++++++++++ docs/architecture/word.md | 15 +++++++++++++++ docs/templates/architecture-template.md | 24 ++++++++++++++++++++++++ 6 files changed, 88 insertions(+) diff --git a/docs/architecture/README.md b/docs/architecture/README.md index aa0e40b..157619c 100644 --- a/docs/architecture/README.md +++ b/docs/architecture/README.md @@ -6,6 +6,8 @@ - 목적은 현재 구조를 빠르게 이해하는 것이다. - 문서는 책임, 외부 의존성, 핵심 기능 흐름만 남긴다. +- 도메인별로 `핵심 용어 사전(용어/정의)`을 유지해 의미 흔들림을 줄인다. +- 도메인 의미에 영향을 주는 선택은 `간결 의사결정 기록`에 누적한다. - 핵심 기능은 중요도와 이해 난이도를 기준으로 2~3개만 고른다. 
- 도메인 내부 구현 상세나 세부 설계 판단은 `MISSIONS.md` 또는 `docs/decisions`에서 다룬다. diff --git a/docs/architecture/content-book.md b/docs/architecture/content-book.md index 5160b61..ba2c86e 100644 --- a/docs/architecture/content-book.md +++ b/docs/architecture/content-book.md @@ -10,6 +10,16 @@ - 챕터는 여러 개의 청크로 구성된다. - 청크는 텍스트, 이미지 등의 타입을 가질 수 있다. +## 핵심 용어 사전 + +| 용어 | 정의 | +| --- | --- | +| Book | 여러 챕터를 포함하는 최상위 학습 콘텐츠 | +| Chapter | Book 내부의 순차 학습 단위 | +| Chunk | Chapter 내부의 세부 학습 단위(텍스트/이미지 등) | +| BookProgress | 사용자별 책 학습 상태를 저장하는 엔티티 | +| normalizedProgress | 도메인에서 정의한 기준으로 정규화한 진행률 값 | + ## 외부 시스템 의존성 - MongoDB: 책, 챕터, 청크, 진행도 저장 @@ -104,6 +114,13 @@ sequenceDiagram 2. 진행도 업데이트는 `book`, `chapter`, `chunk`, `streak`를 함께 이해해야 한다. 3. import는 운영 기능이지만 파일 저장과 후처리가 함께 묶여 있어 읽기 진입점으로 가치가 있다. +## 간결 의사결정 기록 + +| 날짜 | 결정 | 이유 | 영향 범위 | 상태 | +| --- | --- | --- | --- | --- | +| 2026-04-08 | Book 도메인은 책/챕터/청크 3계층 구조를 문서 표준으로 유지 | 기능 확장 시 공통 읽기 모델을 보존하기 위함 | BookService, ChapterService, ProgressService | 유지 | +| 2026-04-08 | 진행도 갱신과 스트릭 연동 흐름을 핵심 시나리오로 고정 | 교차 도메인 영향이 가장 큰 지점이기 때문 | ProgressService, StreakService | 유지 | + ## 참고 코드 - `src/main/java/com/linglevel/api/content/book/service/BookService.java` diff --git a/docs/architecture/overview.md b/docs/architecture/overview.md index 0b5a4ab..de06ada 100644 --- a/docs/architecture/overview.md +++ b/docs/architecture/overview.md @@ -6,6 +6,15 @@ - 책 읽기, 단어 조회, 스트릭 유지 같은 핵심 학습 기능을 한 애플리케이션에서 처리한다. - 추천, 알림, 크롤링, 파일 처리 같은 보조 기능도 함께 운영한다. +## 핵심 용어 사전 + +| 용어 | 정의 | +| --- | --- | +| 학습 콘텐츠 | 사용자가 소비하는 책/아티클/커스텀 학습 단위 | +| 진행도 | 사용자의 현재 학습 위치를 수치화한 상태 | +| 스트릭 | 사용자 학습 연속성(일 단위 유지 상태) | +| 보조 기능 | 추천/알림/크롤링처럼 핵심 학습 흐름을 지원하는 기능 | + ## 외부 시스템 의존성 - MongoDB: 주요 도메인 데이터와 로그 저장 @@ -74,3 +83,9 @@ flowchart TD 1. 실제 사용자 요청이 자주 통과하는 기능이다. 2. 외부 의존성이나 도메인 결합이 있어 이해 난이도가 높다. 3. 리팩터링이나 성능 개선 시 영향 범위가 큰 영역이다. 
+ +## 간결 의사결정 기록 + +| 날짜 | 결정 | 이유 | 영향 범위 | 상태 | +| --- | --- | --- | --- | --- | +| 2026-04-08 | 시스템 문서는 미니맵 중심으로 유지 | 전체 구조를 빠르게 파악하기 위한 목적 우선 | docs/architecture/* | 유지 | diff --git a/docs/architecture/streak.md b/docs/architecture/streak.md index 468d40c..3f98a03 100644 --- a/docs/architecture/streak.md +++ b/docs/architecture/streak.md @@ -13,6 +13,15 @@ - 읽기 세션은 Redis에 짧게 저장되고, 확정된 상태는 MongoDB에 반영된다. - 프리즈와 보상 기록은 별도 트랜잭션/이력 데이터로 관리된다. +## 핵심 용어 사전 + +| 용어 | 정의 | +| --- | --- | +| 스트릭 | 학습 완료를 일 단위로 누적한 연속 기록 | +| 읽기 세션 | 학습 시작 시점부터 종료/완료 처리 전까지의 임시 상태 | +| 프리즈 | 스트릭을 하루 보호하는 소모성 보호 자원 | +| 완료 기록 | 특정 날짜 학습 완료 여부를 확정한 데이터 | + ## 외부 시스템 의존성 - MongoDB: 누적 리포트와 완료 기록 저장 @@ -78,6 +87,12 @@ sequenceDiagram 3. Redis, MongoDB, FCM이 함께 등장해 의존성 파악 가치가 크다. 4. 세션, 누적 상태, 스케줄러가 모두 연결돼 있어 처음 읽는 난이도가 높다. +## 간결 의사결정 기록 + +| 날짜 | 결정 | 이유 | 영향 범위 | 상태 | +| --- | --- | --- | --- | --- | +| 2026-04-08 | 세션 상태는 Redis, 확정 상태는 MongoDB로 분리 | 짧은 상태와 영속 상태의 책임을 분리해 운영 단순화 | ReadingSessionService, StreakService | 유지 | + ## 참고 코드 - `src/main/java/com/linglevel/api/streak/service/StreakService.java` diff --git a/docs/architecture/word.md b/docs/architecture/word.md index e719ec0..c71f42e 100644 --- a/docs/architecture/word.md +++ b/docs/architecture/word.md @@ -13,6 +13,15 @@ - 입력 단어와 원형 단어의 연결은 `WordVariant`로 관리된다. - 반복 실패 단어는 `InvalidWord`에 저장해 재시도를 줄인다. +## 핵심 용어 사전 + +| 용어 | 정의 | +| --- | --- | +| 원형 단어 | 실제 사전/학습 기준이 되는 canonical 단어 | +| 입력 단어 | 사용자가 검색한 원문 입력값 | +| variant | 입력 단어와 원형 단어를 연결하는 매핑 엔티티 | +| invalid word | 반복 실패 단어를 차단하기 위한 캐시 데이터 | + ## 외부 시스템 의존성 - MongoDB: 단어 본문, variant, invalid cache 저장 @@ -85,6 +94,12 @@ sequenceDiagram 3. 외부 AI 의존성이 있어 실패 경로까지 같이 파악해야 한다. 4. variant와 invalid cache가 조회 흐름 초반에 분기점 역할을 한다. 
+## 간결 의사결정 기록 + +| 날짜 | 결정 | 이유 | 영향 범위 | 상태 | +| --- | --- | --- | --- | --- | +| 2026-04-08 | `Word/Variant/Invalid` 3분할 구조 유지 | 조회 성능, 정합성, 실패 재시도 제어를 분리하기 위함 | WordService, 관련 Repository | 유지 | + ## 참고 코드 - `src/main/java/com/linglevel/api/word/service/WordService.java` diff --git a/docs/templates/architecture-template.md b/docs/templates/architecture-template.md index c730c4d..d1f261c 100644 --- a/docs/templates/architecture-template.md +++ b/docs/templates/architecture-template.md @@ -16,6 +16,19 @@ - 1:N 관계나 상위/하위 개념이 있다면 여기서 먼저 정리한다 - 읽는 사람이 도메인 내부 shape를 빠르게 잡을 수 있으면 충분하다 +## 핵심 용어 사전 + +용어 혼동으로 의미가 흔들리지 않도록, 최소한의 도메인 용어만 관리한다. +각 항목은 `용어`와 `정의`만 유지한다. + +예시: + +| 용어 | 정의 | +| --- | --- | +| 진행률 | 사용자의 현재 학습 위치를 퍼센트로 표현한 값 | +| 완료 | 도메인이 정의한 완료 조건을 만족한 상태 | +| 세션 | 특정 사용자 학습 행위의 추적 단위 | + ## 외부 시스템 의존성 - 사용하는 저장소, 캐시, 메시징, 외부 API @@ -93,6 +106,17 @@ flowchart TD 4. 어떤 저장소나 외부 시스템과 결합되는가 5. 왜 중요하거나 복잡한가 +## 의사결정 기록 + +아키텍처/도메인 의미에 영향을 주는 결정은 이 문서에 간결하게 남긴다. +복잡한 별도 문서로 분리하기 전에, 아래 형식으로 먼저 누적한다. 
+ +| 날짜 | 결정 | 이유 | 영향 범위 | 상태 | +| --- | --- | --- | --- | --- | +| 2026-04-08 | 진행률 계산 기준을 chapter completion으로 통일 | API 간 의미 불일치 제거 | ProgressService, BookService | 유지 | + +상태 예시: `유지`, `대체 예정`, `폐기` + ## 참고 코드 - 관련 패키지 또는 파일 경로 From 0ce9478a65643ae221e4de2ab2680b9975e703b1 Mon Sep 17 00:00:00 2001 From: solfe Date: Wed, 8 Apr 2026 18:13:39 +0900 Subject: [PATCH 18/28] docs(book): define chapter-first progress ordering invariant --- docs/architecture/content-book.md | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/architecture/content-book.md b/docs/architecture/content-book.md index ba2c86e..2855bc1 100644 --- a/docs/architecture/content-book.md +++ b/docs/architecture/content-book.md @@ -18,7 +18,17 @@ | Chapter | Book 내부의 순차 학습 단위 | | Chunk | Chapter 내부의 세부 학습 단위(텍스트/이미지 등) | | BookProgress | 사용자별 책 학습 상태를 저장하는 엔티티 | -| normalizedProgress | 도메인에서 정의한 기준으로 정규화한 진행률 값 | +| normalizedProgress | 완료된 챕터 수를 기준으로 계산한 책 진행률(%) | +| maxReadChunkNumber | 챕터/청크 조합 위치를 전역 순서값으로 환산한 최대 도달 지표 | + +## 핵심 불변식 + +1. 책 진행률(`normalizedProgress`)은 챕터 완료 기반으로 계산한다. +2. 상태 분류(`NOT_STARTED`, `IN_PROGRESS`, `COMPLETED`)는 `normalizedProgress`와 `isCompleted`만으로 판정한다. +3. `maxReadChunkNumber`는 챕터 우선 정렬 기준으로 계산한다. 비교 순서는 `(chapterNumber, chunkNumber)`이며, chapter가 더 크면 항상 더 큰 진행 위치로 본다. +4. `GET /progress`는 학습 상태를 변경하지 않는다. (progress 문서 생성/수정 금지, 검증 중) +5. GET API의 부작용은 분석 목적(`viewCount`, 읽기 세션 시작)으로만 허용한다. +6. 진행도 모델은 V3 단일 경로를 목표로 하며, fallback은 검증 완료 후 제거한다. 
## 외부 시스템 의존성 @@ -120,6 +130,17 @@ sequenceDiagram | --- | --- | --- | --- | --- | | 2026-04-08 | Book 도메인은 책/챕터/청크 3계층 구조를 문서 표준으로 유지 | 기능 확장 시 공통 읽기 모델을 보존하기 위함 | BookService, ChapterService, ProgressService | 유지 | | 2026-04-08 | 진행도 갱신과 스트릭 연동 흐름을 핵심 시나리오로 고정 | 교차 도메인 영향이 가장 큰 지점이기 때문 | ProgressService, StreakService | 유지 | +| 2026-04-08 | `normalizedProgress`를 챕터 완료 기반으로 통일 | 청크만으로는 책 진행 의미를 안정적으로 표현하기 어려움 | ProgressService, BookService | 유지 | +| 2026-04-08 | 상태 판정은 `normalizedProgress` + `isCompleted`로 단일화 | 상태 분류 기준 다중화로 인한 의미 흔들림 방지 | BookRepositoryImpl, BookService, ProgressService | 유지 | +| 2026-04-08 | `maxReadChunkNumber`를 정식 필드로 유지하고 챕터 우선 `(chapterNumber, chunkNumber)` 순서로 계산 | 챕터 경계를 보존한 진행 위치 비교를 위해서 | BookProgress, ProgressService, DTO | 유지 | +| 2026-04-08 | `GET /progress`에서 progress 미존재 시 0% 조회 정책 검증 | 조회 API의 상태 변경 부작용 제거 필요 | BooksProgressController, ProgressService | 검증 중 | +| 2026-04-08 | GET API는 분석성 부작용만 허용 | 사용자 학습 상태와 운영 지표를 분리하기 위함 | ChapterService, ReadingSessionService | 유지 | +| 2026-04-08 | V3 단일화 전환 가능성 사전 검증 후 fallback 제거 | 데이터 호환성 리스크를 통제하기 위함 | ProgressService, ChapterService, ChapterRepositoryImpl | 검증 중 | + +## 검증 필요 항목 + +- `GET /progress`에서 progress 문서가 없는 경우에도 DB 변경 없이 0% 응답 가능한지 확인 +- V3 단일화 시 기존 데이터/조회 경로에서 fallback 제거해도 회귀가 없는지 확인 ## 참고 코드 From 5f4ea73cc0aab5963dff447dc82fa781280d6b02 Mon Sep 17 00:00:00 2001 From: solfe Date: Thu, 9 Apr 2026 00:31:39 +0900 Subject: [PATCH 19/28] fix(book-progress): initialize normalized progress from chapter-completion baseline --- .../content/book/service/ProgressService.java | 17 ++++------------- .../book/service/ProgressServiceTest.java | 8 +++----- 2 files changed, 7 insertions(+), 18 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java index 9b88d27..c31a00b 100644 --- a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java +++ 
b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java @@ -11,7 +11,6 @@ import com.linglevel.api.content.book.repository.ChapterRepository; import com.linglevel.api.content.book.repository.ChunkRepository; import com.linglevel.api.content.common.ContentType; -import com.linglevel.api.content.common.service.ProgressCalculationService; import com.linglevel.api.content.common.service.ReadingCompletionService; import com.linglevel.api.streak.service.StreakService; import lombok.RequiredArgsConstructor; @@ -31,7 +30,6 @@ public class ProgressService { private final ChunkService chunkService; private final BookProgressRepository bookProgressRepository; private final ChunkRepository chunkRepository; - private final ProgressCalculationService progressCalculationService; private final ReadingCompletionService readingCompletionService; private final StreakService streakService; private final ChapterRepository chapterRepository; @@ -268,16 +266,9 @@ private BookProgress initializeProgress(String userId, String bookId) { newProgress.setCurrentReadChapterNumber(firstChapter.getChapterNumber()); newProgress.setMaxReadChapterNumber(firstChapter.getChapterNumber()); - // [V2_CORE] V2 필드: 초기 진행률 계산 - long totalChunks = chunkRepository.countByChapterIdAndDifficultyLevel( - firstChapter.getId(), firstChunk.getDifficultyLevel() - ); - double initialProgress = progressCalculationService.calculateNormalizedProgress( - firstChunk.getChunkNumber(), totalChunks - ); - - newProgress.setNormalizedProgress(initialProgress); - newProgress.setMaxNormalizedProgress(initialProgress); + // 초기 상태는 완료 챕터가 없으므로 진행률을 0%로 시작한다. 
+ newProgress.setNormalizedProgress(0.0); + newProgress.setMaxNormalizedProgress(0.0); newProgress.setCurrentDifficultyLevel(firstChunk.getDifficultyLevel()); return bookProgressRepository.save(newProgress); @@ -328,4 +319,4 @@ private ProgressResponse convertToProgressResponse(BookProgress progress, boolea .updatedAt(progress.getUpdatedAt()) .build(); } -} \ No newline at end of file +} diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java index 7d7375d..a25e187 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java @@ -148,15 +148,13 @@ void getProgress_initializesProgressWhenMissing() { savedProgress.setCurrentReadChapterNumber(1); savedProgress.setMaxReadChapterNumber(1); savedProgress.setCurrentDifficultyLevel(DifficultyLevel.A1); - savedProgress.setNormalizedProgress(12.5); - savedProgress.setMaxNormalizedProgress(12.5); + savedProgress.setNormalizedProgress(0.0); + savedProgress.setMaxNormalizedProgress(0.0); when(bookService.existsById(bookId)).thenReturn(true); when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.empty()); when(chapterService.findFirstByBookId(bookId)).thenReturn(firstChapter); when(chunkService.findFirstByChapterId("chapter-1")).thenReturn(firstChunk); - when(chunkRepository.countByChapterIdAndDifficultyLevel("chapter-1", DifficultyLevel.A1)).thenReturn(8L); - when(progressCalculationService.calculateNormalizedProgress(1, 8L)).thenReturn(12.5); when(bookProgressRepository.save(any(BookProgress.class))).thenReturn(savedProgress); when(chunkService.findById("chunk-1")).thenReturn(firstChunk); @@ -177,7 +175,7 @@ void getProgress_initializesProgressWhenMissing() { assertThat(response.getId()).isEqualTo("progress-1"); 
assertThat(response.getCurrentReadChunkNumber()).isEqualTo(1); - assertThat(response.getNormalizedProgress()).isEqualTo(12.5); + assertThat(response.getNormalizedProgress()).isEqualTo(0.0); assertThat(response.getStreakUpdated()).isFalse(); } From ed4a259bd9b7600b0bff2ebe36207e2a1570ba36 Mon Sep 17 00:00:00 2001 From: solfe Date: Thu, 9 Apr 2026 00:45:35 +0900 Subject: [PATCH 20/28] fix(book-progress): return zero-state on missing progress without write --- .../content/book/service/ProgressService.java | 44 ++++++-------- .../book/service/ProgressServiceTest.java | 60 +++++-------------- 2 files changed, 34 insertions(+), 70 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java index c31a00b..6d07d18 100644 --- a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java +++ b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java @@ -248,30 +248,9 @@ public ProgressResponse getProgress(String bookId, String userId) { throw new BooksException(BooksErrorCode.BOOK_NOT_FOUND); } - BookProgress bookProgress = bookProgressRepository.findByUserIdAndBookId(userId, bookId) - .orElseGet(() -> initializeProgress(userId, bookId)); - - return convertToProgressResponse(bookProgress, false); - } - - private BookProgress initializeProgress(String userId, String bookId) { - Chapter firstChapter = chapterService.findFirstByBookId(bookId); - Chunk firstChunk = chunkService.findFirstByChapterId(firstChapter.getId()); - - BookProgress newProgress = new BookProgress(); - newProgress.setUserId(userId); - newProgress.setBookId(bookId); - newProgress.setChapterId(firstChapter.getId()); - newProgress.setChunkId(firstChunk.getId()); - newProgress.setCurrentReadChapterNumber(firstChapter.getChapterNumber()); - newProgress.setMaxReadChapterNumber(firstChapter.getChapterNumber()); - - // 초기 상태는 완료 챕터가 없으므로 진행률을 0%로 시작한다. 
- newProgress.setNormalizedProgress(0.0); - newProgress.setMaxNormalizedProgress(0.0); - newProgress.setCurrentDifficultyLevel(firstChunk.getDifficultyLevel()); - - return bookProgressRepository.save(newProgress); + return bookProgressRepository.findByUserIdAndBookId(userId, bookId) + .map(progress -> convertToProgressResponse(progress, false)) + .orElseGet(() -> createNotStartedProgressResponse(userId, bookId)); } @Transactional @@ -319,4 +298,19 @@ private ProgressResponse convertToProgressResponse(BookProgress progress, boolea .updatedAt(progress.getUpdatedAt()) .build(); } -} + + private ProgressResponse createNotStartedProgressResponse(String userId, String bookId) { + return ProgressResponse.builder() + .userId(userId) + .bookId(bookId) + .currentReadChapterNumber(0) + .currentReadChunkNumber(0) + .maxReadChapterNumber(0) + .maxReadChunkNumber(0) + .isCompleted(false) + .normalizedProgress(0.0) + .maxNormalizedProgress(0.0) + .streakUpdated(false) + .build(); + } +} diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java index a25e187..1f4fda2 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java @@ -11,7 +11,6 @@ import com.linglevel.api.content.book.repository.ChapterRepository; import com.linglevel.api.content.book.repository.ChunkRepository; import com.linglevel.api.content.common.DifficultyLevel; -import com.linglevel.api.content.common.service.ProgressCalculationService; import com.linglevel.api.content.common.service.ReadingCompletionService; import com.linglevel.api.streak.service.StreakService; import org.junit.jupiter.api.DisplayName; @@ -32,6 +31,7 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; +import static 
org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) @@ -55,9 +55,6 @@ class ProgressServiceTest { @Mock private ChunkService chunkService; - @Mock - private ProgressCalculationService progressCalculationService; - @Mock private ReadingCompletionService readingCompletionService; @@ -123,59 +120,32 @@ void updateProgress_shouldLazyMigrate_forOldBookProgress() { } @Test - @DisplayName("진도 정보가 없으면 첫 챕터와 첫 청크 기준으로 초기 진도를 생성해 반환한다") - void getProgress_initializesProgressWhenMissing() { + @DisplayName("진도 정보가 없으면 문서를 생성하지 않고 0% 진도를 반환한다") + void getProgress_returnsZeroProgressWhenMissing() { // given String userId = "user-1"; String bookId = "book-1"; - Chapter firstChapter = new Chapter(); - firstChapter.setId("chapter-1"); - firstChapter.setChapterNumber(1); - - Chunk firstChunk = new Chunk(); - firstChunk.setId("chunk-1"); - firstChunk.setChapterId("chapter-1"); - firstChunk.setChunkNumber(1); - firstChunk.setDifficultyLevel(DifficultyLevel.A1); - - BookProgress savedProgress = new BookProgress(); - savedProgress.setId("progress-1"); - savedProgress.setUserId(userId); - savedProgress.setBookId(bookId); - savedProgress.setChapterId("chapter-1"); - savedProgress.setChunkId("chunk-1"); - savedProgress.setCurrentReadChapterNumber(1); - savedProgress.setMaxReadChapterNumber(1); - savedProgress.setCurrentDifficultyLevel(DifficultyLevel.A1); - savedProgress.setNormalizedProgress(0.0); - savedProgress.setMaxNormalizedProgress(0.0); - when(bookService.existsById(bookId)).thenReturn(true); when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.empty()); - when(chapterService.findFirstByBookId(bookId)).thenReturn(firstChapter); - when(chunkService.findFirstByChapterId("chapter-1")).thenReturn(firstChunk); - when(bookProgressRepository.save(any(BookProgress.class))).thenReturn(savedProgress); - when(chunkService.findById("chunk-1")).thenReturn(firstChunk); // when ProgressResponse 
response = progressService.getProgress(bookId, userId); // then - verify(bookProgressRepository).save(bookProgressCaptor.capture()); - BookProgress initialized = bookProgressCaptor.getValue(); - - assertThat(initialized.getUserId()).isEqualTo(userId); - assertThat(initialized.getBookId()).isEqualTo(bookId); - assertThat(initialized.getChapterId()).isEqualTo("chapter-1"); - assertThat(initialized.getChunkId()).isEqualTo("chunk-1"); - assertThat(initialized.getCurrentReadChapterNumber()).isEqualTo(1); - assertThat(initialized.getMaxReadChapterNumber()).isEqualTo(1); - assertThat(initialized.getCurrentDifficultyLevel()).isEqualTo(DifficultyLevel.A1); - - assertThat(response.getId()).isEqualTo("progress-1"); - assertThat(response.getCurrentReadChunkNumber()).isEqualTo(1); + verify(bookProgressRepository, never()).save(any(BookProgress.class)); + verifyNoInteractions(chapterService, chunkService, chunkRepository); + + assertThat(response.getId()).isNull(); + assertThat(response.getChapterId()).isNull(); + assertThat(response.getChunkId()).isNull(); + assertThat(response.getCurrentReadChapterNumber()).isEqualTo(0); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(0); + assertThat(response.getMaxReadChapterNumber()).isEqualTo(0); + assertThat(response.getMaxReadChunkNumber()).isEqualTo(0); assertThat(response.getNormalizedProgress()).isEqualTo(0.0); + assertThat(response.getMaxNormalizedProgress()).isEqualTo(0.0); + assertThat(response.getIsCompleted()).isFalse(); assertThat(response.getStreakUpdated()).isFalse(); } From 9875b04f2e6090693919ece13d808188f918dcdc Mon Sep 17 00:00:00 2001 From: solfe Date: Thu, 9 Apr 2026 00:52:49 +0900 Subject: [PATCH 21/28] fix(book-progress): unify book status filtering with normalized progress --- .../book/repository/BookRepositoryImpl.java | 19 ++++++++---- .../repository/BookRepositoryImplTest.java | 29 ++++++++++++++----- 2 files changed, 35 insertions(+), 13 deletions(-) diff --git 
a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java index 865d024..6549775 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java +++ b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java @@ -14,7 +14,9 @@ import org.springframework.util.StringUtils; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; /** * Book Repository 커스텀 구현체 @@ -130,12 +132,13 @@ private List getNotStartedBookIds(String userId) { .map(Book::getId) .toList(); - // 진도가 있는 책 ID 조회 - List progressBookIds = findProgressBookIds(userId); + // 시작한 책(진행 중/완료) ID 조회 + List startedBookIds = findStartedBookIds(userId); + Set startedBookIdSet = new HashSet<>(startedBookIds); - // 진도가 없는 책만 반환 + // 시작하지 않은 책(진도 문서 없음 또는 normalizedProgress 0%)만 반환 return allBookIds.stream() - .filter(bookId -> !progressBookIds.contains(bookId)) + .filter(bookId -> !startedBookIdSet.contains(bookId)) .toList(); } @@ -146,7 +149,7 @@ private List getInProgressBookIds(String userId) { Query query = new Query(); query.addCriteria(Criteria.where("userId").is(userId)); query.addCriteria(Criteria.where("isCompleted").is(false)); - query.addCriteria(Criteria.where("maxReadChunkNumber").gt(0)); + query.addCriteria(Criteria.where("normalizedProgress").gt(0)); return findBookIdsFromProgress(query); } @@ -165,9 +168,13 @@ private List getCompletedBookIds(String userId) { /** * 특정 사용자의 모든 진도 책 ID 조회 */ - private List findProgressBookIds(String userId) { + private List findStartedBookIds(String userId) { Query query = new Query(); query.addCriteria(Criteria.where("userId").is(userId)); + query.addCriteria(new Criteria().orOperator( + Criteria.where("isCompleted").is(true), + Criteria.where("normalizedProgress").gt(0) + )); return findBookIdsFromProgress(query); } diff --git 
a/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java index 75812a9..bd5b866 100644 --- a/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java +++ b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java @@ -49,12 +49,12 @@ void setUp() { createBook("book-3", "Gamma", Instant.parse("2026-01-03T00:00:00Z")) )); - mongoTemplate.insert(createProgressDocument("book-2", false, 5), "bookProgress"); - mongoTemplate.insert(createProgressDocument("book-3", true, 10), "bookProgress"); + mongoTemplate.insert(createProgressDocument("book-2", false, 40.0), "bookProgress"); + mongoTemplate.insert(createProgressDocument("book-3", true, 100.0), "bookProgress"); } @Test - @DisplayName("NOT_STARTED 필터는 progress 문서가 없는 책만 반환한다") + @DisplayName("NOT_STARTED 필터는 시작하지 않은 책(문서 없음 또는 normalizedProgress 0)을 반환한다") void findBooksWithFilters_returnsNotStartedBooks() { GetBooksRequest request = GetBooksRequest.builder() .progress(ProgressStatus.NOT_STARTED) @@ -67,7 +67,7 @@ void findBooksWithFilters_returnsNotStartedBooks() { } @Test - @DisplayName("IN_PROGRESS 필터는 완료되지 않았고 읽기 기록이 있는 책만 반환한다") + @DisplayName("IN_PROGRESS 필터는 normalizedProgress > 0이고 완료되지 않은 책만 반환한다") void findBooksWithFilters_returnsInProgressBooks() { GetBooksRequest request = GetBooksRequest.builder() .progress(ProgressStatus.IN_PROGRESS) @@ -96,7 +96,7 @@ void findBooksWithFilters_returnsCompletedBooks() { @DisplayName("조건에 맞는 progress가 없으면 빈 페이지를 반환한다") void findBooksWithFilters_returnsEmptyPageWhenNoProgressMatch() { bookProgressRepository.deleteAll(); - mongoTemplate.insert(createProgressDocument("book-1", false, 0), "bookProgress"); + mongoTemplate.insert(createProgressDocument("book-1", false, 0.0), "bookProgress"); GetBooksRequest request = GetBooksRequest.builder() .progress(ProgressStatus.IN_PROGRESS) @@ -108,6 +108,21 @@ void 
findBooksWithFilters_returnsEmptyPageWhenNoProgressMatch() { assertThat(result.getTotalElements()).isZero(); } + @Test + @DisplayName("normalizedProgress가 0이고 미완료인 책은 NOT_STARTED로 분류한다") + void findBooksWithFilters_includesZeroProgressAsNotStarted() { + mongoTemplate.insert(createProgressDocument("book-1", false, 0.0), "bookProgress"); + + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Book::getId).containsExactly("book-1"); + assertThat(result.getTotalElements()).isEqualTo(1); + } + private Pageable defaultPageable() { return PageRequest.of(0, 10, Sort.by(Sort.Direction.ASC, "createdAt")); } @@ -123,10 +138,10 @@ private Book createBook(String id, String title, Instant createdAt) { return book; } - private Document createProgressDocument(String bookId, boolean isCompleted, int maxReadChunkNumber) { + private Document createProgressDocument(String bookId, boolean isCompleted, double normalizedProgress) { return new Document("userId", USER_ID) .append("bookId", bookId) .append("isCompleted", isCompleted) - .append("maxReadChunkNumber", maxReadChunkNumber); + .append("normalizedProgress", normalizedProgress); } } From 88efe7de1db2e22a53ddcd9cd915962ab39a47ac Mon Sep 17 00:00:00 2001 From: solfe Date: Sat, 11 Apr 2026 11:38:37 +0900 Subject: [PATCH 22/28] feat(book-progress): implement chapter-first maxReadChunkNumber tracking --- .../content/book/dto/ProgressResponse.java | 4 +- .../api/content/book/entity/BookProgress.java | 6 ++ .../content/book/service/ProgressService.java | 20 +++++++ .../book/service/ProgressServiceTest.java | 58 +++++++++++++++++++ 4 files changed, 86 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/dto/ProgressResponse.java b/src/main/java/com/linglevel/api/content/book/dto/ProgressResponse.java index 
f4cb77a..ea4442d 100644 --- a/src/main/java/com/linglevel/api/content/book/dto/ProgressResponse.java +++ b/src/main/java/com/linglevel/api/content/book/dto/ProgressResponse.java @@ -39,7 +39,7 @@ public class ProgressResponse { @Schema(description = "최대 읽은 챕터 번호", example = "3") private Integer maxReadChapterNumber; - @Schema(description = "최대 읽은 청크 번호", example = "8") + @Schema(description = "챕터 우선 정렬 기준의 최대 도달 청크 위치값", example = "65544") private Integer maxReadChunkNumber; @Schema(description = "완료 여부", example = "false") @@ -59,4 +59,4 @@ public class ProgressResponse { @Schema(description = "업데이트 일시", example = "2024-01-15T10:30:00Z") private Instant updatedAt; -} \ No newline at end of file +} diff --git a/src/main/java/com/linglevel/api/content/book/entity/BookProgress.java b/src/main/java/com/linglevel/api/content/book/entity/BookProgress.java index 3ada537..2b2fdce 100644 --- a/src/main/java/com/linglevel/api/content/book/entity/BookProgress.java +++ b/src/main/java/com/linglevel/api/content/book/entity/BookProgress.java @@ -37,6 +37,12 @@ public class BookProgress { private Integer maxReadChapterNumber; + /** + * 챕터 우선 정렬 기준의 최대 도달 청크 위치값. + * 비교 순서는 (chapterNumber, chunkNumber)이며 chapter가 우선한다. 
+ */ + private Integer maxReadChunkNumber; + // V2 Progress Fields private Double normalizedProgress; diff --git a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java index 6d07d18..b76552a 100644 --- a/src/main/java/com/linglevel/api/content/book/service/ProgressService.java +++ b/src/main/java/com/linglevel/api/content/book/service/ProgressService.java @@ -24,6 +24,9 @@ @RequiredArgsConstructor @Slf4j public class ProgressService { + private static final int CHAPTER_POSITION_SHIFT = 16; + private static final int CHAPTER_NUMBER_MAX = 0x7FFF; // 32767 + private static final int CHUNK_NUMBER_MAX = 0xFFFF; // 65535 private final BookService bookService; private final ChapterService chapterService; @@ -104,6 +107,12 @@ public ProgressResponse updateProgress(String bookId, ProgressUpdateRequest requ bookProgress.setMaxReadChapterNumber(currentChapterNum); } + int currentChunkPosition = toChapterFirstPosition(chapter.getChapterNumber(), chunk.getChunkNumber()); + Integer maxChunkPosition = bookProgress.getMaxReadChunkNumber(); + if (maxChunkPosition == null || currentChunkPosition > maxChunkPosition) { + bookProgress.setMaxReadChunkNumber(currentChunkPosition); + } + // maxNormalizedProgress는 완료된 챕터 기반으로 설정 bookProgress.setMaxNormalizedProgress(bookProgress_normalizedProgress); @@ -290,6 +299,7 @@ private ProgressResponse convertToProgressResponse(BookProgress progress, boolea .currentReadChapterNumber(progress.getCurrentReadChapterNumber()) .currentReadChunkNumber(chunk.getChunkNumber()) .maxReadChapterNumber(progress.getMaxReadChapterNumber()) + .maxReadChunkNumber(progress.getMaxReadChunkNumber()) .isCompleted(progress.getIsCompleted()) .currentDifficultyLevel(progress.getCurrentDifficultyLevel()) .normalizedProgress(progress.getNormalizedProgress()) @@ -313,4 +323,14 @@ private ProgressResponse createNotStartedProgressResponse(String userId, String 
.streakUpdated(false) .build(); } + + private int toChapterFirstPosition(Integer chapterNumber, Integer chunkNumber) { + if (chapterNumber == null || chapterNumber <= 0 || chunkNumber == null || chunkNumber <= 0) { + throw new BooksException(BooksErrorCode.INVALID_CHUNK_NUMBER); + } + if (chapterNumber > CHAPTER_NUMBER_MAX || chunkNumber > CHUNK_NUMBER_MAX) { + throw new BooksException(BooksErrorCode.INVALID_CHUNK_NUMBER); + } + return (chapterNumber << CHAPTER_POSITION_SHIFT) | chunkNumber; + } } diff --git a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java index 1f4fda2..3e80ba8 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ProgressServiceTest.java @@ -117,6 +117,7 @@ void updateProgress_shouldLazyMigrate_forOldBookProgress() { // ensureMigrated initializes the list, and the subsequent logic adds the first progress info assertThat(savedProgress.getChapterProgresses()).hasSize(1); assertThat(savedProgress.getChapterProgresses().get(0).getChapterNumber()).isEqualTo(1); + assertThat(savedProgress.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(1, 1)); } @Test @@ -205,7 +206,9 @@ void updateProgress_updatesExistingChapterProgressEntry() { assertThat(saved.getChapterProgresses().get(0).getIsCompleted()).isFalse(); assertThat(saved.getCurrentReadChapterNumber()).isEqualTo(1); assertThat(saved.getChunkId()).isEqualTo(chunkId); + assertThat(saved.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(1, 3)); assertThat(response.getCurrentReadChunkNumber()).isEqualTo(3); + assertThat(response.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(1, 3)); assertThat(response.getStreakUpdated()).isFalse(); } @@ -268,10 +271,61 @@ void updateProgress_marksBookCompletedWhenLastRemainingChapterFinishes() { 
assertThat(saved.getChapterProgresses().get(1).getIsCompleted()).isTrue(); assertThat(saved.getIsCompleted()).isTrue(); assertThat(saved.getCompletedAt()).isNotNull(); + assertThat(saved.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(2, 4)); assertThat(response.getCurrentReadChunkNumber()).isEqualTo(4); + assertThat(response.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(2, 4)); assertThat(response.getStreakUpdated()).isTrue(); } + @Test + @DisplayName("maxReadChunkNumber는 챕터 우선 순서로 업데이트된다") + void updateProgress_updatesMaxReadChunkNumberByChapterPriority() { + // given + String userId = "user-1"; + String bookId = "book-1"; + String chunkId = "chunk-1"; + String chapterId = "chapter-2"; + + BookProgress progress = new BookProgress(); + progress.setId("progress-1"); + progress.setUserId(userId); + progress.setBookId(bookId); + progress.setMaxReadChunkNumber(chapterFirstPosition(1, 100)); + progress.setChapterProgresses(new ArrayList<>()); + + Chunk chunk = new Chunk(); + chunk.setId(chunkId); + chunk.setChapterId(chapterId); + chunk.setChunkNumber(1); + chunk.setDifficultyLevel(DifficultyLevel.A1); + + Chapter chapter = new Chapter(); + chapter.setId(chapterId); + chapter.setBookId(bookId); + chapter.setChapterNumber(2); + + ProgressUpdateRequest request = new ProgressUpdateRequest(); + request.setChunkId(chunkId); + + when(bookService.existsById(bookId)).thenReturn(true); + when(chunkService.findById(chunkId)).thenReturn(chunk); + when(chapterService.findById(chapterId)).thenReturn(chapter); + when(bookProgressRepository.findByUserIdAndBookId(userId, bookId)).thenReturn(Optional.of(progress)); + when(chunkRepository.countByChapterIdAndDifficultyLevel(chapterId, DifficultyLevel.A1)).thenReturn(10L); + when(chapterRepository.countByBookId(bookId)).thenReturn(5); + when(readingCompletionService.processReadingCompletion(userId, com.linglevel.api.content.common.ContentType.BOOK, chapterId, null)) + .thenReturn(null); + + // when + ProgressResponse 
response = progressService.updateProgress(bookId, request, userId); + + // then + verify(bookProgressRepository).save(bookProgressCaptor.capture()); + BookProgress saved = bookProgressCaptor.getValue(); + assertThat(saved.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(2, 1)); + assertThat(response.getMaxReadChunkNumber()).isEqualTo(chapterFirstPosition(2, 1)); + } + @Test @DisplayName("deleteProgress는 기존 진도 정보를 삭제한다") void deleteProgress_deletesExistingProgress() { @@ -312,4 +366,8 @@ void deleteProgress_throwsWhenProgressMissing() { assertThat(exception.getMessage()).isEqualTo(BooksErrorCode.PROGRESS_NOT_FOUND.getMessage()); verify(bookProgressRepository, never()).delete(any(BookProgress.class)); } + + private int chapterFirstPosition(int chapterNumber, int chunkNumber) { + return (chapterNumber << 16) | chunkNumber; + } } From 8965c63674ef8627ed941f5cf16b68a98878d140 Mon Sep 17 00:00:00 2001 From: solfe Date: Tue, 21 Apr 2026 11:02:35 +0900 Subject: [PATCH 23/28] fix(book-progress): use normalized progress as single source in book responses --- .../api/content/book/service/BookService.java | 10 +++++----- .../api/content/book/service/BookServiceTest.java | 2 ++ 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/service/BookService.java b/src/main/java/com/linglevel/api/content/book/service/BookService.java index ca6f9b5..6d0805e 100644 --- a/src/main/java/com/linglevel/api/content/book/service/BookService.java +++ b/src/main/java/com/linglevel/api/content/book/service/BookService.java @@ -194,10 +194,10 @@ private BookResponse convertToBookResponse(Book book, String userId, LanguageCod currentReadChapterNumber = progress.getCurrentReadChapterNumber() != null ? 
progress.getCurrentReadChapterNumber() : 0; - // 진행률 계산 - if (book.getChapterCount() != null && book.getChapterCount() > 0) { - progressPercentage = (double) currentReadChapterNumber / book.getChapterCount() * 100.0; - } + // 진행률은 저장된 normalizedProgress를 단일 소스로 사용한다. + progressPercentage = progress.getNormalizedProgress() != null + ? progress.getNormalizedProgress() + : 0.0; // DB에 저장된 완료 여부 사용 isCompleted = progress.getIsCompleted() != null ? progress.getIsCompleted() : false; @@ -251,4 +251,4 @@ private String selectTitleByLanguage(Book book, LanguageCode languageCode) { } -} \ No newline at end of file +} diff --git a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java index 0210c13..6993dcc 100644 --- a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java @@ -500,6 +500,8 @@ private BookProgress createBookProgress(String userId, String bookId, boolean is progress.setBookId(bookId); progress.setCurrentReadChapterNumber(isCompleted ? 20 : 10); progress.setMaxReadChapterNumber(isCompleted ? 20 : 10); + progress.setNormalizedProgress(isCompleted ? 100.0 : 50.0); + progress.setMaxNormalizedProgress(isCompleted ? 
100.0 : 50.0); progress.setIsCompleted(isCompleted); progress.setUpdatedAt(Instant.now()); return progress; From 5a6bbbce85ec66670fd161319f4a4dca69b8ef06 Mon Sep 17 00:00:00 2001 From: solfe Date: Wed, 22 Apr 2026 11:45:20 +0900 Subject: [PATCH 24/28] refactor(book): remove legacy chapter progress fallback --- .../repository/ChapterRepositoryImpl.java | 41 +++++++------------ .../content/book/service/ChapterService.java | 40 ++---------------- .../repository/ChapterRepositoryImplTest.java | 12 +++--- .../book/service/ChapterServiceTest.java | 30 ++------------ 4 files changed, 28 insertions(+), 95 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java b/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java index 7d340ee..5c4113f 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java +++ b/src/main/java/com/linglevel/api/content/book/repository/ChapterRepositoryImpl.java @@ -95,36 +95,25 @@ private List getChapterNumbersByProgress(String bookId, BookProgress bo return progressStatus == ProgressStatus.NOT_STARTED ? 
allChapterNumbers : List.of(); } - // [FIX] V3 마이그레이션된 데이터(chapterProgresses)를 기준으로 필터링 - if (bookProgress.getChapterProgresses() != null && !bookProgress.getChapterProgresses().isEmpty()) { - Map progressInfoMap = bookProgress.getChapterProgresses().stream() - .collect(Collectors.toMap(BookProgress.ChapterProgressInfo::getChapterNumber, Function.identity())); - - return allChapterNumbers.stream() - .filter(chapterNumber -> { - BookProgress.ChapterProgressInfo info = progressInfoMap.get(chapterNumber); - boolean isCompleted = info != null && Boolean.TRUE.equals(info.getIsCompleted()); - boolean inProgress = info != null && !isCompleted && info.getProgressPercentage() != null && info.getProgressPercentage() > 0; - - return switch (progressStatus) { - case COMPLETED -> isCompleted; - case IN_PROGRESS -> inProgress; - case NOT_STARTED -> !isCompleted && !inProgress; - }; - }) - .toList(); - } - - // [FALLBACK] 마이그레이션되지 않은 옛날 데이터 기준 - Integer currentChapterNumber = bookProgress.getCurrentReadChapterNumber() != null - ? bookProgress.getCurrentReadChapterNumber() : 0; + Map progressInfoMap = + bookProgress.getChapterProgresses() == null + ? 
Map.of() + : bookProgress.getChapterProgresses().stream() + .collect(Collectors.toMap(BookProgress.ChapterProgressInfo::getChapterNumber, Function.identity())); return allChapterNumbers.stream() .filter(chapterNumber -> { + BookProgress.ChapterProgressInfo info = progressInfoMap.get(chapterNumber); + boolean isCompleted = info != null && Boolean.TRUE.equals(info.getIsCompleted()); + boolean inProgress = info != null + && !isCompleted + && info.getProgressPercentage() != null + && info.getProgressPercentage() > 0; + return switch (progressStatus) { - case NOT_STARTED -> chapterNumber > currentChapterNumber; - case IN_PROGRESS -> chapterNumber.equals(currentChapterNumber) && currentChapterNumber > 0; - case COMPLETED -> chapterNumber < currentChapterNumber; + case COMPLETED -> isCompleted; + case IN_PROGRESS -> inProgress; + case NOT_STARTED -> !isCompleted && !inProgress; }; }) .toList(); diff --git a/src/main/java/com/linglevel/api/content/book/service/ChapterService.java b/src/main/java/com/linglevel/api/content/book/service/ChapterService.java index 38ac618..200e9a4 100644 --- a/src/main/java/com/linglevel/api/content/book/service/ChapterService.java +++ b/src/main/java/com/linglevel/api/content/book/service/ChapterService.java @@ -6,7 +6,6 @@ import com.linglevel.api.content.book.dto.GetChaptersRequest; import com.linglevel.api.content.book.entity.Book; import com.linglevel.api.content.book.entity.Chapter; -import com.linglevel.api.content.book.entity.Chunk; import com.linglevel.api.content.book.exception.BooksException; import com.linglevel.api.content.book.exception.BooksErrorCode; import com.linglevel.api.content.book.repository.BookRepository; @@ -65,10 +64,6 @@ public PageResponse getChapters(String bookId, GetChaptersReque .flatMap(id -> bookProgressRepository.findByUserIdAndBookId(id, bookId)) .orElse(null); - Chunk progressChunk = (bookProgress != null && bookProgress.getChunkId() != null) - ? 
chunkRepository.findById(bookProgress.getChunkId()).orElse(null) - : null; - Map> chunkCountsMap = chunkRepository.findChunkCountsByChapterIds(chapterIds) .stream() .collect(Collectors.groupingBy( @@ -77,7 +72,7 @@ public PageResponse getChapters(String bookId, GetChaptersReque )); List chapterResponses = chapters.stream() - .map(chapter -> convertToChapterResponse(chapter, book, bookProgress, progressChunk, chunkCountsMap)) + .map(chapter -> convertToChapterResponse(chapter, book, bookProgress, chunkCountsMap)) .collect(Collectors.toList()); return new PageResponse<>(chapterResponses, chapterPage); @@ -97,10 +92,6 @@ public ChapterResponse getChapter(String bookId, String chapterId, String userId .flatMap(id -> bookProgressRepository.findByUserIdAndBookId(id, bookId)) .orElse(null); - Chunk progressChunk = (bookProgress != null && bookProgress.getChunkId() != null) - ? chunkRepository.findById(bookProgress.getChunkId()).orElse(null) - : null; - Map> chunkCountsMap = chunkRepository.findChunkCountsByChapterIds(Collections.singletonList(chapterId)) .stream() .collect(Collectors.groupingBy( @@ -108,7 +99,7 @@ public ChapterResponse getChapter(String bookId, String chapterId, String userId Collectors.toMap(ChunkCountByLevelDto::getDifficultyLevel, ChunkCountByLevelDto::getCount) )); - return convertToChapterResponse(chapter, book, bookProgress, progressChunk, chunkCountsMap); + return convertToChapterResponse(chapter, book, bookProgress, chunkCountsMap); } public boolean existsById(String chapterId) { @@ -153,7 +144,7 @@ public ChapterNavigationResponse getChapterNavigation(String bookId, String chap .build(); } - private ChapterResponse convertToChapterResponse(Chapter chapter, Book book, BookProgress bookProgress, Chunk progressChunk, Map> chunkCountsMap) { + private ChapterResponse convertToChapterResponse(Chapter chapter, Book book, BookProgress bookProgress, Map> chunkCountsMap) { int currentReadChunkNumber = 0; double progressPercentage = 0.0; DifficultyLevel 
currentDifficultyLevel = book.getDifficultyLevel(); // Fallback: Book's difficulty @@ -173,7 +164,6 @@ private ChapterResponse convertToChapterResponse(Chapter chapter, Book book, Boo : null; if (chapterProgressInfo != null) { - // 배열에서 찾은 경우 progressPercentage = chapterProgressInfo.getProgressPercentage() != null ? chapterProgressInfo.getProgressPercentage() : 0.0; isCompleted = Boolean.TRUE.equals(chapterProgressInfo.getIsCompleted()); @@ -182,30 +172,6 @@ private ChapterResponse convertToChapterResponse(Chapter chapter, Book book, Boo long totalChunksForLevel = chunkCountsMap.getOrDefault(chapter.getId(), Collections.emptyMap()) .getOrDefault(currentDifficultyLevel, 0L); currentReadChunkNumber = (int) Math.ceil(progressPercentage * totalChunksForLevel / 100.0); - - } else { - // [FALLBACK] 기존 로직 사용 (backward compatibility) - Integer progressChapterNumber = bookProgress.getCurrentReadChapterNumber() != null - ? bookProgress.getCurrentReadChapterNumber() : 0; - - Integer progressChunkNumber = (progressChunk != null && progressChunk.getChunkNumber() != null) - ? 
progressChunk.getChunkNumber() : 0; - - long totalChunksForLevel = chunkCountsMap.getOrDefault(chapter.getId(), Collections.emptyMap()) - .getOrDefault(currentDifficultyLevel, 0L); - - if (chapter.getChapterNumber() < progressChapterNumber) { - currentReadChunkNumber = (int) totalChunksForLevel; - progressPercentage = 100.0; - } else if (chapter.getChapterNumber().equals(progressChapterNumber)) { - currentReadChunkNumber = progressChunkNumber; - if (totalChunksForLevel > 0) { - progressPercentage = (double) progressChunkNumber / totalChunksForLevel * 100.0; - } - } else { - currentReadChunkNumber = 0; - progressPercentage = 0.0; - } } } diff --git a/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java b/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java index 4b72e07..8237ed7 100644 --- a/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java +++ b/src/test/java/com/linglevel/api/content/book/repository/ChapterRepositoryImplTest.java @@ -98,12 +98,12 @@ void findChaptersWithFilters_usesV3ChapterProgresses() { } @Test - @DisplayName("fallback 데이터에서는 currentReadChapterNumber 기준으로 챕터 상태를 구분한다") - void findChaptersWithFilters_usesFallbackProgressData() { + @DisplayName("V3 데이터가 없으면 모든 챕터를 NOT_STARTED로 본다") + void findChaptersWithFilters_treatsMissingV3DataAsNotStarted() { BookProgress progress = new BookProgress(); progress.setUserId(USER_ID); progress.setBookId(BOOK_ID); - progress.setCurrentReadChapterNumber(2); + progress.setCurrentReadChapterNumber(2); // legacy field only (ignored in V3-only filtering) bookProgressRepository.save(progress); GetChaptersRequest completedRequest = GetChaptersRequest.builder() @@ -120,9 +120,9 @@ void findChaptersWithFilters_usesFallbackProgressData() { Page inProgress = chapterRepository.findChaptersWithFilters(BOOK_ID, inProgressRequest, USER_ID, defaultPageable()); Page notStarted = 
chapterRepository.findChaptersWithFilters(BOOK_ID, notStartedRequest, USER_ID, defaultPageable()); - assertThat(completed.getContent()).extracting(Chapter::getChapterNumber).containsExactly(1); - assertThat(inProgress.getContent()).extracting(Chapter::getChapterNumber).containsExactly(2); - assertThat(notStarted.getContent()).extracting(Chapter::getChapterNumber).containsExactly(3); + assertThat(completed.getContent()).isEmpty(); + assertThat(inProgress.getContent()).isEmpty(); + assertThat(notStarted.getContent()).extracting(Chapter::getChapterNumber).containsExactly(1, 2, 3); } private Pageable defaultPageable() { diff --git a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java index f224059..b22b345 100644 --- a/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/ChapterServiceTest.java @@ -114,11 +114,6 @@ void testProgressFilterWithPagination_NotStarted() { progress.setIsCompleted(false); progress.setUpdatedAt(Instant.now()); - com.linglevel.api.content.book.entity.Chunk mockChunk = new com.linglevel.api.content.book.entity.Chunk(); - mockChunk.setId("test-chunk-id"); - mockChunk.setChunkNumber(50); - when(chunkRepository.findById(anyString())).thenReturn(Optional.of(mockChunk)); - when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), testBook.getId())) .thenReturn(Optional.of(progress)); @@ -156,11 +151,6 @@ void testProgressFilterWithPagination_InProgress() { progress.setIsCompleted(false); progress.setUpdatedAt(Instant.now()); - com.linglevel.api.content.book.entity.Chunk mockChunk = new com.linglevel.api.content.book.entity.Chunk(); - mockChunk.setId("test-chunk-id"); - mockChunk.setChunkNumber(50); - when(chunkRepository.findById(anyString())).thenReturn(Optional.of(mockChunk)); - when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), 
testBook.getId())) .thenReturn(Optional.of(progress)); @@ -198,11 +188,6 @@ void testProgressFilterWithPagination_Completed() { progress.setIsCompleted(false); progress.setUpdatedAt(Instant.now()); - com.linglevel.api.content.book.entity.Chunk mockChunk = new com.linglevel.api.content.book.entity.Chunk(); - mockChunk.setId("test-chunk-id"); - mockChunk.setChunkNumber(50); - when(chunkRepository.findById(anyString())).thenReturn(Optional.of(mockChunk)); - when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), testBook.getId())) .thenReturn(Optional.of(progress)); @@ -280,27 +265,20 @@ void getChapter_usesV3ChapterProgressInfo() { } @Test - @DisplayName("단일 챕터 조회 시 V3 데이터가 없으면 fallback progress 정보로 응답을 계산한다") - void getChapter_usesFallbackProgressInfoWhenV3DataMissing() { + @DisplayName("단일 챕터 조회 시 V3 데이터가 없으면 해당 챕터를 NOT_STARTED로 계산한다") + void getChapter_returnsNotStartedWhenV3DataMissing() { // given Chapter chapter = createChapter(testBook.getId(), 2, "Chapter 2"); BookProgress progress = new BookProgress(); progress.setUserId(testUser.getId()); progress.setBookId(testBook.getId()); - progress.setChunkId("progress-chunk"); - progress.setCurrentReadChapterNumber(2); progress.setCurrentDifficultyLevel(DifficultyLevel.B1); progress.setChapterProgresses(null); - com.linglevel.api.content.book.entity.Chunk progressChunk = new com.linglevel.api.content.book.entity.Chunk(); - progressChunk.setId("progress-chunk"); - progressChunk.setChunkNumber(3); - when(chapterRepository.findById(chapter.getId())).thenReturn(Optional.of(chapter)); when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), testBook.getId())) .thenReturn(Optional.of(progress)); - when(chunkRepository.findById("progress-chunk")).thenReturn(Optional.of(progressChunk)); when(chunkRepository.findChunkCountsByChapterIds(List.of(chapter.getId()))) .thenReturn(List.of(new ChunkCountByLevelDto(chapter.getId(), DifficultyLevel.B1, 8L))); @@ -311,8 +289,8 @@ void 
getChapter_usesFallbackProgressInfoWhenV3DataMissing() { assertThat(response.getId()).isEqualTo(chapter.getId()); assertThat(response.getCurrentDifficultyLevel()).isEqualTo(DifficultyLevel.B1); assertThat(response.getChunkCount()).isEqualTo(8); - assertThat(response.getProgressPercentage()).isEqualTo(37.5); - assertThat(response.getCurrentReadChunkNumber()).isEqualTo(3); + assertThat(response.getProgressPercentage()).isEqualTo(0.0); + assertThat(response.getCurrentReadChunkNumber()).isEqualTo(0); assertThat(response.getIsCompleted()).isFalse(); } From 23723251ce9a845947ee2cc4322569272c4e77f2 Mon Sep 17 00:00:00 2001 From: solfe Date: Fri, 24 Apr 2026 19:24:25 +0900 Subject: [PATCH 25/28] refactor(book): batch-fetch progress for getBooks --- .../repository/BookProgressRepository.java | 1 + .../api/content/book/service/BookService.java | 61 +++++++++++++------ .../content/book/service/BookServiceTest.java | 24 +++----- 3 files changed, 52 insertions(+), 34 deletions(-) diff --git a/src/main/java/com/linglevel/api/content/book/repository/BookProgressRepository.java b/src/main/java/com/linglevel/api/content/book/repository/BookProgressRepository.java index 15ecbf3..076c955 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/BookProgressRepository.java +++ b/src/main/java/com/linglevel/api/content/book/repository/BookProgressRepository.java @@ -10,6 +10,7 @@ public interface BookProgressRepository extends MongoRepository { Optional findByUserIdAndBookId(String UserId, String bookId); + List findByUserIdAndBookIdIn(String userId, List bookIds); Page findAllByUserId(String userId, Pageable pageable); List findAllByUserId(String userId); List findByBookId(String bookId); diff --git a/src/main/java/com/linglevel/api/content/book/service/BookService.java b/src/main/java/com/linglevel/api/content/book/service/BookService.java index 6d0805e..a09e29d 100644 --- a/src/main/java/com/linglevel/api/content/book/service/BookService.java +++ 
b/src/main/java/com/linglevel/api/content/book/service/BookService.java @@ -28,7 +28,9 @@ import org.springframework.util.StringUtils; import java.time.Instant; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; @Service @@ -125,10 +127,12 @@ public PageResponse getBooks(GetBooksRequest request, String userI // QueryDSL Custom Repository를 사용하여 필터링 + 페이지네이션 통합 처리 Page bookPage = bookRepository.findBooksWithFilters(request, userId, pageable); + List books = bookPage.getContent(); + Map progressMap = getProgressMap(userId, books); LanguageCode languageCode = request.getLanguageCode(); - List bookResponses = bookPage.getContent().stream() - .map(book -> convertToBookResponse(book, userId, languageCode)) + List bookResponses = books.stream() + .map(book -> convertToBookResponse(book, progressMap.get(book.getId()), languageCode)) .collect(Collectors.toList()); return new PageResponse<>(bookResponses, bookPage); @@ -138,7 +142,11 @@ public BookResponse getBook(String bookId, String userId, LanguageCode languageC Book book = bookRepository.findById(bookId) .orElseThrow(() -> new BooksException(BooksErrorCode.BOOK_NOT_FOUND)); - return convertToBookResponse(book, userId, languageCode); + BookProgress progress = userId == null + ? 
null + : bookProgressRepository.findByUserIdAndBookId(userId, book.getId()).orElse(null); + + return convertToBookResponse(book, progress, languageCode); } public boolean existsById(String bookId) { @@ -179,29 +187,44 @@ private List filterByProgress(List bookResponses, Pr .collect(Collectors.toList()); } - private BookResponse convertToBookResponse(Book book, String userId, LanguageCode languageCode) { + private Map getProgressMap(String userId, List books) { + if (userId == null || books.isEmpty()) { + return Map.of(); + } + + List bookIds = books.stream().map(Book::getId).toList(); + List progresses = bookProgressRepository.findByUserIdAndBookIdIn(userId, bookIds); + if (progresses == null || progresses.isEmpty()) { + return Map.of(); + } + + Map progressMap = new HashMap<>(); + for (BookProgress progress : progresses) { + if (progress.getBookId() != null) { + // unique index(userId, bookId) 기준으로 bookId당 1건만 유지 + progressMap.putIfAbsent(progress.getBookId(), progress); + } + } + return progressMap; + } + + private BookResponse convertToBookResponse(Book book, BookProgress progress, LanguageCode languageCode) { // 진도 정보 조회 int currentReadChapterNumber = 0; double progressPercentage = 0.0; boolean isCompleted = false; - if (userId != null) { - BookProgress progress = bookProgressRepository - .findByUserIdAndBookId(userId, book.getId()) - .orElse(null); + if (progress != null) { + currentReadChapterNumber = progress.getCurrentReadChapterNumber() != null + ? progress.getCurrentReadChapterNumber() : 0; - if (progress != null) { - currentReadChapterNumber = progress.getCurrentReadChapterNumber() != null - ? progress.getCurrentReadChapterNumber() : 0; + // 진행률은 저장된 normalizedProgress를 단일 소스로 사용한다. + progressPercentage = progress.getNormalizedProgress() != null + ? progress.getNormalizedProgress() + : 0.0; - // 진행률은 저장된 normalizedProgress를 단일 소스로 사용한다. - progressPercentage = progress.getNormalizedProgress() != null - ? 
progress.getNormalizedProgress() - : 0.0; - - // DB에 저장된 완료 여부 사용 - isCompleted = progress.getIsCompleted() != null ? progress.getIsCompleted() : false; - } + // DB에 저장된 완료 여부 사용 + isCompleted = progress.getIsCompleted() != null ? progress.getIsCompleted() : false; } // 언어 코드에 따라 title 선택 diff --git a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java index 6993dcc..94d5b8a 100644 --- a/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java +++ b/src/test/java/com/linglevel/api/content/book/service/BookServiceTest.java @@ -314,11 +314,7 @@ void testProgressFilterWithPagination_Completed() { when(bookRepository.findBooksWithFilters(any(), eq(testUser.getId()), any())) .thenReturn(bookPage); - for (Book book : books) { - BookProgress progress = createBookProgress(testUser.getId(), book.getId(), true); - when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), book.getId())) - .thenReturn(Optional.of(progress)); - } + mockBookProgress(books, true); PageResponse response = bookService.getBooks(request, testUser.getId()); @@ -443,11 +439,7 @@ void testCombinedFiltersWithPagination() { when(bookRepository.findBooksWithFilters(any(), eq(testUser.getId()), any())) .thenReturn(bookPage); - for (Book book : books) { - BookProgress progress = createBookProgress(testUser.getId(), book.getId(), false); - when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), book.getId())) - .thenReturn(Optional.of(progress)); - } + mockBookProgress(books, false); PageResponse response = bookService.getBooks(request, testUser.getId()); @@ -487,11 +479,13 @@ private Book createBook(String title, String author, List tags) { } private void mockBookProgress(List books, boolean isCompleted) { - for (Book book : books) { - BookProgress progress = createBookProgress(testUser.getId(), book.getId(), isCompleted); - 
when(bookProgressRepository.findByUserIdAndBookId(testUser.getId(), book.getId())) - .thenReturn(Optional.of(progress)); - } + List bookIds = books.stream().map(Book::getId).toList(); + List progresses = books.stream() + .map(book -> createBookProgress(testUser.getId(), book.getId(), isCompleted)) + .toList(); + + when(bookProgressRepository.findByUserIdAndBookIdIn(testUser.getId(), bookIds)) + .thenReturn(progresses); } private BookProgress createBookProgress(String userId, String bookId, boolean isCompleted) { From a1b9ba6a71acd7dc76c5277abbb4d2a0cf047608 Mon Sep 17 00:00:00 2001 From: solfe Date: Sat, 25 Apr 2026 00:42:28 +0900 Subject: [PATCH 26/28] feat(k6): add local-k6 profile to disable rate limiting --- k6/README.md | 2 ++ .../api/common/ratelimit/config/RateLimitProperties.java | 2 ++ .../api/common/ratelimit/filter/RateLimitFilter.java | 5 +++++ src/main/resources/application-local-k6.properties | 4 ++++ 4 files changed, 13 insertions(+) create mode 100644 src/main/resources/application-local-k6.properties diff --git a/k6/README.md b/k6/README.md index 141a82a..0483cad 100644 --- a/k6/README.md +++ b/k6/README.md @@ -26,6 +26,8 @@ k6/ ## 전제 조건 - 애플리케이션이 로컬에서 실행 중이어야 한다. +- rate limit 영향 없이 k6를 실행하려면 앱을 `local,local-k6` 프로필로 실행한다. + - 예: `./gradlew bootRun --args='--spring.profiles.active=local,local-k6'` - MongoDB 에는 seed 데이터가 들어 있어야 한다. - 테스트용 사용자는 `X-Test-Username` 으로 인증 가능해야 한다. - 시드 생성은 [README.md](/Users/solfe/Desktop/WORK/llv/llv-api/k6/seed/README.md)를 따른다. 
diff --git a/src/main/java/com/linglevel/api/common/ratelimit/config/RateLimitProperties.java b/src/main/java/com/linglevel/api/common/ratelimit/config/RateLimitProperties.java index 71f6529..cd054c7 100644 --- a/src/main/java/com/linglevel/api/common/ratelimit/config/RateLimitProperties.java +++ b/src/main/java/com/linglevel/api/common/ratelimit/config/RateLimitProperties.java @@ -11,6 +11,8 @@ @ConfigurationProperties(prefix = "rate.limit") public class RateLimitProperties { + private boolean enabled = true; + private int capacity; private Refill refill = new Refill(); diff --git a/src/main/java/com/linglevel/api/common/ratelimit/filter/RateLimitFilter.java b/src/main/java/com/linglevel/api/common/ratelimit/filter/RateLimitFilter.java index 8e6af6f..c67754a 100644 --- a/src/main/java/com/linglevel/api/common/ratelimit/filter/RateLimitFilter.java +++ b/src/main/java/com/linglevel/api/common/ratelimit/filter/RateLimitFilter.java @@ -34,6 +34,11 @@ public class RateLimitFilter implements Filter { public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { + if (!rateLimitProperties.isEnabled()) { + chain.doFilter(request, response); + return; + } + HttpServletRequest httpRequest = (HttpServletRequest) request; HttpServletResponse httpResponse = (HttpServletResponse) response; diff --git a/src/main/resources/application-local-k6.properties b/src/main/resources/application-local-k6.properties new file mode 100644 index 0000000..3ea3c7c --- /dev/null +++ b/src/main/resources/application-local-k6.properties @@ -0,0 +1,4 @@ +# k6 전용 로컬 프로필 +# 실행 예: --spring.profiles.active=local,local-k6 + +rate.limit.enabled=false From f98478d12cf47989bc3e1e6a98641c8e71b5460e Mon Sep 17 00:00:00 2001 From: solfe Date: Sat, 25 Apr 2026 01:06:54 +0900 Subject: [PATCH 27/28] fix(book): classify partial reads as in-progress and align seed chunk position encoding --- .../book/seed-books-content.mongosh.js | 10 +++- 
.../book/repository/BookRepositoryImpl.java | 17 ++++-- .../repository/BookRepositoryImplTest.java | 53 ++++++++++++++++++- 3 files changed, 74 insertions(+), 6 deletions(-) diff --git a/k6/seed/content/book/seed-books-content.mongosh.js b/k6/seed/content/book/seed-books-content.mongosh.js index 92fcf21..3b72cf0 100644 --- a/k6/seed/content/book/seed-books-content.mongosh.js +++ b/k6/seed/content/book/seed-books-content.mongosh.js @@ -395,7 +395,7 @@ function buildCompletedBookProgress(progressId, user, bookEntry, random) { currentReadChapterNumber: bookEntry.chapterCount, maxReadChapterNumber: bookEntry.chapterCount, currentReadChunkNumber: lastChunkNumber, - maxReadChunkNumber: lastChunkNumber, + maxReadChunkNumber: encodeChapterFirstPosition(bookEntry.chapterCount, lastChunkNumber), normalizedProgress: 100, maxNormalizedProgress: 100, currentDifficultyLevel: bookEntry.primaryDifficulty, @@ -441,7 +441,7 @@ function buildInProgressBookProgress(progressId, user, bookEntry, random) { currentReadChapterNumber: currentChapterNumber, maxReadChapterNumber: currentChapterNumber, currentReadChunkNumber, - maxReadChunkNumber: currentReadChunkNumber, + maxReadChunkNumber: encodeChapterFirstPosition(currentChapterNumber, currentReadChunkNumber), normalizedProgress: roundToOneDecimal((completedChapterCount * 100) / chapterCount), maxNormalizedProgress: roundToOneDecimal((completedChapterCount * 100) / chapterCount), currentDifficultyLevel: bookEntry.primaryDifficulty, @@ -504,6 +504,12 @@ function getChunkIdsForProgress(chapter, difficultyLevel) { return chunkIds; } +function encodeChapterFirstPosition(chapterNumber, chunkNumber) { + const safeChapterNumber = Math.max(1, Number(chapterNumber) || 1); + const safeChunkNumber = Math.max(0, Number(chunkNumber) || 0); + return (safeChapterNumber * 65536) + safeChunkNumber; +} + function pickBookProfile(random) { const value = random(); diff --git a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java 
b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java index 6549775..c0a59d0 100644 --- a/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java +++ b/src/main/java/com/linglevel/api/content/book/repository/BookRepositoryImpl.java @@ -136,7 +136,7 @@ private List getNotStartedBookIds(String userId) { List startedBookIds = findStartedBookIds(userId); Set startedBookIdSet = new HashSet<>(startedBookIds); - // 시작하지 않은 책(진도 문서 없음 또는 normalizedProgress 0%)만 반환 + // 시작하지 않은 책(완료/부분 읽기/완료 챕터 진행률이 없는 책)만 반환 return allBookIds.stream() .filter(bookId -> !startedBookIdSet.contains(bookId)) .toList(); @@ -149,7 +149,10 @@ private List getInProgressBookIds(String userId) { Query query = new Query(); query.addCriteria(Criteria.where("userId").is(userId)); query.addCriteria(Criteria.where("isCompleted").is(false)); - query.addCriteria(Criteria.where("normalizedProgress").gt(0)); + query.addCriteria(new Criteria().orOperator( + Criteria.where("normalizedProgress").gt(0), + partiallyReadChapterCriteria() + )); return findBookIdsFromProgress(query); } @@ -173,12 +176,20 @@ private List findStartedBookIds(String userId) { query.addCriteria(Criteria.where("userId").is(userId)); query.addCriteria(new Criteria().orOperator( Criteria.where("isCompleted").is(true), - Criteria.where("normalizedProgress").gt(0) + Criteria.where("normalizedProgress").gt(0), + partiallyReadChapterCriteria() )); return findBookIdsFromProgress(query); } + private Criteria partiallyReadChapterCriteria() { + return Criteria.where("chapterProgresses").elemMatch( + Criteria.where("isCompleted").is(false) + .and("progressPercentage").gt(0) + ); + } + /** * Progress 컬렉션에서 bookId 추출 */ diff --git a/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java index bd5b866..313f630 100644 --- 
a/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java +++ b/src/test/java/com/linglevel/api/content/book/repository/BookRepositoryImplTest.java @@ -67,7 +67,7 @@ void findBooksWithFilters_returnsNotStartedBooks() { } @Test - @DisplayName("IN_PROGRESS 필터는 normalizedProgress > 0이고 완료되지 않은 책만 반환한다") + @DisplayName("IN_PROGRESS 필터는 완료되지 않았고 읽기를 시작한 책을 반환한다") void findBooksWithFilters_returnsInProgressBooks() { GetBooksRequest request = GetBooksRequest.builder() .progress(ProgressStatus.IN_PROGRESS) @@ -79,6 +79,22 @@ void findBooksWithFilters_returnsInProgressBooks() { assertThat(result.getTotalElements()).isEqualTo(1); } + @Test + @DisplayName("normalizedProgress가 0이어도 부분 읽기면 IN_PROGRESS로 분류한다") + void findBooksWithFilters_includesPartialReadAsInProgress() { + bookProgressRepository.deleteAll(); + mongoTemplate.insert(createPartialInProgressDocument("book-1", 1, 2, 20.0), "bookProgress"); + + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.IN_PROGRESS) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + assertThat(result.getContent()).extracting(Book::getId).containsExactly("book-1"); + assertThat(result.getTotalElements()).isEqualTo(1); + } + @Test @DisplayName("COMPLETED 필터는 완료된 책만 반환한다") void findBooksWithFilters_returnsCompletedBooks() { @@ -123,6 +139,23 @@ void findBooksWithFilters_includesZeroProgressAsNotStarted() { assertThat(result.getTotalElements()).isEqualTo(1); } + @Test + @DisplayName("부분 읽기 데이터는 NOT_STARTED에서 제외한다") + void findBooksWithFilters_excludesPartialReadFromNotStarted() { + bookProgressRepository.deleteAll(); + mongoTemplate.insert(createPartialInProgressDocument("book-1", 1, 2, 20.0), "bookProgress"); + + GetBooksRequest request = GetBooksRequest.builder() + .progress(ProgressStatus.NOT_STARTED) + .build(); + + Page result = bookRepository.findBooksWithFilters(request, USER_ID, defaultPageable()); + + 
assertThat(result.getContent()).extracting(Book::getId) + .containsExactly("book-2", "book-3"); + assertThat(result.getTotalElements()).isEqualTo(2); + } + private Pageable defaultPageable() { return PageRequest.of(0, 10, Sort.by(Sort.Direction.ASC, "createdAt")); } @@ -144,4 +177,22 @@ private Document createProgressDocument(String bookId, boolean isCompleted, doub .append("isCompleted", isCompleted) .append("normalizedProgress", normalizedProgress); } + + private Document createPartialInProgressDocument( + String bookId, + int chapterNumber, + int chunkNumber, + double progressPercentage + ) { + int encodedPosition = chapterNumber * 65536 + chunkNumber; + Document chapterProgress = new Document("chapterNumber", chapterNumber) + .append("progressPercentage", progressPercentage) + .append("isCompleted", false) + .append("completedAt", null); + + return createProgressDocument(bookId, false, 0.0) + .append("maxReadChapterNumber", chapterNumber) + .append("maxReadChunkNumber", encodedPosition) + .append("chapterProgresses", List.of(chapterProgress)); + } } From 4e279b6e93b1633e853ab1230873a9470220c487 Mon Sep 17 00:00:00 2001 From: solfe Date: Sat, 25 Apr 2026 01:21:33 +0900 Subject: [PATCH 28/28] test(feed): skip external RSS integration tests in CI --- .../content/feed/service/Formula1EspnThumbnailTest.java | 6 ++++++ .../api/content/feed/service/NewFeedSourcesTest.java | 8 +++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/test/java/com/linglevel/api/content/feed/service/Formula1EspnThumbnailTest.java b/src/test/java/com/linglevel/api/content/feed/service/Formula1EspnThumbnailTest.java index 548e67a..e9a70ca 100644 --- a/src/test/java/com/linglevel/api/content/feed/service/Formula1EspnThumbnailTest.java +++ b/src/test/java/com/linglevel/api/content/feed/service/Formula1EspnThumbnailTest.java @@ -9,6 +9,7 @@ import org.jsoup.nodes.Document; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import java.net.URL; import java.util.List; @@ -16,6 +17,11 @@ import static org.junit.jupiter.api.Assertions.*; @DisplayName("Formula1 & ESPN 썸네일 추출 DSL 테스트") +@DisabledIfEnvironmentVariable( + named = "CI", + matches = "true", + disabledReason = "외부 RSS/웹 페이지 의존 통합 테스트는 CI 환경에서 불안정하여 로컬에서만 실행" +) class Formula1EspnThumbnailTest { // 권장 DSL (다양한 사이트에서 작동) diff --git a/src/test/java/com/linglevel/api/content/feed/service/NewFeedSourcesTest.java b/src/test/java/com/linglevel/api/content/feed/service/NewFeedSourcesTest.java index 6647d5f..546c865 100644 --- a/src/test/java/com/linglevel/api/content/feed/service/NewFeedSourcesTest.java +++ b/src/test/java/com/linglevel/api/content/feed/service/NewFeedSourcesTest.java @@ -6,6 +6,7 @@ import com.rometools.rome.io.XmlReader; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import java.lang.reflect.Method; import java.net.URL; @@ -14,6 +15,11 @@ import static org.junit.jupiter.api.Assertions.*; @DisplayName("새로운 RSS Feed 소스 파싱 테스트 (Formula1, ESPN)") +@DisabledIfEnvironmentVariable( + named = "CI", + matches = "true", + disabledReason = "외부 RSS 의존 통합 테스트는 CI 환경에서 불안정하여 로컬에서만 실행" +) class NewFeedSourcesTest { @Test @@ -245,4 +251,4 @@ void testBothNewSourcesComparison() throws Exception { } } } -} \ No newline at end of file +}