diff --git a/workspaces/scorecard/.changeset/sour-coins-check.md b/workspaces/scorecard/.changeset/sour-coins-check.md new file mode 100644 index 0000000000..5a8f2bfe37 --- /dev/null +++ b/workspaces/scorecard/.changeset/sour-coins-check.md @@ -0,0 +1,6 @@ +--- +'@red-hat-developer-hub/backstage-plugin-scorecard-backend': minor +'@red-hat-developer-hub/backstage-plugin-scorecard-common': minor +--- + +Adds the ability to drill down from aggregated scorecard KPIs to view the individual entities that contribute to the overall score. This enables managers and platform engineers to identify specific services impacting metrics and troubleshoot issues at the entity level. diff --git a/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockDatabaseMetricValues.ts b/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockDatabaseMetricValues.ts index 5e9493bc50..7e1e8424ff 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockDatabaseMetricValues.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockDatabaseMetricValues.ts @@ -22,6 +22,7 @@ type BuildMockDatabaseMetricValuesParams = { latestEntityMetric?: DbMetricValue[]; countOfExpiredMetrics?: number; aggregatedMetric?: DbAggregatedMetric; + entityMetricsByStatus?: { rows: DbMetricValue[]; total: number }; }; export const mockDatabaseMetricValues = { @@ -29,6 +30,7 @@ export const mockDatabaseMetricValues = { readLatestEntityMetricValues: jest.fn(), cleanupExpiredMetrics: jest.fn(), readAggregatedMetricByEntityRefs: jest.fn(), + readEntityMetricsByStatus: jest.fn(), } as unknown as jest.Mocked; export const buildMockDatabaseMetricValues = ({ @@ -36,6 +38,7 @@ export const buildMockDatabaseMetricValues = ({ latestEntityMetric, countOfExpiredMetrics, aggregatedMetric, + entityMetricsByStatus, }: BuildMockDatabaseMetricValuesParams) => { const createMetricValues = metricValues ? 
jest.fn().mockResolvedValue(metricValues) @@ -53,10 +56,15 @@ export const buildMockDatabaseMetricValues = ({ ? jest.fn().mockResolvedValue(aggregatedMetric) : mockDatabaseMetricValues.readAggregatedMetricByEntityRefs; + const readEntityMetricsByStatus = entityMetricsByStatus + ? jest.fn().mockResolvedValue(entityMetricsByStatus) + : mockDatabaseMetricValues.readEntityMetricsByStatus; + return { createMetricValues, readLatestEntityMetricValues, cleanupExpiredMetrics, readAggregatedMetricByEntityRefs, + readEntityMetricsByStatus, } as unknown as jest.Mocked; }; diff --git a/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockMetricProvidersRegistry.ts b/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockMetricProvidersRegistry.ts index 50900883fb..84be73a096 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockMetricProvidersRegistry.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/__fixtures__/mockMetricProvidersRegistry.ts @@ -49,10 +49,16 @@ export const buildMockMetricProvidersRegistry = ({ return metricsList; }) : jest.fn(); + const getMetric = provider + ? jest.fn().mockImplementation((_metricId: string) => { + return provider.getMetric(); // Returns the metric from the provider + }) + : jest.fn(); return { ...mockMetricProvidersRegistry, getProvider, listMetrics, + getMetric, } as unknown as jest.Mocked; }; diff --git a/workspaces/scorecard/plugins/scorecard-backend/docs/drill-down.md b/workspaces/scorecard/plugins/scorecard-backend/docs/drill-down.md new file mode 100644 index 0000000000..3a79992509 --- /dev/null +++ b/workspaces/scorecard/plugins/scorecard-backend/docs/drill-down.md @@ -0,0 +1,484 @@ +# Entity Drill-Down + +The Scorecard plugin provides a drill-down endpoint that returns detailed entity-level metrics with filtering, sorting, and pagination capabilities. 
This feature allows users to investigate the individual entities that contribute to aggregated scorecard metrics, enabling detailed analysis and troubleshooting. + +## Overview + +The drill-down endpoint (`/metrics/:metricId/catalog/aggregations/entities`) provides a detailed view of entities and their metric values. It allows managers and platform engineers to: + +- See individual entities contributing to aggregated scores +- Filter entities by status (success/warning/error), owner, kind, or entity ref substring +- Sort by any column (entity name, owner, kind, timestamp, metric value) +- Paginate through large result sets +- Understand data freshness through per-entity timestamps + +This endpoint transforms the scorecard from a passive reporting tool into an actionable diagnostic interface. + +## API Endpoint + +### `GET /metrics/:metricId/catalog/aggregations/entities` + +Returns a paginated list of entities with their metric values, enriched with catalog metadata. + +#### Path Parameters + +| Parameter | Type | Required | Description | +| ---------- | ------ | -------- | ---------------------------------------------- | +| `metricId` | string | Yes | The ID of the metric (e.g., `github.open_prs`) | + +#### Query Parameters + +| Parameter | Type | Required | Default | Description | +| ------------ | ---------------- | -------- | ----------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `status` | string | No | - | Filter by threshold status: `success`, `warning`, or `error` | +| `owner` | string/string\[] | No | - | Filter by owner entity ref. Repeat to supply multiple values (e.g., `?owner=a&owner=b`) | +| `kind` | string | No | - | Filter by entity kind (e.g., `Component`, `API`, `System`) | +| `entityName` | string | No | - | Substring search against the entity ref (`kind:namespace/name`). 
Matches any part of the ref (case-insensitive). Use the name portion for simple searches (e.g., `auth` matches `component:default/auth-service`) | +| `sortBy` | string | No | `timestamp` | Sort by: `entityName`, `owner`, `entityKind`, `timestamp`, or `metricValue` | +| `sortOrder` | string | No | `desc` | Sort direction: `asc` or `desc` | +| `page` | number | No | `1` | Page number (1-indexed) | +| `pageSize` | number | No | `5` | Number of entities per page (max: 100) | + +#### Authentication + +Requires user authentication. + +#### Permissions + +Requires `scorecard.metric.read` permission. Additionally: + +- The user must have access to the specific metric (returns `403 Forbidden` if access is denied) +- The user must have `catalog.entity.read` permission for each entity that will be included in the results + +#### Response Schema + +```typescript +{ + metricId: string; + metricMetadata: { + title: string; + description: string; + type: 'number' | 'boolean'; + }; + entities: EntityMetricDetail[]; + pagination: { + page: number; + pageSize: number; + total: number; + totalPages: number; + }; +} + +type EntityMetricDetail = { + entityRef: string; // Full entity reference (e.g., "component:default/my-service") + entityName: string; // Entity name from catalog + entityKind: string; // Entity kind (e.g., "Component", "API") + owner: string; // Owner entity reference or name + metricValue: number | boolean | null; // The actual metric value + timestamp: string; // ISO 8601 timestamp of when metric was synced + status: 'success' | 'warning' | 'error'; // Threshold evaluation status +}; +``` + +## Usage Examples + +### Basic Drill-Down + +Get the first page of entities for a metric: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Filter by Status + +Get only entities in error state: + +```bash +curl -X GET 
"{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?status=error&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Filter by Ownership + +Get entities owned by a specific team: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?owner=team:default/platform&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +Get entities owned by multiple teams (repeat the `owner` parameter): + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?owner=team:default/platform&owner=team:default/backend&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Filter by Entity Kind + +Get only Component entities: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?kind=Component&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Search by Entity Name + +Search for entities with "service" in their name: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?entityName=service&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Sorting + +Sort by metric value (highest first): + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?sortBy=metricValue&sortOrder=desc&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +Sort by entity name alphabetically: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?sortBy=entityName&sortOrder=asc&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +### Combining Filters + +Get Component entities with errors for a specific team, sorted by metric value: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?owner=team:default/platform&status=error&kind=Component&sortBy=metricValue&sortOrder=desc&page=1&pageSize=10" \ + -H "Authorization: 
Bearer " +``` + +## Response Example + +```json +{ + "metricId": "github.open_prs", + "metricMetadata": { + "title": "Open Pull Requests", + "description": "Number of open pull requests in GitHub", + "type": "number" + }, + "entities": [ + { + "entityRef": "component:default/my-service", + "entityName": "my-service", + "entityKind": "Component", + "owner": "team:default/platform", + "metricValue": 15, + "timestamp": "2026-02-17T10:30:00Z", + "status": "error" + }, + { + "entityRef": "component:default/another-service", + "entityName": "another-service", + "entityKind": "Component", + "owner": "team:default/backend", + "metricValue": 8, + "timestamp": "2026-02-17T10:25:00Z", + "status": "warning" + } + ], + "pagination": { + "page": 1, + "pageSize": 10, + "total": 42, + "totalPages": 5 + } +} +``` + +## Filtering Behavior + +### Status Filtering + +When `status` is specified, only entities with that threshold evaluation are returned: + +- `status=success`: Entities meeting success thresholds +- `status=warning`: Entities meeting warning thresholds +- `status=error`: Entities failing thresholds or in error state + +Status filtering is performed at the database level for optimal performance. + +### Owner Filtering + +The `owner` parameter filters entities by their catalog owner (`spec.owner`). Repeat the parameter to match any of several owners (up to 50): + +```bash +# Get entities owned by a specific team +?owner=team:default/platform + +# Get entities owned by a specific user +?owner=user:default/alice + +# Get entities owned by either of two teams +?owner=team:default/platform&owner=team:default/backend +``` + +This filter is applied at the database level for optimal performance. Frontends can implement "owned by me" scoping by passing the user's `identityApi.ownershipEntityRefs` (user ref + direct group refs) as repeated `owner` values. 
+ +### Kind Filtering + +Filter by entity kind to narrow results to specific entity types: + +```bash +# Only Components +?kind=Component + +# Only APIs +?kind=API + +# Only Systems +?kind=System +``` + +Kind filtering is performed at the database level for optimal performance. + +### Entity Name Search + +The `entityName` parameter performs a case-insensitive substring search against the full entity reference, which has the format `kind:namespace/name` (e.g., `component:default/auth-service`). + +```bash +# Match by name fragment — matches component:default/auth-service, api:default/auth-api, etc. +?entityName=auth + +# Match by name fragment — matches component:default/my-service, component:default/service-api, etc. +?entityName=service + +# Match more precisely using the full ref format +?entityName=component:default/auth-service +``` + +Because the search runs against the entire ref string, searching by just the name portion (the part after `/`) is the most common and natural usage. Be aware that the search term could also match the kind or namespace prefix if those happen to contain the search string (e.g., `?entityName=default` would match all entities in the `default` namespace). + +Entity name filtering is performed at the database level for consistent pagination and accurate total counts. + +## Sorting + +Results can be sorted by any column in ascending or descending order. Sorting is applied at the database level, so the correct order is guaranteed across all pages regardless of which filters are active. 
+ +### Sort Options + +| Sort By | Description | Example Values | +| ------------- | ---------------------------------------------------------------------------------------------- | ------------------------------------------------------ | +| `entityName` | Full entity ref (`kind:namespace/name`) alphabetically — equivalent to sorting by the full ref | "api:default/api-service", "component:default/web-app" | +| `owner` | Owner entity reference alphabetically | "team:default/platform" | +| `entityKind` | Entity kind alphabetically | "API", "Component" | +| `timestamp` | Metric sync timestamp (most/least recent) | ISO 8601 timestamps | +| `metricValue` | Metric value numerically (highest/lowest first) | 5, 15, 25, 100 | + +### Default Sorting + +If no `sortBy` is specified, results are sorted by `timestamp` in descending order (most recent first). + +### Null Value Handling + +When sorting by `metricValue`, entities with `null` values are sorted to the end regardless of sort order. + +## Pagination + +The endpoint uses offset-based pagination: + +- **Default page size**: 5 entities +- **Maximum page size**: 100 entities +- **Page numbering**: 1-indexed (first page is `page=1`) + +The response includes pagination metadata: + +```json +{ + "pagination": { + "page": 1, // Current page + "pageSize": 10, // Entities per page + "total": 42, // Total matching entities across all pages + "totalPages": 5 // Total number of pages + } +} +``` + +### Pagination Performance + +All filters (`status`, `owner`, `kind`, and `entityName`) and sorting (`sortBy`, `sortOrder`) are applied at the database level before pagination. The `ORDER BY` and `LIMIT`/`OFFSET` are always pushed to the database, so only the requested page of rows is fetched in the correct order regardless of which filters are active. + +For best performance with large datasets, combine specific filters to reduce the result set size before paginating. 
+ +## Error Handling + +### Invalid Metric ID + +If the specified metric does not exist: + +- **Status Code**: `404 Not Found` +- **Error**: `NotFoundError: Metric not found` + +### Missing User Entity Reference + +If the authenticated user doesn't have an entity reference: + +- **Status Code**: `401 Unauthorized` +- **Error**: `AuthenticationError: User entity reference not found` + +### Permission Denied + +If the user doesn't have access to the metric: + +- **Status Code**: `403 Forbidden` +- **Error**: `NotAllowedError: To view the scorecard metrics, your administrator must grant you the required permission.` + +### Invalid Query Parameters + +If query parameters are invalid (e.g., `pageSize > 100`): + +- **Status Code**: `400 Bad Request` +- **Error**: Description of the validation error + +### Empty Results + +When no entities match the filters: + +- **Status Code**: `200 OK` +- **Response**: Empty entities array with `total: 0` + +```json +{ + "metricId": "github.open_prs", + "metricMetadata": { ... }, + "entities": [], + "pagination": { + "page": 1, + "pageSize": 10, + "total": 0, + "totalPages": 0 + } +} +``` + +## Data Freshness + +Each entity includes a `timestamp` field indicating when the metric value was last synced. This helps users understand data recency and identify stale metrics. + +The timestamp represents when the metric provider last successfully fetched and evaluated the metric for that specific entity. Timestamps may vary across entities depending on: + +- When entities were added to the catalog +- Individual metric sync schedules +- Failures or errors in previous sync attempts + +## Use Cases + +### Use Case 1: Identify Services in Error State + +A manager sees an aggregated scorecard showing 50 entities with errors. 
They drill down to see which specific services need attention: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?status=error&sortBy=metricValue&sortOrder=desc&page=1&pageSize=20" \ + -H "Authorization: Bearer " +``` + +This returns the 20 entities with the most severe issues (highest metric values in error state). + +### Use Case 2: Review Team-Specific Metrics + +A team lead wants to see only their team's entities: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/jira.open_issues/catalog/aggregations/entities?owner=team:default/backend&sortBy=timestamp&sortOrder=asc" \ + -H "Authorization: Bearer " +``` + +This shows the team's entities sorted by staleness (oldest data first), helping identify entities that may need attention. + +### Use Case 3: Audit Specific Entity Type + +An architect wants to review all API entities: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/openssf.score/catalog/aggregations/entities?kind=API&status=warning&page=1&pageSize=25" \ + -H "Authorization: Bearer " +``` + +This returns API entities with security warnings, helping prioritize security improvements. + +### Use Case 4: Personal Dashboard + +An engineer wants to see only their owned entities with issues. The frontend passes the user's `ownershipEntityRefs` (user ref + group memberships) as repeated `owner` params: + +```bash +curl -X GET "{{url}}/api/scorecard/metrics/github.open_prs/catalog/aggregations/entities?owner=user:default/alice&owner=team:default/platform&page=1&pageSize=10" \ + -H "Authorization: Bearer " +``` + +This returns a personalized view scoped to the entities the engineer and their teams are responsible for. + +## Limitations + +### Entity Metadata Freshness + +Entity metadata (name, kind, owner) is fetched from the catalog at request time and reflects the current state. However, metric values and timestamps represent historical data from the last sync. 
This means: + +- If an entity was renamed, the new name appears +- If ownership changed, the new owner appears +- But the metric value and timestamp are from the last sync, not re-calculated + +## Troubleshooting + +### Empty Results + +**Symptom**: `total: 0` even though aggregation shows entities in that category + +**Possible causes**: + +1. **Stale aggregation data**: Aggregation was cached, entities have since changed status +2. **Permission changes**: User lost access to entities between viewing aggregation and drill-down +3. **Incorrect filters**: Check filter parameters match the aggregation criteria + +### Missing Entity Metadata + +**Symptom**: Entities show "Unknown" for name, kind, or owner + +**Possible causes**: + +1. **Entity deleted from catalog**: Entity ref exists in metrics but entity was removed from the catalog +2. **Permission denied**: User lacks `catalog.entity.read` for that entity + +**Resolution**: Check whether the entity still exists in the catalog and that the user has the appropriate read permissions. + +### Catalog Unavailable + +**Symptom**: Empty entity list despite knowing entities with metric data exist + +**Possible causes**: + +1. **Catalog API unreachable**: The endpoint could not contact the catalog to verify entity access. To protect against unauthorized data exposure, results are not returned when authorization cannot be confirmed. + +**Resolution**: Check backend logs for `Failed to fetch entities from catalog` error entries and confirm the catalog service is healthy. + +### Slow Responses + +**Symptom**: Response times > 5 seconds + +**Possible causes**: + +1. **Large result set**: Too many entities match the filters +2. 
**No filters applied**: Returning all entities in the system + +**Resolution**: + +- Use more specific filters (status, kind, owner, entityName) +- Reduce page size +- Use the `owner` filter to scope results to specific teams + +## Related Documentation + +- [Entity Aggregation](./aggregation.md) - Parent aggregation endpoint that shows summary counts +- [Thresholds](./thresholds.md) - How threshold evaluation determines success/warning/error status +- [Metric Providers](./providers.md) - How metrics are collected and stored diff --git a/workspaces/scorecard/plugins/scorecard-backend/migrations/20260217152637_add_entity_metadata_columns.js b/workspaces/scorecard/plugins/scorecard-backend/migrations/20260217152637_add_entity_metadata_columns.js new file mode 100644 index 0000000000..f3f26b3b9a --- /dev/null +++ b/workspaces/scorecard/plugins/scorecard-backend/migrations/20260217152637_add_entity_metadata_columns.js @@ -0,0 +1,33 @@ +/* + * Copyright Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +exports.up = async function up(knex) { + await knex.schema.alterTable('metric_values', table => { + // Add entity_kind column (e.g., "Component", "API", "System") + table.string('entity_kind', 255).nullable(); + + // Add entity_owner column (stores the owner entity ref) + table.string('entity_owner', 255).nullable(); + }); +}; + +exports.down = async function down(knex) { + await knex.schema.alterTable('metric_values', table => { + // Drop columns + table.dropColumn('entity_kind'); + table.dropColumn('entity_owner'); + }); +}; diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.test.ts b/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.test.ts index 099d0f0334..87d789c584 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.test.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.test.ts @@ -258,4 +258,721 @@ describe('DatabaseMetricValues', () => { }, ); }); + + describe('readEntityMetricsByStatus', () => { + it.each(databases.eachSupportedId())( + 'should return paginated entity metrics filtered by status - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const baseTime = new Date('2023-01-01T00:00:00Z'); + const laterTime = new Date('2023-01-01T01:00:00Z'); + + // Insert test data with different statuses + await client('metric_values').insert([ + // Older value for service1 - should be ignored + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 999, + timestamp: baseTime, + status: 'success', + }, + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp: laterTime, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp: laterTime, + status: 'success', + }, + { + catalog_entity_ref: 
'component:default/service3', + metric_id: 'github.metric1', + value: 15, + timestamp: laterTime, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service4', + metric_id: 'github.metric1', + value: 3, + timestamp: laterTime, + status: 'warning', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 10, offset: 0 }, + }); + + // Should return 2 entities with error status + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + + // Check that both are error status + expect(result.rows[0].status).toBe('error'); + expect(result.rows[1].status).toBe('error'); + + // Verify it's the latest values (not the old one for service1) + const service1Result = result.rows.find( + r => r.catalog_entity_ref === 'component:default/service1', + ); + expect(service1Result?.value).toBe(10); // Not 999 from older entry + }, + ); + + it.each(databases.eachSupportedId())( + 'should return all statuses when no filter provided - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'success', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'warning', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(3); + expect(result.total).toBe(3); + }, + ); + + it.each(databases.eachSupportedId())( + 'should handle pagination correctly - %p', + async databaseId => { + const { client, db } = await 
createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + // Insert 5 entities with same status + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 1, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 2, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 3, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service4', + metric_id: 'github.metric1', + value: 4, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service5', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + }, + ]); + + // Page 1: limit 2 + const page1 = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 2, offset: 0 }, + }); + + expect(page1.rows).toHaveLength(2); + expect(page1.total).toBe(5); + + // Page 2: limit 2, offset 2 + const page2 = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 2, offset: 2 }, + }); + + expect(page2.rows).toHaveLength(2); + expect(page2.total).toBe(5); // Total should stay the same + + // Page 3: limit 2, offset 4 + const page3 = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 2, offset: 4 }, + }); + + expect(page3.rows).toHaveLength(1); // Only 1 left on page 3 + expect(page3.total).toBe(5); + }, + ); + + it.each(databases.eachSupportedId())( + 'should return empty result when database has no rows for the metric - %p', + async databaseId => { + const { db } = await createDatabase(databaseId); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(0); 
+ expect(result.total).toBe(0); + }, + ); + + it.each(databases.eachSupportedId())( + 'should filter by entity kind - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + // Insert entities with different kinds + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'api:default/api1', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'API', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + entityKind: 'Component', // Filter by kind + pagination: { limit: 10, offset: 0 }, + }); + + // Should only return Component entities + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + expect(result.rows[0].entity_kind).toBe('Component'); + expect(result.rows[1].entity_kind).toBe('Component'); + }, + ); + + it.each(databases.eachSupportedId())( + 'should filter by entity owner - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + // Insert entities with different owners + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, 
+ status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + entityOwner: ['team:default/platform'], // Filter by owner + pagination: { limit: 10, offset: 0 }, + }); + + // Should only return entities owned by team:default/platform + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + expect(result.rows[0].entity_owner).toBe('team:default/platform'); + expect(result.rows[1].entity_owner).toBe('team:default/platform'); + }, + ); + + it.each(databases.eachSupportedId())( + 'should filter by status, kind, and owner together - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + // Insert diverse test data + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'api:default/api1', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'API', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'warning', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 20, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + 
status: 'error', // Only error status + entityKind: 'Component', // Only Component kind + entityOwner: ['team:default/platform'], // Only platform team + pagination: { limit: 10, offset: 0 }, + }); + + // Should only return service1 (Component, error, platform) + expect(result.rows).toHaveLength(1); + expect(result.total).toBe(1); + expect(result.rows[0].catalog_entity_ref).toBe( + 'component:default/service1', + ); + expect(result.rows[0].status).toBe('error'); + expect(result.rows[0].entity_kind).toBe('Component'); + expect(result.rows[0].entity_owner).toBe('team:default/platform'); + }, + ); + + it.each(databases.eachSupportedId())( + 'should work without pagination (fetch all) - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + ]); + + // No pagination parameter - should return all + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + }); + + expect(result.rows).toHaveLength(3); + expect(result.total).toBe(3); + }, + ); + + it.each(databases.eachSupportedId())( + 'should handle null entity_kind and entity_owner - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + // Insert entity with 
null kind/owner (legacy data) + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: null, + entity_owner: null, + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + ]); + + // Should return both when no filters + const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + + // Should only return service2 when filtering by kind + const filteredResult = await db.readEntityMetricsByStatus( + 'github.metric1', + { + status: 'error', + entityKind: 'Component', + pagination: { limit: 10, offset: 0 }, + }, + ); + + expect(filteredResult.rows).toHaveLength(1); + expect(filteredResult.rows[0].catalog_entity_ref).toBe( + 'component:default/service2', + ); + }, + ); + + it.each(databases.eachSupportedId())( + 'should return all rows when no owner filter is applied - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'success', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'warning', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + ]); + + // No owner filter — all rows for the metric are returned. + // Per-row authorization is enforced downstream by catalog.getEntitiesByRefs. 
+ const result = await db.readEntityMetricsByStatus('github.metric1', { + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + }, + ); + + it.each(databases.eachSupportedId())( + 'should filter by multiple owner refs - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service1', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service2', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + { + catalog_entity_ref: 'component:default/service3', + metric_id: 'github.metric1', + value: 8, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/other', + }, + ]); + + // Passing two owners returns only those two teams' entities. 
+ const result = await db.readEntityMetricsByStatus('github.metric1', { + status: 'error', + entityOwner: ['team:default/platform', 'team:default/backend'], + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + expect( + result.rows + .map(r => r.entity_owner) + .filter((o): o is string => o !== null) + .sort((a, b) => a.localeCompare(b)), + ).toEqual(['team:default/backend', 'team:default/platform']); + }, + ); + + it.each(databases.eachSupportedId())( + 'should filter by entityName substring via catalog_entity_ref LIKE - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/my-service', + metric_id: 'github.metric1', + value: 10, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/service-api', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + catalog_entity_ref: 'component:default/unrelated', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + ]); + + // 'service' should match 'my-service' and 'service-api' but not 'unrelated' + const result = await db.readEntityMetricsByStatus('github.metric1', { + entityName: 'service', + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(2); + expect(result.total).toBe(2); + expect( + result.rows + .map(r => r.catalog_entity_ref) + .sort((a, b) => a.localeCompare(b)), + ).toEqual([ + 'component:default/my-service', + 'component:default/service-api', + ]); + }, + ); + + it.each(databases.eachSupportedId())( + 'should sort by catalog_entity_ref ascending when 
sortBy=entityName - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service-c', + metric_id: 'github.metric1', + value: 1, + timestamp, + status: 'success', + }, + { + catalog_entity_ref: 'component:default/service-a', + metric_id: 'github.metric1', + value: 2, + timestamp, + status: 'success', + }, + { + catalog_entity_ref: 'component:default/service-b', + metric_id: 'github.metric1', + value: 3, + timestamp, + status: 'success', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + sortBy: 'entityName', + sortOrder: 'asc', + pagination: { limit: 10, offset: 0 }, + }); + + expect(result.rows).toHaveLength(3); + expect(result.rows[0].catalog_entity_ref).toBe( + 'component:default/service-a', + ); + expect(result.rows[1].catalog_entity_ref).toBe( + 'component:default/service-b', + ); + expect(result.rows[2].catalog_entity_ref).toBe( + 'component:default/service-c', + ); + }, + ); + + it.each(databases.eachSupportedId())( + 'should sort by value descending with nulls last when sortBy=metricValue - %p', + async databaseId => { + const { client, db } = await createDatabase(databaseId); + + const timestamp = new Date('2023-01-01T00:00:00Z'); + + await client('metric_values').insert([ + { + catalog_entity_ref: 'component:default/service-a', + metric_id: 'github.metric1', + value: null, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service-b', + metric_id: 'github.metric1', + value: 5, + timestamp, + status: 'error', + }, + { + catalog_entity_ref: 'component:default/service-c', + metric_id: 'github.metric1', + value: 15, + timestamp, + status: 'error', + }, + ]); + + const result = await db.readEntityMetricsByStatus('github.metric1', { + sortBy: 'metricValue', + sortOrder: 'desc', + pagination: { limit: 10, offset: 0 }, + 
}); + + expect(result.rows).toHaveLength(3); + expect(result.rows[0].value).toBe(15); + expect(result.rows[1].value).toBe(5); + expect(result.rows[2].value).toBeNull(); // null sorted last + }, + ); + }); }); diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.ts b/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.ts index 5b40fbee15..54846b8ce4 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/database/DatabaseMetricValues.ts @@ -21,6 +21,16 @@ import { DbAggregatedMetric, } from './types'; +type ReadEntityMetricsByStatusOptions = { + status?: 'success' | 'warning' | 'error'; + entityName?: string; + entityKind?: string; + entityOwner?: string[]; + sortBy?: 'entityName' | 'owner' | 'entityKind' | 'timestamp' | 'metricValue'; + sortOrder?: 'asc' | 'desc'; + pagination?: { limit: number; offset: number }; +}; + export class DatabaseMetricValues { private readonly tableName = 'metric_values'; @@ -129,4 +139,72 @@ export class DatabaseMetricValues { return undefined; } + + /** + * Fetch entity metric values filtered by status with pagination + * Returns both the paginated rows and total count for pagination + */ + async readEntityMetricsByStatus( + metric_id: string, + options: ReadEntityMetricsByStatusOptions, + ): Promise<{ rows: DbMetricValue[]; total: number }> { + const latestIdsSubquery = this.dbClient(this.tableName) + .max('id') + .where('metric_id', metric_id) + .groupBy('metric_id', 'catalog_entity_ref'); + + const query = this.dbClient(this.tableName) + .select('*') + .select(this.dbClient.raw('COUNT(*) OVER() as total_count')) + .whereIn('id', latestIdsSubquery) + .where('metric_id', metric_id); + + const sortColumnMap: Record = { + entityName: 'catalog_entity_ref', + owner: 'entity_owner', + entityKind: 'entity_kind', + timestamp: 'timestamp', + metricValue: 'value', + }; + + const 
column = + (options.sortBy && sortColumnMap[options.sortBy]) ?? 'timestamp'; + const direction = options.sortOrder === 'asc' ? 'asc' : 'desc'; + + // Nulls last for metricValue (value can be null) + if (options.sortBy === 'metricValue') { + query.orderByRaw( + `value IS NULL, CAST(CAST(value AS TEXT) AS REAL) ${direction}`, + ); + } else { + query.orderBy(column, direction); + } + + if (options.status) { + query.where('status', options.status); + } + + if (options.entityName) { + query.whereRaw('LOWER(catalog_entity_ref) LIKE LOWER(?)', [ + `%${options.entityName}%`, + ]); + } + + if (options.entityKind) { + query.whereRaw('LOWER(entity_kind) = LOWER(?)', [options.entityKind]); + } + + if (options.entityOwner && options.entityOwner.length > 0) { + query.whereIn('entity_owner', options.entityOwner); + } + + if (options.pagination) { + query.limit(options.pagination.limit).offset(options.pagination.offset); + } + + const rows = await query; + const total = rows.length > 0 ? Number(rows[0].total_count) : 0; + + return { rows, total }; + } } diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/database/types.ts b/workspaces/scorecard/plugins/scorecard-backend/src/database/types.ts index 62c5576c8b..aca204c46a 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/database/types.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/database/types.ts @@ -25,6 +25,8 @@ export type DbMetricValueCreate = { timestamp: Date; error_message?: string; status?: DbMetricValueStatus; + entity_kind?: string; + entity_owner?: string; }; export type DbMetricValue = { @@ -35,6 +37,8 @@ export type DbMetricValue = { timestamp: Date; error_message: string | null; status: DbMetricValueStatus | null; + entity_kind: string | null; + entity_owner: string | null; }; export type DbAggregatedMetric = { diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/plugin.ts b/workspaces/scorecard/plugins/scorecard-backend/src/plugin.ts index 39d1840c34..10ee3abe63 
100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/plugin.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/plugin.ts @@ -98,6 +98,7 @@ export const scorecardPlugin = createBackendPlugin({ auth, registry: metricProvidersRegistry, database: dbMetricValues, + logger: logger, }); Scheduler.create({ @@ -118,6 +119,7 @@ export const scorecardPlugin = createBackendPlugin({ catalog, httpAuth, permissions, + logger, }), ); }, diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.test.ts b/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.test.ts index 65895c2f03..926e030732 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.test.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.test.ts @@ -267,6 +267,8 @@ describe('PullMetricsByProviderTask', () => { const metricValues = [ { catalog_entity_ref: 'component:default/test1', + entity_kind: 'Component', + entity_owner: undefined, metric_id: 'github.test_metric', timestamp: new Date('2024-01-15T12:00:00.000Z'), value: 42, @@ -275,6 +277,8 @@ describe('PullMetricsByProviderTask', () => { { catalog_entity_ref: 'component:default/test2', metric_id: 'github.test_metric', + entity_kind: 'Component', + entity_owner: undefined, status: 'success', timestamp: new Date('2024-01-15T12:00:00.000Z'), value: 42, diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.ts b/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.ts index 230b5b86ad..ce3399414b 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/scheduler/tasks/PullMetricsByProviderTask.ts @@ -161,6 +161,8 @@ export class PullMetricsByProviderTask implements 
SchedulerTask { value, timestamp: new Date(), status, + entity_kind: entity.kind, + entity_owner: normalizeOwner(entity?.spec?.owner), } as DbMetricValueCreate; } catch (error) { return { @@ -170,6 +172,8 @@ export class PullMetricsByProviderTask implements SchedulerTask { timestamp: new Date(), error_message: error instanceof Error ? error.message : String(error), + entity_kind: entity.kind, + entity_owner: normalizeOwner(entity?.spec?.owner), } as DbMetricValueCreate; } }), @@ -196,3 +200,9 @@ export class PullMetricsByProviderTask implements SchedulerTask { } } } + +function normalizeOwner(owner: unknown): string | undefined { + if (!owner) return undefined; + if (typeof owner === 'string') return owner; + return JSON.stringify(owner); +} diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.test.ts b/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.test.ts index abfdae96b7..42ae423b3a 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.test.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.test.ts @@ -25,7 +25,11 @@ import { mockDatabaseMetricValues, } from '../../__fixtures__/mockDatabaseMetricValues'; import { buildMockMetricProvidersRegistry } from '../../__fixtures__/mockMetricProvidersRegistry'; -import { AuthService } from '@backstage/backend-plugin-api'; +import { + AuthService, + BackstageCredentials, + LoggerService, +} from '@backstage/backend-plugin-api'; import * as permissionUtils from '../permissions/permissionUtils'; import { AggregatedMetric, @@ -88,6 +92,7 @@ describe('CatalogMetricService', () => { let mockedAuth: jest.Mocked; let mockedRegistry: jest.Mocked; let mockedDatabase: jest.Mocked; + let mockedLogger: jest.Mocked; let service: CatalogMetricService; let toAggregatedMetricSpy: jest.SpyInstance; @@ -97,6 +102,10 @@ describe('CatalogMetricService', () => { mockedCatalog = 
catalogServiceMock.mock(); mockedCatalog.getEntityByRef.mockResolvedValue(mockEntity); + mockedCatalog.getEntitiesByRefs = jest + .fn() + .mockResolvedValue({ items: [] }); + mockedAuth = mockServices.auth.mock({ getOwnServiceCredentials: jest.fn().mockResolvedValue({ token: 'test-token', @@ -113,6 +122,8 @@ describe('CatalogMetricService', () => { aggregatedMetric, }); + mockedLogger = mockServices.logger.mock(); + (permissionUtils.filterAuthorizedMetrics as jest.Mock).mockReturnValue([ { id: 'github.important_metric' }, ]); @@ -133,6 +144,7 @@ describe('CatalogMetricService', () => { auth: mockedAuth, registry: mockedRegistry, database: mockedDatabase, + logger: mockedLogger, }); jest.useFakeTimers(); @@ -506,4 +518,497 @@ describe('CatalogMetricService', () => { }); }); }); + + describe('getEntityMetricDetails', () => { + const mockMetricRows: DbMetricValue[] = [ + { + id: 1, + catalog_entity_ref: 'component:default/service-a', + metric_id: 'github.important_metric', + value: 15, + timestamp: new Date('2024-01-15T12:00:00.000Z'), + error_message: null, + status: 'error', + entity_kind: 'Component', + entity_owner: 'team:default/platform', + }, + { + id: 2, + catalog_entity_ref: 'component:default/service-b', + metric_id: 'github.important_metric', + value: 8, + timestamp: new Date('2024-01-15T11:00:00.000Z'), + error_message: null, + status: 'warning', + entity_kind: 'Component', + entity_owner: 'team:default/backend', + }, + { + id: 3, + catalog_entity_ref: 'component:default/service-c', + metric_id: 'github.important_metric', + value: 3, + timestamp: new Date('2024-01-15T10:00:00.000Z'), + error_message: null, + status: 'success', + entity_kind: 'API', + entity_owner: 'team:default/platform', + }, + ]; + + const mockEntities = { + items: [ + new MockEntityBuilder() + .withKind('Component') + .withMetadata({ name: 'service-a', namespace: 'default' }) + .withSpec({ owner: 'team:default/platform' }) + .build(), + new MockEntityBuilder() + .withKind('Component') + 
.withMetadata({ name: 'service-b', namespace: 'default' }) + .withSpec({ owner: 'team:default/backend' }) + .build(), + new MockEntityBuilder() + .withKind('API') + .withMetadata({ name: 'service-c', namespace: 'default' }) + .withSpec({ owner: 'team:default/platform' }) + .build(), + ], + }; + + let mockCredentials: BackstageCredentials; + + beforeEach(() => { + mockedDatabase.readEntityMetricsByStatus.mockResolvedValue({ + rows: mockMetricRows, + total: 3, + }); + + mockedCatalog.getEntitiesByRefs.mockReset(); + mockedCatalog.getEntitiesByRefs.mockResolvedValue(mockEntities); + mockCredentials = {} as BackstageCredentials; + }); + + it('should fetch entity metrics with default options', async () => { + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(result.metricId).toBe('github.important_metric'); + expect(result.entities).toHaveLength(3); + expect(result.pagination).toEqual({ + page: 1, + pageSize: 10, + total: 3, + totalPages: 1, + }); + }); + + it('should enrich entities with catalog metadata', async () => { + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(result.entities[0]).toEqual({ + entityRef: 'component:default/service-a', + entityName: 'service-a', + entityKind: 'Component', + owner: 'team:default/platform', + metricValue: 15, + timestamp: '2024-01-15T12:00:00.000Z', + status: 'error', + }); + }); + + it('should call database with correct pagination parameters', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 2, + limit: 5, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { pagination: { limit: 5, offset: 5 } }, + ); + }); + + it('should filter by status at database level', async () => { + await 
service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + status: 'error', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { status: 'error', pagination: { limit: 10, offset: 0 } }, + ); + }); + + it('should filter by kind at database level', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + kind: 'Component', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { entityKind: 'Component', pagination: { limit: 10, offset: 0 } }, + ); + }); + + it('should filter by owner at database level', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + owner: ['team:default/platform'], + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { + entityOwner: ['team:default/platform'], + pagination: { limit: 10, offset: 0 }, + }, + ); + }); + + it('should filter by entityName at database level', async () => { + mockedDatabase.readEntityMetricsByStatus.mockResolvedValueOnce({ + rows: [mockMetricRows[0]], + total: 1, + }); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + entityName: 'service-a', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { entityName: 'service-a', pagination: { limit: 10, offset: 0 } }, + ); + + expect(result.entities).toHaveLength(1); + expect(result.entities[0].entityName).toBe('service-a'); + expect(result.pagination.total).toBe(1); + }); + + it('should pass entityName to database for filtering', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + 
entityName: 'SERVICE', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { entityName: 'SERVICE', pagination: { limit: 10, offset: 0 } }, + ); + }); + + it('should sort by entityName ascending', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + sortBy: 'entityName', + sortOrder: 'asc', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { + sortBy: 'entityName', + sortOrder: 'asc', + pagination: { limit: 10, offset: 0 }, + }, + ); + }); + + it('should sort by metricValue descending', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + sortBy: 'metricValue', + sortOrder: 'desc', + page: 1, + limit: 10, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { + sortBy: 'metricValue', + sortOrder: 'desc', + pagination: { limit: 10, offset: 0 }, + }, + ); + }); + + it('should sort by timestamp descending by default', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + // When no sortBy/sortOrder are supplied the DB defaults to timestamp desc + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { pagination: { limit: 10, offset: 0 } }, + ); + }); + + it('should handle null metric values in sorting', async () => { + mockedDatabase.readEntityMetricsByStatus.mockResolvedValue({ + rows: [{ ...mockMetricRows[0], value: null }, mockMetricRows[1]], + total: 2, + }); + + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + sortBy: 'metricValue', + sortOrder: 'desc', + page: 1, + limit: 10, + }, + ); + + // Null handling (nulls-last) is delegated to the 
DB via orderByRaw + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { + sortBy: 'metricValue', + sortOrder: 'desc', + pagination: { limit: 10, offset: 0 }, + }, + ); + }); + + it('should batch-fetch entities using getEntitiesByRefs', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(mockedCatalog.getEntitiesByRefs).toHaveBeenCalledWith( + { + entityRefs: [ + 'component:default/service-a', + 'component:default/service-b', + 'component:default/service-c', + ], + fields: ['kind', 'metadata', 'spec'], + }, + { credentials: expect.any(Object) }, + ); + }); + + it('should exclude entities that the catalog returns null for (unauthorized)', async () => { + // service-b (index 1) returns undefined/null — catalog enforces no access + mockedCatalog.getEntitiesByRefs.mockResolvedValue({ + items: [mockEntities.items[0], undefined, mockEntities.items[2]], + }); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + // service-b is filtered out; only the two authorized entities are returned + expect(result.entities).toHaveLength(2); + expect(result.entities.map(e => e.entityRef)).not.toContain( + 'component:default/service-b', + ); + expect(result.entities[0].entityRef).toBe('component:default/service-a'); + expect(result.entities[1].entityRef).toBe('component:default/service-c'); + }); + + it('should handle catalog API failures by logging an error and not returning information from the database', async () => { + mockedCatalog.getEntitiesByRefs.mockRejectedValue( + new Error('Catalog API error'), + ); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(mockedLogger.error).toHaveBeenCalledWith( + 'Failed to fetch entities from catalog', + 
expect.objectContaining({ error: expect.any(Error) }), + ); + + // When catalog is unavailable, do not bypass and instead log error + expect(result.entities).toHaveLength(0); + }); + + it('should pass null to database for unscoped query (avoids catalog enumeration)', async () => { + await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { page: 1, limit: 10 }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { pagination: { limit: 10, offset: 0 } }, + ); + }); + + it('should use catalog.getEntitiesByRefs as the sole authorization gate for the unscoped path', async () => { + // Simulate catalog returning null for service-b (no access) and real entities for others + mockedCatalog.getEntitiesByRefs.mockResolvedValue({ + items: [mockEntities.items[0], undefined, mockEntities.items[2]], + }); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { page: 1, limit: 10 }, + ); + + // service-b should be filtered out because catalog returned null (unauthorized) + expect(result.entities).toHaveLength(2); + expect(result.entities.map(e => e.entityRef)).not.toContain( + 'component:default/service-b', + ); + }); + + it('should combine filters, sorting, and pagination', async () => { + mockedDatabase.readEntityMetricsByStatus.mockResolvedValue({ + rows: [mockMetricRows[0]], + total: 1, + }); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + status: 'error', + kind: 'Component', + owner: ['team:default/platform'], + sortBy: 'metricValue', + sortOrder: 'desc', + page: 1, + limit: 5, + }, + ); + + expect(mockedDatabase.readEntityMetricsByStatus).toHaveBeenCalledWith( + 'github.important_metric', + { + status: 'error', + entityKind: 'Component', + entityOwner: ['team:default/platform'], + sortBy: 'metricValue', + sortOrder: 'desc', + pagination: { limit: 5, offset: 0 }, + }, + 
); + + expect(result.entities).toHaveLength(1); + expect(result.pagination.total).toBe(1); + }); + + it('should return empty results when no entities match', async () => { + mockedDatabase.readEntityMetricsByStatus.mockResolvedValue({ + rows: [], + total: 0, + }); + + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(result.entities).toEqual([]); + expect(result.pagination).toEqual({ + page: 1, + pageSize: 10, + total: 0, + totalPages: 0, + }); + }); + + it('should include metric metadata in response', async () => { + const result = await service.getEntityMetricDetails( + 'github.important_metric', + mockCredentials, + { + page: 1, + limit: 10, + }, + ); + + expect(result.metricMetadata).toEqual({ + title: provider.getMetric().title, + description: provider.getMetric().description, + type: provider.getMetric().type, + }); + }); + }); }); diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.ts b/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.ts index 1be8d6fd5d..81de477eb6 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/service/CatalogMetricService.ts @@ -18,10 +18,16 @@ import { MetricResult, ThresholdConfig, AggregatedMetric, + EntityMetricDetailResponse, + EntityMetricDetail, } from '@red-hat-developer-hub/backstage-plugin-scorecard-common'; import { MetricProvidersRegistry } from '../providers/MetricProvidersRegistry'; import { NotFoundError, stringifyError } from '@backstage/errors'; -import { AuthService } from '@backstage/backend-plugin-api'; +import { + AuthService, + BackstageCredentials, + LoggerService, +} from '@backstage/backend-plugin-api'; import { filterAuthorizedMetrics } from '../permissions/permissionUtils'; import { PermissionCondition, @@ -32,12 +38,14 @@ import { 
CatalogService } from '@backstage/plugin-catalog-node'; import { DatabaseMetricValues } from '../database/DatabaseMetricValues'; import { mergeEntityAndProviderThresholds } from '../utils/mergeEntityAndProviderThresholds'; import { AggregatedMetricMapper } from './mappers'; +import { Entity } from '@backstage/catalog-model'; type CatalogMetricServiceOptions = { catalog: CatalogService; auth: AuthService; registry: MetricProvidersRegistry; database: DatabaseMetricValues; + logger: LoggerService; }; export type AggregatedMetricsByStatus = Record< @@ -46,6 +54,8 @@ export type AggregatedMetricsByStatus = Record< >; export class CatalogMetricService { + private readonly logger: LoggerService; + private readonly catalog: CatalogService; private readonly auth: AuthService; private readonly registry: MetricProvidersRegistry; @@ -56,6 +66,7 @@ export class CatalogMetricService { this.auth = options.auth; this.registry = options.registry; this.database = options.database; + this.logger = options.logger; } /** @@ -167,4 +178,130 @@ export class CatalogMetricService { return AggregatedMetricMapper.toAggregatedMetric(); } + + /** + * Get detailed entity metrics for drill-down with filtering, sorting, and pagination. + * + * Fetches individual entity metric values and enriches them with catalog metadata. + * Supports database-level filtering (status, owner, kind, entityName) and + * database-level sorting and pagination. Falls back to database values if catalog is unavailable. 
+ * + * @param metricId - Metric ID to fetch (e.g., "github.open_prs") + * @param options - Query options for filtering, sorting, and pagination + * @param options.status - Filter by threshold status (database-level) + * @param options.owner - Filter by owner entity reference (database-level) + * @param options.kind - Filter by entity kind (database-level) + * @param options.entityName - Substring search against the entity ref `kind:namespace/name` (database-level) + * @param options.sortBy - Field to sort by (default: "timestamp") + * @param options.sortOrder - Sort direction: "asc" or "desc" (default: "desc") + * @param options.page - Page number (1-indexed) + * @param options.limit - Entities per page (max: 100) + * @returns Paginated entity metric details with metadata + */ + async getEntityMetricDetails( + metricId: string, + credentials: BackstageCredentials, + options: { + status?: 'success' | 'warning' | 'error'; + owner?: string[]; + kind?: string; + entityName?: string; + sortBy?: + | 'entityName' + | 'owner' + | 'entityKind' + | 'timestamp' + | 'metricValue'; + sortOrder?: 'asc' | 'desc'; + page: number; + limit: number; + }, + ): Promise { + const dbPagination = { + limit: options.limit, + offset: (options.page - 1) * options.limit, + }; + + // Fetch raw metric data from database + const { rows, total: dbTotal } = + await this.database.readEntityMetricsByStatus(metricId, { + status: options.status, + entityName: options.entityName, + entityKind: options.kind, + entityOwner: options.owner, + sortBy: options.sortBy, + sortOrder: options.sortOrder, + pagination: dbPagination, + }); + + // Get metric metadata + const metric = this.registry.getMetric(metricId); + + // Batch-fetch entities from catalog using user credentials. + // The catalog enforces catalog.entity.read permissions — entities the user + // cannot access are returned as null in response.items. 
+    const entityRefsToFetch = rows.map(row => row.catalog_entity_ref);
+    const entityMap = new Map<string, Entity>();
+
+    if (entityRefsToFetch.length > 0) {
+      try {
+        const response = await this.catalog.getEntitiesByRefs(
+          {
+            entityRefs: entityRefsToFetch,
+            fields: ['kind', 'metadata', 'spec'],
+          },
+          { credentials },
+        );
+
+        // Build map of ref -> entity (null entries = unauthorized, not added to map)
+        entityRefsToFetch.forEach((ref, index) => {
+          const entity = response.items[index];
+          if (entity) {
+            entityMap.set(ref, entity);
+          }
+        });
+      } catch (error) {
+        // Catalog unavailable — entityMap stays empty, so the filter below removes all rows.
+        // Fail secure: authorization cannot be confirmed without the catalog, so no results
+        // are returned rather than risking exposure of unauthorized entity metric data.
+        this.logger.error(`Failed to fetch entities from catalog: ${stringifyError(error)}`);
+      }
+    }
+
+    // Only include entities the catalog confirmed the user can access.
+    // Unauthorized entities are returned as null by getEntitiesByRefs and are never added
+    // to entityMap, so they are silently excluded here.
+    const enrichedEntities: EntityMetricDetail[] = rows
+      .filter(row => entityMap.has(row.catalog_entity_ref))
+      .map(row => {
+        const entity = entityMap.get(row.catalog_entity_ref);
+        return {
+          entityRef: row.catalog_entity_ref,
+          entityName: entity?.metadata?.name ?? 'Unknown',
+          entityKind: entity?.kind ?? row.entity_kind ?? 'Unknown',
+          owner:
+            (entity?.spec?.owner as string) ?? row.entity_owner ?? 'Unknown',
+          metricValue: row.value,
+          timestamp: new Date(row.timestamp).toISOString(),
+          status: row.status ?? 
'error', // default to error if status is null + }; + }); + + // Format and return response + return { + metricId: metric.id, + metricMetadata: { + title: metric.title, + description: metric.description, + type: metric.type, + }, + entities: enrichedEntities, + pagination: { + page: options.page, + pageSize: options.limit, + total: dbTotal, + totalPages: Math.ceil(dbTotal / options.limit), + }, + }; + } } diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/service/router.test.ts b/workspaces/scorecard/plugins/scorecard-backend/src/service/router.test.ts index e46e561048..a4895aa497 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/service/router.test.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/service/router.test.ts @@ -40,7 +40,10 @@ import { AuthorizeResult, PolicyDecision, } from '@backstage/plugin-permission-common'; -import { PermissionsService } from '@backstage/backend-plugin-api'; +import { + BackstageCredentials, + PermissionsService, +} from '@backstage/backend-plugin-api'; import { mockDatabaseMetricValues } from '../../__fixtures__/mockDatabaseMetricValues'; jest.mock('../utils/getEntitiesOwnedByUser', () => ({ @@ -81,6 +84,7 @@ describe('createRouter', () => { let app: express.Express; let metricProvidersRegistry: MetricProvidersRegistry; let catalogMetricService: CatalogMetricService; + let mockLogger: ReturnType; let httpAuthMock: ServiceMock< import('@backstage/backend-plugin-api').HttpAuthService >; @@ -93,11 +97,13 @@ describe('createRouter', () => { beforeEach(async () => { metricProvidersRegistry = new MetricProvidersRegistry(); const catalog = catalogServiceMock.mock(); + mockLogger = mockServices.logger.mock(); catalogMetricService = new CatalogMetricService({ catalog, registry: metricProvidersRegistry, auth: mockServices.auth(), database: mockDatabaseMetricValues, + logger: mockLogger, }); permissionsMock.authorizeConditional.mockResolvedValue([ @@ -122,6 +128,7 @@ describe('createRouter', () => { 
catalog, httpAuth: httpAuthMock, permissions: permissionsMock, + logger: mockServices.logger.mock(), }); app = express(); app.use(router); @@ -546,6 +553,7 @@ describe('createRouter', () => { catalog: mockCatalog, httpAuth: httpAuthMock, permissions: permissionsMock, + logger: mockServices.logger.mock(), }); aggregationApp = express(); aggregationApp.use(router); @@ -725,4 +733,471 @@ describe('createRouter', () => { ); }); }); + + describe('GET /metrics/:metricId/catalog/aggregations/entities', () => { + const mockEntityMetricDetailResponse = { + metricId: 'github.open_prs', + metricMetadata: { + title: 'GitHub Open PRs', + description: 'Mock number description.', + type: 'number', + }, + entities: [ + { + entityRef: 'component:default/my-service', + entityName: 'my-service', + entityKind: 'Component', + owner: 'team:default/platform', + metricValue: 15, + timestamp: '2025-01-01T10:30:00.000Z', + status: 'error', + }, + { + entityRef: 'component:default/another-service', + entityName: 'another-service', + entityKind: 'Component', + owner: 'team:default/backend', + metricValue: 8, + timestamp: '2025-01-01T10:25:00.000Z', + status: 'warning', + }, + ], + pagination: { + page: 1, + pageSize: 10, + total: 2, + totalPages: 1, + }, + }; + + let drillDownApp: express.Express; + let getEntityMetricDetailsSpy: jest.SpyInstance; + let mockCredentials: BackstageCredentials; + + beforeEach(async () => { + const githubProvider = new MockNumberProvider( + 'github.open_prs', + 'github', + 'GitHub Open PRs', + ); + metricProvidersRegistry.register(githubProvider); + + const jiraProvider = new MockNumberProvider( + 'jira.open_issues', + 'jira', + 'Jira Open Issues', + ); + metricProvidersRegistry.register(jiraProvider); + + getEntityMetricDetailsSpy = jest + .spyOn(catalogMetricService, 'getEntityMetricDetails') + .mockResolvedValue(mockEntityMetricDetailResponse as any); + + const mockCatalog = catalogServiceMock.mock(); + const router = await createRouter({ + 
metricProvidersRegistry, + catalogMetricService, + catalog: mockCatalog, + httpAuth: httpAuthMock, + permissions: permissionsMock, + logger: mockServices.logger.mock(), + }); + + drillDownApp = express(); + drillDownApp.use(router); + drillDownApp.use(mockErrorHandler()); + mockCredentials = { + principal: { + userEntityRef: 'user:default/test-user', + }, + } as BackstageCredentials; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should return entity metric details with default pagination', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities', + ); + + expect(response.status).toBe(200); + expect(response.body).toEqual(mockEntityMetricDetailResponse); + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + { + status: undefined, + owner: undefined, + kind: undefined, + entityName: undefined, + sortBy: undefined, + sortOrder: 'desc', + page: 1, + limit: 5, + }, + ); + }); + + it('should handle custom page and pageSize', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?page=2&pageSize=20', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + page: 2, + limit: 20, + }), + ); + }); + + it('should return 400 when pageSize exceeds max of 100', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?pageSize=200', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain('Invalid query parameters'); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should accept pageSize at max boundary of 100', async () => { + const response = await request(drillDownApp).get( + 
'/metrics/github.open_prs/catalog/aggregations/entities?pageSize=100', + ); + + expect(response.status).toBe(200); + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + limit: 100, + }), + ); + }); + + it('should filter by status', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?status=error', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + status: 'error', + }), + ); + }); + + it('should filter by owner', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?owner=team:default/platform', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + owner: ['team:default/platform'], + }), + ); + }); + + it('should filter by kind', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?kind=Component', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + kind: 'Component', + }), + ); + }); + + it('should filter by entityName', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?entityName=service', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + entityName: 'service', + }), + ); + }); + + it('should sort by entityName ascending', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?sortBy=entityName&sortOrder=asc', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + sortBy: 'entityName', + sortOrder: 'asc', + }), + ); + 
}); + + it('should sort by metricValue descending', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?sortBy=metricValue&sortOrder=desc', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + sortBy: 'metricValue', + sortOrder: 'desc', + }), + ); + }); + + it('should combine multiple filters', async () => { + await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?status=error&kind=Component&owner=team:default/platform&sortBy=metricValue&sortOrder=desc', + ); + + expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith( + 'github.open_prs', + mockCredentials, + expect.objectContaining({ + status: 'error', + kind: 'Component', + owner: ['team:default/platform'], + sortBy: 'metricValue', + sortOrder: 'desc', + }), + ); + }); + + it('should return 403 when user does not have permission', async () => { + permissionsMock.authorizeConditional.mockResolvedValue([ + { result: AuthorizeResult.DENY }, + ]); + + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities', + ); + + expect(response.status).toBe(403); + expect(response.body.error.name).toEqual('NotAllowedError'); + }); + + it('should return 401 when user entity reference is not found', async () => { + httpAuthMock.credentials.mockResolvedValue({ + principal: {}, + } as any); + + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities', + ); + + expect(response.status).toBe(401); + expect(response.body.error.name).toEqual('AuthenticationError'); + expect(response.body.error.message).toContain( + 'User entity reference not found', + ); + }); + + it('should return 403 when user does not have access to metric (conditional)', async () => { + permissionsMock.authorizeConditional.mockResolvedValue([ + CONDITIONAL_POLICY_DECISION, + ]); + + const 
response = await request(drillDownApp).get(
+        '/metrics/jira.open_issues/catalog/aggregations/entities',
+      );
+
+      expect(response.status).toBe(403);
+      expect(response.body.error.name).toEqual('NotAllowedError');
+    });
+
+    it('should return 404 when metric does not exist', async () => {
+      const response = await request(drillDownApp).get(
+        '/metrics/non.existent.metric/catalog/aggregations/entities',
+      );
+
+      expect(response.status).toBe(404);
+      expect(response.body.error.name).toBe('NotFoundError');
+    });
+
+    it('should return empty entities array when no results', async () => {
+      getEntityMetricDetailsSpy.mockResolvedValue({
+        metricId: 'github.open_prs',
+        // response mirrors EntityMetricDetailResponse — credentials are a call argument, not a response field
+        metricMetadata: mockEntityMetricDetailResponse.metricMetadata,
+        entities: [],
+        pagination: { page: 1, pageSize: 10, total: 0, totalPages: 0 },
+      });
+
+      const response = await request(drillDownApp).get(
+        '/metrics/github.open_prs/catalog/aggregations/entities',
+      );
+
+      expect(response.status).toBe(200);
+      expect(response.body.entities).toEqual([]);
+      expect(response.body.pagination.total).toBe(0);
+    });
+
+    it('should normalize multi-value owner params to an array', async () => {
+      await request(drillDownApp).get(
+        '/metrics/github.open_prs/catalog/aggregations/entities?owner=team:default/platform&owner=user:default/alice',
+      );
+
+      expect(getEntityMetricDetailsSpy).toHaveBeenCalledWith(
+        'github.open_prs',
+        mockCredentials,
+        expect.objectContaining({
+          owner: ['team:default/platform', 'user:default/alice'],
+        }),
+      );
+    });
+
+    describe('input validation', () => {
+      it('should return 400 when page is 0', async () => {
+        const response = await request(drillDownApp).get(
+          '/metrics/github.open_prs/catalog/aggregations/entities?page=0',
+        );
+
+        expect(response.status).toBe(400);
+        expect(response.body.error.name).toBe('InputError');
+        expect(response.body.error.message).toContain(
+          'Invalid query parameters',
+        );
+        expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled();
+      });
+
+      
it('should return 400 when page is negative', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?page=-1', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when page exceeds max of 10000', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?page=10001', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when pageSize is 0', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?pageSize=0', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when status is an invalid value', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?status=unknown', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when sortBy is an invalid value', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?sortBy=invalid', + ); + + 
expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when sortOrder is an invalid value', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?sortOrder=random', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when owner is an empty string', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?owner=', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when kind is an empty string', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?kind=', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + + it('should return 400 when entityName is an empty string', async () => { + const response = await request(drillDownApp).get( + '/metrics/github.open_prs/catalog/aggregations/entities?entityName=', + ); + + expect(response.status).toBe(400); + expect(response.body.error.name).toBe('InputError'); + expect(response.body.error.message).toContain( + 'Invalid query parameters', + ); + 
expect(getEntityMetricDetailsSpy).not.toHaveBeenCalled(); + }); + }); + }); }); diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/service/router.ts b/workspaces/scorecard/plugins/scorecard-backend/src/service/router.ts index 1d588921e6..b54529940f 100644 --- a/workspaces/scorecard/plugins/scorecard-backend/src/service/router.ts +++ b/workspaces/scorecard/plugins/scorecard-backend/src/service/router.ts @@ -23,6 +23,7 @@ import Router from 'express-promise-router'; import type { CatalogMetricService } from './CatalogMetricService'; import type { MetricProvidersRegistry } from '../providers/MetricProvidersRegistry'; import { + LoggerService, type HttpAuthService, type PermissionsService, } from '@backstage/backend-plugin-api'; @@ -44,6 +45,7 @@ import { getEntitiesOwnedByUser } from '../utils/getEntitiesOwnedByUser'; import { parseCommaSeparatedString } from '../utils/parseCommaSeparatedString'; import { validateMetricsSchema } from '../validation/validateMetricsSchema'; import { AggregatedMetricMapper } from './mappers'; +import { validateDrillDownMetricsSchema } from '../validation/validateDrillDownMetricsSchema'; export type ScorecardRouterOptions = { metricProvidersRegistry: MetricProvidersRegistry; @@ -51,6 +53,7 @@ export type ScorecardRouterOptions = { catalog: CatalogService; httpAuth: HttpAuthService; permissions: PermissionsService; + logger: LoggerService; }; export async function createRouter({ @@ -59,6 +62,7 @@ export async function createRouter({ catalog, httpAuth, permissions, + logger, }: ScorecardRouterOptions): Promise { const router = Router(); router.use(express.json()); @@ -192,5 +196,64 @@ export async function createRouter({ ); }); + router.get( + '/metrics/:metricId/catalog/aggregations/entities', + async (req, res) => { + const { metricId } = req.params; + + const { + page, + pageSize, + status, + owner, + kind, + entityName, + sortBy, + sortOrder, + } = validateDrillDownMetricsSchema(req.query, logger); + + const { conditions } 
= await authorizeConditional( + req, + scorecardMetricReadPermission, + ); + + const metric = metricProvidersRegistry.getMetric(metricId); + const authorizedMetrics = filterAuthorizedMetrics([metric], conditions); + + if (authorizedMetrics.length === 0) { + throw new NotAllowedError( + `To view the scorecard metrics, your administrator must grant you the required permission.`, + ); + } + + const credentials = await httpAuth.credentials(req, { allow: ['user'] }); + const userEntityRef = credentials?.principal?.userEntityRef; + + if (!userEntityRef) { + throw new AuthenticationError('User entity reference not found'); + } + + // Per-row authorization is enforced by catalog.getEntitiesByRefs in the service. + // For "owned by me" scoping, the frontend passes identityApi.ownershipEntityRefs + // as repeated ?owner= params. + const entityMetrics = await catalogMetricService.getEntityMetricDetails( + metricId, + credentials, + { + status, + owner, + kind, + entityName, + sortBy, + sortOrder, + page, + limit: pageSize, + }, + ); + + res.json(entityMetrics); + }, + ); + return router; } diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.test.ts b/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.test.ts new file mode 100644 index 0000000000..eed6110ff3 --- /dev/null +++ b/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.test.ts @@ -0,0 +1,301 @@ +/* + * Copyright Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { InputError } from '@backstage/errors'; +import { validateDrillDownMetricsSchema } from './validateDrillDownMetricsSchema'; +import { mockServices } from '@backstage/backend-test-utils'; + +describe('validateDrillDownMetricsSchema', () => { + describe('valid query parameters', () => { + it('should return defaults when given an empty object', () => { + expect( + validateDrillDownMetricsSchema({}, mockServices.logger.mock()), + ).toEqual({ + page: 1, + pageSize: 5, + sortOrder: 'desc', + }); + }); + + it('should coerce page and pageSize from strings to numbers', () => { + const result = validateDrillDownMetricsSchema( + { + page: '3', + pageSize: '20', + }, + mockServices.logger.mock(), + ); + expect(result.page).toBe(3); + expect(result.pageSize).toBe(20); + }); + + it('should accept page at max boundary of 10000', () => { + const result = validateDrillDownMetricsSchema( + { page: '10000' }, + mockServices.logger.mock(), + ); + expect(result.page).toBe(10000); + }); + + it('should accept pageSize at max boundary of 100', () => { + const result = validateDrillDownMetricsSchema( + { pageSize: '100' }, + mockServices.logger.mock(), + ); + expect(result.pageSize).toBe(100); + }); + + it.each(['success', 'warning', 'error'] as const)( + 'should accept status=%s', + status => { + const result = validateDrillDownMetricsSchema( + { status }, + mockServices.logger.mock(), + ); + expect(result.status).toBe(status); + }, + ); + + it.each([ + 'entityName', + 'owner', + 'entityKind', + 'timestamp', + 'metricValue', + ] as const)('should accept sortBy=%s', sortBy => { + const result = validateDrillDownMetricsSchema( + { sortBy }, + mockServices.logger.mock(), + ); + expect(result.sortBy).toBe(sortBy); + }); + + it.each(['asc', 'desc'] as const)( + 'should accept sortOrder=%s', + sortOrder => { + const result = validateDrillDownMetricsSchema( + { sortOrder }, + 
mockServices.logger.mock(), + ); + expect(result.sortOrder).toBe(sortOrder); + }, + ); + + it('should normalize a single owner string to an array', () => { + const result = validateDrillDownMetricsSchema( + { + owner: 'team:default/platform', + }, + mockServices.logger.mock(), + ); + expect(result.owner).toEqual(['team:default/platform']); + }); + + it('should accept an array of owner strings', () => { + const result = validateDrillDownMetricsSchema( + { + owner: ['team:default/platform', 'user:default/alice'], + }, + mockServices.logger.mock(), + ); + expect(result.owner).toEqual([ + 'team:default/platform', + 'user:default/alice', + ]); + }); + + it('should return undefined when owner is not provided', () => { + const result = validateDrillDownMetricsSchema( + {}, + mockServices.logger.mock(), + ); + expect(result.owner).toBeUndefined(); + }); + + it('should accept a valid kind string', () => { + const result = validateDrillDownMetricsSchema( + { kind: 'Component' }, + mockServices.logger.mock(), + ); + expect(result.kind).toBe('Component'); + }); + + it('should accept a valid entityName string', () => { + const result = validateDrillDownMetricsSchema( + { + entityName: 'my-service', + }, + mockServices.logger.mock(), + ); + expect(result.entityName).toBe('my-service'); + }); + + it('should accept all valid parameters together', () => { + const result = validateDrillDownMetricsSchema( + { + page: '2', + pageSize: '10', + status: 'error', + sortBy: 'metricValue', + sortOrder: 'asc', + owner: 'team:default/backend', + kind: 'Component', + entityName: 'my-service', + }, + mockServices.logger.mock(), + ); + + expect(result).toEqual({ + page: 2, + pageSize: 10, + status: 'error', + sortBy: 'metricValue', + sortOrder: 'asc', + owner: ['team:default/backend'], + kind: 'Component', + entityName: 'my-service', + }); + }); + + it('should strip unknown properties', () => { + const result = validateDrillDownMetricsSchema( + { unknownProp: 'value' }, + 
mockServices.logger.mock(), + ); + expect(result).not.toHaveProperty('unknownProp'); + }); + }); + + describe('invalid query parameters', () => { + it('should throw InputError when page is 0', () => { + expect(() => + validateDrillDownMetricsSchema( + { page: '0' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + }); + + it('should throw InputError when page is negative', () => { + expect(() => + validateDrillDownMetricsSchema( + { page: '-1' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + }); + + it('should throw InputError when page exceeds 10000', () => { + expect(() => + validateDrillDownMetricsSchema( + { page: '10001' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + }); + + it('should throw InputError when pageSize is 0', () => { + expect(() => + validateDrillDownMetricsSchema( + { pageSize: '0' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + }); + + it('should throw InputError when pageSize exceeds 100', () => { + expect(() => + validateDrillDownMetricsSchema( + { pageSize: '101' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + }); + + it('should throw InputError when more than 50 owner values are provided', () => { + const tooManyOwners = Array.from( + { length: 51 }, + (_, i) => `team:default/team-${i}`, + ); + expect(() => + validateDrillDownMetricsSchema( + { owner: tooManyOwners }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + expect(() => + validateDrillDownMetricsSchema( + { owner: tooManyOwners }, + mockServices.logger.mock(), + ), + ).toThrow('Invalid query parameters'); + }); + + it.each([ + { field: 'status', value: 'unknown' }, + { field: 'sortBy', value: 'invalid' }, + { field: 'sortOrder', value: 'random' }, + ])( + 'should throw InputError when $field has invalid value "$value"', + ({ field, value }) => { + expect(() => + validateDrillDownMetricsSchema( + { [field]: value }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + expect(() => 
+ validateDrillDownMetricsSchema( + { [field]: value }, + mockServices.logger.mock(), + ), + ).toThrow('Invalid query parameters'); + }, + ); + + it.each(['kind', 'entityName'])( + 'should throw InputError when %s is an empty string', + field => { + expect(() => + validateDrillDownMetricsSchema( + { [field]: '' }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + expect(() => + validateDrillDownMetricsSchema( + { [field]: '' }, + mockServices.logger.mock(), + ), + ).toThrow('Invalid query parameters'); + }, + ); + + it('should throw InputError when owner contains an empty string', () => { + expect(() => + validateDrillDownMetricsSchema( + { owner: [''] }, + mockServices.logger.mock(), + ), + ).toThrow(InputError); + expect(() => + validateDrillDownMetricsSchema( + { owner: [''] }, + mockServices.logger.mock(), + ), + ).toThrow('Invalid query parameters'); + }); + }); +}); diff --git a/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.ts b/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.ts new file mode 100644 index 0000000000..190585c623 --- /dev/null +++ b/workspaces/scorecard/plugins/scorecard-backend/src/validation/validateDrillDownMetricsSchema.ts @@ -0,0 +1,57 @@ +/* + * Copyright Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { z } from 'zod'; +import { InputError } from '@backstage/errors'; +import { LoggerService } from '@backstage/backend-plugin-api'; + +export function validateDrillDownMetricsSchema( + query: unknown, + logger: LoggerService, +) { + const drillDownSchema = z.object({ + page: z.coerce.number().int().min(1).max(10000).optional().default(1), + pageSize: z.coerce.number().int().min(1).max(100).optional().default(5), + status: z.enum(['success', 'warning', 'error']).optional(), + sortBy: z + .enum(['entityName', 'owner', 'entityKind', 'timestamp', 'metricValue']) + .optional(), + sortOrder: z.enum(['asc', 'desc']).optional().default('desc'), + owner: z.preprocess(val => { + if (val === undefined) return val; + if (Array.isArray(val)) return val; + return [val]; + }, z.array(z.string().min(1).max(255)).max(50).optional()), + kind: z.string().min(1).max(100).optional(), + entityName: z.string().min(1).max(255).optional(), + }); + + const parsed = drillDownSchema.safeParse(query); + + if (!parsed.success) { + logger.warn('Invalid drill-down query parameters', { + errors: JSON.stringify( + parsed.error.errors.map(e => ({ + path: e.path.join('.'), + message: e.message, + code: e.code, + })), + ), + }); + throw new InputError('Invalid query parameters'); + } + + return parsed.data; +} diff --git a/workspaces/scorecard/plugins/scorecard-common/report.api.md b/workspaces/scorecard/plugins/scorecard-common/report.api.md index 589032df52..ba8423db80 100644 --- a/workspaces/scorecard/plugins/scorecard-common/report.api.md +++ b/workspaces/scorecard/plugins/scorecard-common/report.api.md @@ -34,6 +34,35 @@ export type AggregatedMetricValue = { // @public export const DEFAULT_NUMBER_THRESHOLDS: ThresholdConfig; +// @public +export type EntityMetricDetail = { + entityRef: string; + entityName: string; + entityKind: string; + owner: string; + metricValue: number | boolean | null; + timestamp: string; + status: 'success' | 'warning' | 'error'; + score?: string; +}; + +// 
@public +export type EntityMetricDetailResponse = { + metricId: string; + metricMetadata: { + title: string; + description: string; + type: MetricType; + }; + entities: EntityMetricDetail[]; + pagination: { + page: number; + pageSize: number; + total: number; + totalPages: number; + }; +}; + // @public (undocumented) export type Metric = { id: string; diff --git a/workspaces/scorecard/plugins/scorecard-common/src/types/Metric.ts b/workspaces/scorecard/plugins/scorecard-common/src/types/Metric.ts index 84001a5e9e..58e042d037 100644 --- a/workspaces/scorecard/plugins/scorecard-common/src/types/Metric.ts +++ b/workspaces/scorecard/plugins/scorecard-common/src/types/Metric.ts @@ -92,3 +92,38 @@ export type AggregatedMetricResult = { }; result: AggregatedMetric; }; + +/** + * Individual entity metric detail for drill-down + * @public + */ +export type EntityMetricDetail = { + entityRef: string; + entityName: string; + entityKind: string; + owner: string; + metricValue: number | boolean | null; + timestamp: string; + status: 'success' | 'warning' | 'error'; + score?: string; +}; + +/** + * Paginated response for entity metrics drill-down + * @public + */ +export type EntityMetricDetailResponse = { + metricId: string; + metricMetadata: { + title: string; + description: string; + type: MetricType; + }; + entities: EntityMetricDetail[]; + pagination: { + page: number; + pageSize: number; + total: number; + totalPages: number; + }; +};