Skip to content

Commit aae49dc

Browse files
indexzero and claude authored
feat(flatcover): add time-travel coverage analysis with caching (#16)
> "Roads? Where we're going ..." Enable historical coverage analysis by filtering versions based on their publication date. This supports scenarios like: - Reproducing past coverage states without modifying lockfiles - Analyzing what coverage looked like at a specific point in time - Understanding when packages became available in a registry New options: - --before <date>: Only count versions published before an ISO date - --cache <dir>: Cache packuments to disk with HTTP conditional requests (ETag/If-None-Match, Last-Modified/If-Modified-Since) Extended --full output now includes: - time: ISO timestamp when each version was published - spec: Convenience field with name@version format The cache enables efficient re-analysis across multiple --before dates without re-fetching packuments from the registry. --------- Co-authored-by: Claude <claude@anthropic.com>
1 parent a2f4f03 commit aae49dc

2 files changed

Lines changed: 362 additions & 23 deletions

File tree

bin/flatcover.js

Lines changed: 130 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
import { parseArgs } from 'node:util';
1616
import { readFileSync } from 'node:fs';
17+
import { readFile, writeFile, rename, mkdir } from 'node:fs/promises';
1718
import { createReadStream } from 'node:fs';
1819
import { createInterface } from 'node:readline';
1920
import { dirname, join } from 'node:path';
@@ -37,6 +38,8 @@ const { values, positionals } = parseArgs({
3738
concurrency: { type: 'string', default: '20' },
3839
progress: { type: 'boolean', default: false },
3940
summary: { type: 'boolean', default: false },
41+
before: { type: 'string', short: 'b' },
42+
cache: { type: 'string', short: 'c' },
4043
help: { type: 'boolean', short: 'h' }
4144
},
4245
allowPositionals: true
@@ -67,7 +70,7 @@ Options:
6770
-s, --specs Include version (name@version or {name,version})
6871
--json Output as JSON array
6972
--ndjson Output as newline-delimited JSON (streaming)
70-
--full Include all metadata (integrity, resolved)
73+
--full Include all metadata (integrity, resolved, time)
7174
--dev Include dev dependencies (default: false)
7275
--peer Include peer dependencies (default: true)
7376
-h, --help Show this help
@@ -80,13 +83,17 @@ Coverage options:
8083
--concurrency <n> Concurrent requests (default: 20)
8184
--progress Show progress on stderr
8285
--summary Show coverage summary on stderr
86+
--before <date> Only count versions published before this ISO date
87+
-c, --cache <dir> Cache packuments to disk for faster subsequent runs
8388
8489
Output formats (with --cover):
85-
(default) CSV: package,version,present
86-
--full CSV: package,version,present,integrity,resolved
87-
--json [{"name":"...","version":"...","present":true}, ...]
88-
--full --json Adds "integrity" and "resolved" fields to JSON
89-
--ndjson {"name":"...","version":"...","present":true} per line
90+
(default) CSV format (sorted by name, version)
91+
--json JSON array (sorted by name, version)
92+
--ndjson Newline-delimited JSON (streaming, unsorted)
93+
94+
Output fields:
95+
(default) name, version, present
96+
--full Adds: spec, integrity, resolved, time (works with all formats)
9097
9198
Examples:
9299
# From lockfile
@@ -97,6 +104,10 @@ Examples:
97104
flatcover --list packages.json --cover --summary
98105
echo '[{"name":"lodash","version":"4.17.21"}]' > pkgs.json && flatcover -l pkgs.json --cover
99106
107+
# Time-travel reanalysis: capture full output with timestamps
108+
flatcover package-lock.json --cover --full --json > coverage.json
109+
# Later, filter locally by publication date without re-fetching registry
110+
100111
# From stdin (NDJSON) - use '-' to read from stdin
101112
echo '{"name":"lodash","version":"4.17.21"}' | flatcover - --cover
102113
cat packages.ndjson | flatcover - --cover --json
@@ -217,6 +228,68 @@ function encodePackageName(name) {
217228
return name.replace('/', '%2f');
218229
}
219230

231+
/**
 * Load previously stored HTTP validation metadata (etag / lastModified)
 * for a cached packument, if any exists on disk.
 * @param {string} cacheDir - Cache directory path
 * @param {string} encodedName - URL-encoded package name
 * @returns {Promise<{ etag?: string, lastModified?: string } | null>} parsed
 *   metadata, or null when the file is absent or unreadable
 */
async function readCacheMeta(cacheDir, encodedName) {
  const metaPath = join(cacheDir, `${encodedName}.meta.json`);
  try {
    const raw = await readFile(metaPath, 'utf8');
    return JSON.parse(raw);
  } catch {
    // A missing or corrupt metadata file is simply a cache miss.
    return null;
  }
}
246+
247+
/**
 * Load a previously cached packument body from disk.
 * @param {string} cacheDir - Cache directory path
 * @param {string} encodedName - URL-encoded package name
 * @returns {Promise<object | null>} the parsed packument, or null when the
 *   cache entry is absent or cannot be parsed
 */
async function readCachedPackument(cacheDir, encodedName) {
  try {
    const body = await readFile(join(cacheDir, `${encodedName}.json`), 'utf8');
    return JSON.parse(body);
  } catch {
    // Absent or unparsable cache entry: fall back to a registry fetch.
    return null;
  }
}
262+
263+
/**
 * Persist a packument body and its HTTP cache metadata to disk.
 * Each file is staged in a pid-suffixed temp file and renamed into place so
 * concurrent readers never observe a partially written file. The packument
 * is written before the metadata, preserving the original ordering.
 * @param {string} cacheDir - Cache directory path
 * @param {string} encodedName - URL-encoded package name
 * @param {string} body - Raw packument JSON string
 * @param {{ etag?: string, lastModified?: string }} meta - Cache metadata
 */
async function writeCache(cacheDir, encodedName, body, meta) {
  await mkdir(cacheDir, { recursive: true });

  // Atomic write: stage to a temp file, then rename over the target.
  const atomicWrite = async (target, data) => {
    const tmp = `${target}.${process.pid}.tmp`;
    await writeFile(tmp, data);
    await rename(tmp, target);
  };

  await atomicWrite(join(cacheDir, `${encodedName}.json`), body);

  await atomicWrite(
    join(cacheDir, `${encodedName}.meta.json`),
    JSON.stringify({
      etag: meta.etag,
      lastModified: meta.lastModified,
      fetchedAt: new Date().toISOString()
    })
  );
}
292+
220293
/**
221294
* Create undici client with retry support
222295
* @param {string} registryUrl
@@ -267,10 +340,10 @@ function createClient(registryUrl, { auth, token }) {
267340
/**
268341
* Check coverage for all dependencies
269342
* @param {Array<{ name: string, version: string, integrity?: string, resolved?: string }>} deps
270-
* @param {{ registry: string, auth?: string, token?: string, progress: boolean }} options
343+
* @param {{ registry: string, auth?: string, token?: string, progress: boolean, before?: string, cache?: string }} options
271344
* @returns {AsyncGenerator<{ name: string, version: string, present: boolean, integrity?: string, resolved?: string, error?: string }>}
272345
*/
273-
async function* checkCoverage(deps, { registry, auth, token, progress }) {
346+
async function* checkCoverage(deps, { registry, auth, token, progress, before, cache }) {
274347
const { client, headers, baseUrl } = createClient(registry, { auth, token });
275348

276349
// Group by package name to avoid duplicate requests
@@ -299,10 +372,22 @@ async function* checkCoverage(deps, { registry, auth, token, progress }) {
299372
const path = `${basePath}/${encodedName}`;
300373

301374
try {
375+
// Build request headers, adding conditional request headers if cached
376+
const reqHeaders = { ...headers };
377+
let cacheMeta = null;
378+
if (cache) {
379+
cacheMeta = await readCacheMeta(cache, encodedName);
380+
if (cacheMeta?.etag) {
381+
reqHeaders['If-None-Match'] = cacheMeta.etag;
382+
} else if (cacheMeta?.lastModified) {
383+
reqHeaders['If-Modified-Since'] = cacheMeta.lastModified;
384+
}
385+
}
386+
302387
const response = await client.request({
303388
method: 'GET',
304389
path,
305-
headers
390+
headers: reqHeaders
306391
});
307392

308393
const chunks = [];
@@ -316,19 +401,43 @@ async function* checkCoverage(deps, { registry, auth, token, progress }) {
316401
}
317402

318403
let packumentVersions = null;
319-
if (response.statusCode === 200) {
404+
let packumentTime = null;
405+
406+
if (response.statusCode === 304 && cache) {
407+
// Cache hit - read from disk
408+
const cachedPackument = await readCachedPackument(cache, encodedName);
409+
if (cachedPackument) {
410+
packumentVersions = cachedPackument.versions || {};
411+
packumentTime = cachedPackument.time || {};
412+
}
413+
} else if (response.statusCode === 200) {
320414
const body = Buffer.concat(chunks).toString('utf8');
321415
const packument = JSON.parse(body);
322416
packumentVersions = packument.versions || {};
417+
packumentTime = packument.time || {};
418+
419+
// Write to cache if enabled
420+
if (cache) {
421+
await writeCache(cache, encodedName, body, {
422+
etag: response.headers.etag,
423+
lastModified: response.headers['last-modified']
424+
});
425+
}
323426
}
324427

325428
// Check each version, preserving integrity/resolved from original dep
326429
const versionResults = [];
327430
for (const [version, dep] of versionMap) {
328-
const present = packumentVersions ? !!packumentVersions[version] : false;
431+
let present = packumentVersions ? !!packumentVersions[version] : false;
432+
433+
// Time travel: if --before set, only count if published before that date
434+
if (present && before && packumentTime[version] >= before) {
435+
present = false;
436+
}
329437
const result = { name, version, present };
330438
if (dep.integrity) result.integrity = dep.integrity;
331439
if (dep.resolved) result.resolved = dep.resolved;
440+
if (packumentTime && packumentTime[version]) result.time = packumentTime[version];
332441
versionResults.push(result);
333442
}
334443
return versionResults;
@@ -374,7 +483,7 @@ async function* checkCoverage(deps, { registry, auth, token, progress }) {
374483
*/
375484
function formatDep(dep, { specs, full }) {
376485
if (full) {
377-
const obj = { name: dep.name, version: dep.version };
486+
const obj = { name: dep.name, version: dep.version, spec: `${dep.name}@${dep.version}` };
378487
if (dep.integrity) obj.integrity = dep.integrity;
379488
if (dep.resolved) obj.resolved = dep.resolved;
380489
return obj;
@@ -432,8 +541,10 @@ async function outputCoverage(results, { json, ndjson, summary, full }) {
432541
if (ndjson) {
433542
// Stream immediately
434543
const obj = { name: result.name, version: result.version, present: result.present };
544+
if (full) obj.spec = `${result.name}@${result.version}`;
435545
if (full && result.integrity) obj.integrity = result.integrity;
436546
if (full && result.resolved) obj.resolved = result.resolved;
547+
if (full && result.time) obj.time = result.time;
437548
console.log(JSON.stringify(obj));
438549
} else {
439550
all.push(result);
@@ -447,17 +558,19 @@ async function outputCoverage(results, { json, ndjson, summary, full }) {
447558
if (json) {
448559
const data = all.map(r => {
449560
const obj = { name: r.name, version: r.version, present: r.present };
561+
if (full) obj.spec = `${r.name}@${r.version}`;
450562
if (full && r.integrity) obj.integrity = r.integrity;
451563
if (full && r.resolved) obj.resolved = r.resolved;
564+
if (full && r.time) obj.time = r.time;
452565
return obj;
453566
});
454567
console.log(JSON.stringify(data, null, 2));
455568
} else {
456569
// CSV output
457570
if (full) {
458-
console.log('package,version,present,integrity,resolved');
571+
console.log('package,version,spec,present,integrity,resolved,time');
459572
for (const r of all) {
460-
console.log(`${r.name},${r.version},${r.present},${r.integrity || ''},${r.resolved || ''}`);
573+
console.log(`${r.name},${r.version},${r.name}@${r.version},${r.present},${r.integrity || ''},${r.resolved || ''},${r.time || ''}`);
461574
}
462575
} else {
463576
console.log('package,version,present');
@@ -523,7 +636,9 @@ try {
523636
registry: values.registry,
524637
auth: values.auth,
525638
token: values.token,
526-
progress: values.progress
639+
progress: values.progress,
640+
before: values.before,
641+
cache: values.cache
527642
});
528643

529644
await outputCoverage(results, {

0 commit comments

Comments
 (0)