diff --git a/.gitignore b/.gitignore index 7c7c0860..82d0210a 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,8 @@ npm-debug.log* workspace/ workspace_restored/ snapshot.json -data.json +# data.json checksums.json source.txt chunk_*.txt +package-lock.json \ No newline at end of file diff --git a/data.json b/data.json new file mode 100644 index 00000000..f00c7f87 --- /dev/null +++ b/data.json @@ -0,0 +1 @@ +[42, 17, 89, 5, 73, 31, 56, 92, 11, 68, 48, 76, 9, 61, 38, 81, 22, 57, 94, 14] diff --git a/package.json b/package.json index dfecb12a..b6547fd8 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,9 @@ "cli:interactive": "node src/cli/interactive.js", "cli:progress": "node src/cli/progress.js", "modules:dynamic": "node src/modules/dynamic.js uppercase", + "modules:dynamic:uppercase": "node src/modules/dynamic.js uppercase", + "modules:dynamic:reverse": "node src/modules/dynamic.js reverse", + "modules:dynamic:repeat": "node src/modules/dynamic.js repeat", "hash:verify": "node src/hash/verify.js", "streams:lineNumberer": "echo 'hello\nworld' | node src/streams/lineNumberer.js", "streams:filter": "echo 'hello\nworld\ntest' | node src/streams/filter.js --pattern test", diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..2b7cf24a 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,53 @@ +import readline from 'node:readline'; + const interactive = () => { // Write your code here // Use readline module for interactive CLI // Support commands: uptime, cwd, date, exit // Handle Ctrl+C and unknown commands + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + prompt: '> ', + }); + + const formatUptime = (secondsTotal) => { + const hours = Math.floor(secondsTotal / 3600); + const minutes = Math.floor((secondsTotal % 3600) / 60); + const seconds = secondsTotal % 60; + + return `${hours}h ${minutes}m ${seconds}s`; + }; + + rl.prompt(); + + rl.on('line', (line) => { + const 
command = line.trim().toLowerCase(); + + if (command === 'uptime') { + const uptimeInSeconds = Math.floor(process.uptime()); + console.log(formatUptime(uptimeInSeconds)); + } else if (command === 'cwd') { + console.log(process.cwd()); + } else if (command === 'date') { + console.log(new Date().toString()); + } else if (command === 'exit') { + rl.close(); + return; + } else if (command !== '') { + console.log('Unknown command'); + } + + rl.prompt(); + }); + + rl.on('close', () => { + process.exit(0); + }); + + process.on('SIGINT', () => { + rl.close(); + }); }; interactive(); diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..aa93532e 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -3,6 +3,32 @@ const progress = () => { // Simulate progress bar from 0% to 100% over ~5 seconds // Update in place using \r every 100ms // Format: [████████████████████ ] 67% + const totalSteps = 50; + const intervalMs = 100; + const barWidth = 30; + + let step = 0; + + const render = () => { + const percent = Math.round((step / totalSteps) * 100); + const filled = Math.round((percent / 100) * barWidth); + const empty = barWidth - filled; + const bar = `${'█'.repeat(filled)}${' '.repeat(empty)}`; + + process.stdout.write(`\r[${bar}] ${percent}%`); + }; + + render(); + + const timer = setInterval(() => { + step += 1; + render(); + + if (step >= totalSteps) { + clearInterval(timer); + process.stdout.write('\n'); + } + }, intervalMs); }; progress(); diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..57004811 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,10 +1,37 @@ +import { spawn } from 'node:child_process'; + const execCommand = () => { - // Write your code here + // Write your code here // Take command from CLI argument // Spawn child process // Pipe child stdout/stderr to parent stdout/stderr // Pass environment variables // Exit with same code as child + + const command = process.argv.slice(2).join(' 
').trim(); + + if (!command) { + process.exit(1); + } + + const child = spawn(command, { + shell: true, + env: process.env, + stdio: 'inherit', + }); + + child.on('error', () => { + process.exit(1); + }); + + child.on('exit', (code, signal) => { + if (signal) { + process.kill(process.pid, signal); + return; + } + + process.exit(code ?? 1); + }); }; execCommand(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..a8ef8d97 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,39 @@ +import { readdir } from 'node:fs/promises'; +import path from 'node:path'; + const findByExt = async () => { // Write your code here // Recursively find all files with specific extension // Parse --ext CLI argument (default: .txt) + const extFlagIndex = process.argv.indexOf('--ext'); + const rawExt = extFlagIndex >= 0 ? process.argv[extFlagIndex + 1] : '.txt'; + const extension = rawExt ? (rawExt.startsWith('.') ? rawExt : `.${rawExt}`) : '.txt'; + const rootDir = process.cwd(); + const matchedFiles = []; + + const scanDirectory = async (currentDir) => { + const entries = await readdir(currentDir, { withFileTypes: true }); + + for (const entry of entries) { + const entryPath = path.join(currentDir, entry.name); + + if (entry.isDirectory()) { + await scanDirectory(entryPath); + } else if (entry.isFile() && path.extname(entry.name).toLowerCase() === extension.toLowerCase()) { + matchedFiles.push(path.relative(rootDir, entryPath).split(path.sep).join('/')); + } + } + }; + + try { + await scanDirectory(rootDir); + matchedFiles.sort((a, b) => a.localeCompare(b)); + matchedFiles.forEach((filePath) => { + console.log(filePath); + }); + } catch { + throw new Error('FS operation failed'); + } }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..2df031ab 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,52 @@ +import { debug } from 'node:console'; +import { readdir, readFile, writeFile } from 'node:fs/promises'; 
+import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + const merge = async () => { // Write your code here // Default: read all .txt files from workspace/parts in alphabetical order // Optional: support --files filename1,filename2,... to merge specific files in provided order // Concatenate content and write to workspace/merged.txt + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const partsDir = path.resolve(currentDir, '../../workspace/parts'); + const targetFile = path.resolve(currentDir, '../../workspace/merged.txt'); + + const filesFlagIndex = process.argv.indexOf('--files'); + //debugger; + + try { + let filenames; + + if (filesFlagIndex >= 0) { + const rawList = process.argv[filesFlagIndex + 1] ?? ''; + filenames = rawList + .split(',') + .map((name) => name.trim()) + .filter(Boolean); + + if (filenames.length === 0) { + throw new Error('FS operation failed'); + } + } else { + const entries = await readdir(partsDir, { withFileTypes: true }); + filenames = entries + .filter((entry) => entry.isFile() && entry.name.toLowerCase().endsWith('.txt')) + .map((entry) => entry.name) + .sort((a, b) => a.localeCompare(b)); + } + + const chunks = []; + for (const filename of filenames) { + const filePath = path.join(partsDir, filename); + chunks.push(await readFile(filePath, 'utf8')); + } + //debugger; + await writeFile(targetFile, chunks.join(''), 'utf8'); + } catch { + throw new Error('FS operation failed'); + } }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..b90924e4 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,54 @@ +import { mkdir, readFile, rm, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + const restore = async () => { // Write your code here // Read snapshot.json // Treat snapshot.rootPath as metadata only // Recreate directory/file structure in 
workspace_restored + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const snapshotPath = path.resolve(currentDir, '../../snapshot.json'); + const restoreRoot = path.resolve(currentDir, '../../workspace_restored'); + + try { + const rawSnapshot = await readFile(snapshotPath, 'utf8'); + const snapshot = JSON.parse(rawSnapshot); + + if (!snapshot || !Array.isArray(snapshot.entries)) { + throw new Error('Invalid snapshot format'); + } + + await rm(restoreRoot, { recursive: true, force: true }); + await mkdir(restoreRoot, { recursive: true }); + + for (const entry of snapshot.entries) { + if (!entry || typeof entry.path !== 'string' || typeof entry.type !== 'string') { + throw new Error('Invalid snapshot entry'); + } + + const normalizedRelativePath = path.normalize(entry.path); + const targetPath = path.resolve(restoreRoot, normalizedRelativePath); + + if (targetPath !== restoreRoot && !targetPath.startsWith(`${restoreRoot}${path.sep}`)) { + throw new Error('Invalid snapshot entry path'); + } + + if (entry.type === 'directory') { + await mkdir(targetPath, { recursive: true }); + } else if (entry.type === 'file') { + const parentDir = path.dirname(targetPath); + await mkdir(parentDir, { recursive: true }); + const content = typeof entry.content === 'string' ? 
Buffer.from(entry.content, 'base64') : Buffer.alloc(0); + await writeFile(targetPath, content); + } else { + throw new Error('Invalid snapshot entry type'); + } + } + } catch { + throw new Error('FS operation failed'); + } }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..0dadc447 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,60 @@ +import { readdir, readFile, stat, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + const snapshot = async () => { - // Write your code here // Recursively scan workspace directory // Write snapshot.json with: // - rootPath: absolute path to workspace // - entries: flat array of relative paths and metadata + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const workspaceRoot = path.resolve(currentDir, '../../workspace'); + const snapshotPath = path.resolve(currentDir, '../../snapshot.json'); + //debugger; + const entries = []; + + const toSnapshotPath = (absolutePath) => path.relative(workspaceRoot, absolutePath).split(path.sep).join('/'); + + const scanDirectory = async (directory) => { + const items = await readdir(directory, { withFileTypes: true }); + items.sort((firstItem, secondItem) => firstItem.name.localeCompare(secondItem.name)); + + for (const item of items) { + const itemPath = path.join(directory, item.name); + const relativePath = toSnapshotPath(itemPath); + + if (item.isDirectory()) { + entries.push({ + path: relativePath, + type: 'directory', + }); + await scanDirectory(itemPath); + } else if (item.isFile()) { + const fileStats = await stat(itemPath); + const content = await readFile(itemPath); + + entries.push({ + path: relativePath, + type: 'file', + size: fileStats.size, + content: content.toString('base64'), + }); + } + } + }; + + try { + await scanDirectory(workspaceRoot); + const snapshotData = { + rootPath: workspaceRoot, + entries, 
+ }; + + await writeFile(snapshotPath, JSON.stringify(snapshotData, null, 2), 'utf8'); + } catch { + throw new Error('FS operation failed'); + } }; await snapshot(); diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..88f9b42c 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,78 @@ +import { createHash } from 'node:crypto'; +import { createReadStream } from 'node:fs'; +import { readFile } from 'node:fs/promises'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const getEntries = (checksums) => { + if (!checksums || typeof checksums !== 'object' || Array.isArray(checksums)) { + return []; + } + + return Object.entries(checksums).filter( + ([filename, hash]) => typeof filename === 'string' && typeof hash === 'string' + ); +}; + +const calculateSha256 = (filePath) => + new Promise((resolve, reject) => { + const hash = createHash('sha256'); + //console.log(`Calculating SHA256 for file: ${filePath}`); + const stream = createReadStream(filePath); + //console.log(`Stream created for file: ${filePath}`); + stream.on('data', (chunk) => { + hash.update(chunk); + //console.log(`Hash updated with chunk of size: ${chunk.length} bytes for file: ${filePath}`); + }); + + stream.on('end', () => { + resolve(hash.digest('hex')); + //console.log(`Finished calculating SHA256 for file: ${filePath}`); + }); + + stream.on('error', (error) => { + console.log(`Error occurred while calculating SHA256 for file: ${filePath}`, error); + reject(error); + }); + }); + const verify = async () => { // Write your code here // Read checksums.json // Calculate SHA256 hash using Streams API // Print result: filename — OK/FAIL + const checksumsPath = new URL('../../checksums.json', import.meta.url); + const checksumsFilePath = fileURLToPath(checksumsPath); + //debugger; + let checksumsRaw; + console.log(`Reading checksums from: ${checksumsFilePath}`); + + try { + checksumsRaw = await readFile(checksumsPath, 'utf8'); + } catch (error) { + if 
(error && typeof error === 'object' && 'code' in error && error.code === 'ENOENT') { + throw new Error('FS operation failed'); + } + throw error; + } + + const checksums = JSON.parse(checksumsRaw); + //console.log(`checksums\n${JSON.stringify(checksums, null, 2)}`); + const entries = getEntries(checksums); + //console.log(`entries\n${JSON.stringify(entries, null, 2)}`); + for (const [filename, expectedHash] of entries) { + let status = 'FAIL'; + + try { + const actualHash = await calculateSha256(filename); + //console.log(`actualHash\n${actualHash} expectedHash\n${expectedHash} filename\n${filename}`); + status = actualHash.toLowerCase() === expectedHash.toLowerCase() ? 'OK' : 'FAIL'; + } catch { + status = 'FAIL'; + } + + console.log(`${filename} — ${status}`); + } }; await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..ad8ca4b1 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -4,6 +4,26 @@ const dynamic = async () => { // Dynamically import plugin from plugins/ directory // Call run() function and print result // Handle missing plugin case + const pluginName = process.argv[2]; + + if (!pluginName) { + console.error('Plugin not found'); + process.exit(1); + } + + try { + const pluginPath = new URL(`./plugins/${pluginName}.js`, import.meta.url).href; + const pluginModule = await import(pluginPath); + const result = pluginModule.run(); + console.log(result); + } catch (error) { + if (error && (error.code === 'ERR_MODULE_NOT_FOUND' || error.code === 'ERR_UNSUPPORTED_DIR_IMPORT')) { + console.error('Plugin not found'); + process.exit(1); + } + + throw error; + } }; await dynamic(); diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..79bbb00d 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,43 @@ +import { Transform } from 'node:stream'; + const filter = () => { // Write your code here // Read from process.stdin // Filter lines by --pattern CLI argument // Use 
Transform Stream // Write to process.stdout + const patternArgIndex = process.argv.indexOf('--pattern'); + const pattern = patternArgIndex >= 0 ? process.argv[patternArgIndex + 1] ?? '' : ''; + + let buffer = ''; + const filterTransform = new Transform({ + decodeStrings: false, + transform(chunk, _encoding, callback) { + const chunkText = typeof chunk === 'string' ? chunk : chunk.toString('utf8'); + buffer += chunkText; + + const lines = buffer.split('\n'); + buffer = lines.pop() ?? ''; + + for (const line of lines) { + if (line.includes(pattern)) { + this.push(`${line}\n`); + } + } + + callback(); + }, + flush(callback) { + if (buffer.includes(pattern)) { + this.push(buffer); + } + + callback(); + } + }); + + process.stdin.setEncoding('utf8'); + process.stdin.pipe(filterTransform).pipe(process.stdout); }; filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..73058297 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,41 @@ +import { Transform } from 'node:stream'; + const lineNumberer = () => { // Write your code here // Read from process.stdin // Use Transform Stream to prepend line numbers // Write to process.stdout + let buffer = ''; + let lineNumber = 1; + + const lineNumberTransform = new Transform({ + decodeStrings: false, + transform(chunk, _encoding, callback) { + const chunkText = typeof chunk === 'string' ? chunk : chunk.toString('utf8'); + buffer += chunkText; + + const lines = buffer.split('\n'); + buffer = lines.pop() ?? 
''; + + for (const line of lines) { + this.push(`${lineNumber}: ${line}\n`); + lineNumber += 1; + } + + callback(); + }, + flush(callback) { + //debugger; + if (buffer.length > 0) { + this.push(`${lineNumber}: ${buffer}`); + } + + callback(); + } + }); + + process.stdin.setEncoding('utf8'); + process.stdin.pipe(lineNumberTransform).pipe(process.stdout); }; lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..7e7e165a 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,64 @@ +import { createReadStream } from 'node:fs'; +import { writeFile } from 'node:fs/promises'; +import path from 'node:path'; + const split = async () => { // Write your code here // Read source.txt using Readable Stream // Split into chunk_1.txt, chunk_2.txt, etc. // Each chunk max N lines (--lines CLI argument, default: 10) + const linesArgIndex = process.argv.indexOf('--lines'); + const parsedLinesValue = linesArgIndex >= 0 ? Number.parseInt(process.argv[linesArgIndex + 1] ?? '', 10) : Number.NaN; + const linesPerChunk = Number.isInteger(parsedLinesValue) && parsedLinesValue > 0 ? 
parsedLinesValue : 10; + + const sourceFilePath = path.resolve(process.cwd(), 'source.txt'); + const sourceStream = createReadStream(sourceFilePath, { encoding: 'utf8' }); + + let buffer = ''; + let chunkIndex = 1; + let currentChunkLineCount = 0; + let currentChunkParts = []; + + const flushChunk = async () => { + if (currentChunkParts.length === 0) { + return; + } + + const chunkPath = path.resolve(process.cwd(), `chunk_${chunkIndex}.txt`); + await writeFile(chunkPath, currentChunkParts.join(''), 'utf8'); + + chunkIndex += 1; + currentChunkLineCount = 0; + currentChunkParts = []; + }; + + const appendLine = async (line) => { + currentChunkParts.push(line); + currentChunkLineCount += 1; + + if (currentChunkLineCount >= linesPerChunk) { + await flushChunk(); + } + }; + + for await (const chunk of sourceStream) { + buffer += chunk; + + let newlineIndex = buffer.indexOf('\n'); + while (newlineIndex !== -1) { + const lineWithSeparator = buffer.slice(0, newlineIndex + 1); + buffer = buffer.slice(newlineIndex + 1); + + await appendLine(lineWithSeparator); + newlineIndex = buffer.indexOf('\n'); + } + } + + if (buffer.length > 0) { + await appendLine(buffer); + } + + await flushChunk(); }; await split(); diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..acd75b8b 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,3 +1,70 @@ +import { cpus } from 'node:os'; +import { readFile } from 'node:fs/promises'; +import path from 'node:path'; +import { Worker } from 'node:worker_threads'; +import { fileURLToPath } from 'node:url'; + +const createChunks = (numbers, chunkCount) => { + const chunks = Array.from({ length: chunkCount }, () => []); + + for (let index = 0; index < numbers.length; index += 1) { + chunks[index % chunkCount].push(numbers[index]); + } + + return chunks; +}; + +const runWorker = (workerPath, chunk) => new Promise((resolve, reject) => { + const worker = new Worker(workerPath); + + worker.once('message', (sortedChunk) => { + resolve(sortedChunk); + 
worker.terminate(); + }); + + worker.once('error', reject); + + worker.once('exit', (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + + worker.postMessage(chunk); +}); + +const mergeSortedChunks = (sortedChunks) => { + const merged = []; + const indexes = Array.from({ length: sortedChunks.length }, () => 0); + + while (true) { + let candidateChunkIndex = -1; + + for (let chunkIndex = 0; chunkIndex < sortedChunks.length; chunkIndex += 1) { + const currentIndex = indexes[chunkIndex]; + if (currentIndex >= sortedChunks[chunkIndex].length) { + continue; + } + + if (candidateChunkIndex === -1) { + candidateChunkIndex = chunkIndex; + continue; + } + + if (sortedChunks[chunkIndex][currentIndex] < sortedChunks[candidateChunkIndex][indexes[candidateChunkIndex]]) { + candidateChunkIndex = chunkIndex; + } + } + + if (candidateChunkIndex === -1) { + return merged; + } + + merged.push(sortedChunks[candidateChunkIndex][indexes[candidateChunkIndex]]); + indexes[candidateChunkIndex] += 1; + } +}; + const main = async () => { // Write your code here // Read data.json containing array of numbers @@ -6,6 +73,28 @@ const main = async () => { // Collect sorted chunks // Merge using k-way merge algorithm // Log final sorted array + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const dataPath = path.resolve(currentDir, '../../data.json'); + const workerPath = path.resolve(currentDir, './worker.js'); + + try { + const rawData = await readFile(dataPath, 'utf8'); + const numbers = JSON.parse(rawData); + + if (!Array.isArray(numbers) || !numbers.every((item) => typeof item === 'number')) { + throw new Error('Invalid input data'); + } + + const workerCount = cpus().length; + const chunks = createChunks(numbers, workerCount); + const sortedChunks = await Promise.all(chunks.map((chunk) => runWorker(workerPath, chunk))); + const sortedNumbers = mergeSortedChunks(sortedChunks); 
+ + console.log(sortedNumbers); + } catch { + throw new Error('WT operation failed'); + } }; await main(); diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..9c8c8b48 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -5,5 +5,11 @@ import { parentPort } from 'worker_threads'; // Send back to main thread parentPort.on('message', (data) => { - // Write your code here + if (!Array.isArray(data)) { + parentPort.postMessage([]); + return; + } + + const sortedData = [...data].sort((left, right) => left - right); + parentPort.postMessage(sortedData); }); diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js index 3a3c5089..6b40248c 100644 --- a/src/zip/compressDir.js +++ b/src/zip/compressDir.js @@ -1,9 +1,70 @@ +import { createWriteStream } from 'node:fs'; +import { mkdir, readdir, readFile } from 'node:fs/promises'; +import path from 'node:path'; +import { Readable } from 'node:stream'; +import { pipeline } from 'node:stream/promises'; +import { fileURLToPath } from 'node:url'; +import { createBrotliCompress } from 'node:zlib'; + const compressDir = async () => { // Write your code here // Read all files from workspace/toCompress/ // Compress entire directory structure into archive.br // Save to workspace/compressed/ // Use Streams API + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const sourceRoot = path.resolve(currentDir, '../../workspace/toCompress'); + const targetDir = path.resolve(currentDir, '../../workspace/compressed'); + const archivePath = path.resolve(targetDir, 'archive.br'); + + const entries = []; + + const toArchivePath = (absolutePath) => path.relative(sourceRoot, absolutePath).split(path.sep).join('/'); + + const scanDirectory = async (directoryPath) => { + const items = await readdir(directoryPath, { withFileTypes: true }); + items.sort((left, right) => left.name.localeCompare(right.name)); + + for (const item of items) { + const itemPath = 
path.join(directoryPath, item.name); + const relativePath = toArchivePath(itemPath); + + if (item.isDirectory()) { + entries.push({ + path: relativePath, + type: 'directory', + }); + + await scanDirectory(itemPath); + } else if (item.isFile()) { + const content = await readFile(itemPath); + entries.push({ + path: relativePath, + type: 'file', + content: content.toString('base64'), + }); + } + } + }; + + try { + await scanDirectory(sourceRoot); + await mkdir(targetDir, { recursive: true }); + + const archivePayload = JSON.stringify({ + root: 'toCompress', + entries, + }); + + await pipeline( + Readable.from([archivePayload]), + createBrotliCompress(), + createWriteStream(archivePath), + ); + } catch { + throw new Error('ZLIB operation failed'); + } }; await compressDir(); diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js index d6e770f6..e67f5f54 100644 --- a/src/zip/decompressDir.js +++ b/src/zip/decompressDir.js @@ -1,8 +1,72 @@ +import { createReadStream, createWriteStream } from 'node:fs'; +import { mkdir, rm } from 'node:fs/promises'; +import path from 'node:path'; +import { Readable } from 'node:stream'; +import { Writable } from 'node:stream'; +import { pipeline } from 'node:stream/promises'; +import { fileURLToPath } from 'node:url'; +import { createBrotliDecompress } from 'node:zlib'; + const decompressDir = async () => { // Write your code here // Read archive.br from workspace/compressed/ // Decompress and extract to workspace/decompressed/ // Use Streams API + const currentFilePath = fileURLToPath(import.meta.url); + const currentDir = path.dirname(currentFilePath); + const archivePath = path.resolve(currentDir, '../../workspace/compressed/archive.br'); + const targetRoot = path.resolve(currentDir, '../../workspace/decompressed'); + + let archivePayload = ''; + + try { + await pipeline( + createReadStream(archivePath), + createBrotliDecompress(), + new Writable({ + write(chunk, encoding, callback) { + archivePayload += 
chunk.toString('utf8'); + callback(); + }, + }), + ); + + const archive = JSON.parse(archivePayload); + + if (!archive || !Array.isArray(archive.entries)) { + throw new Error('Invalid archive format'); + } + + await rm(targetRoot, { recursive: true, force: true }); + await mkdir(targetRoot, { recursive: true }); + + for (const entry of archive.entries) { + if (!entry || typeof entry.path !== 'string' || typeof entry.type !== 'string') { + throw new Error('Invalid archive entry'); + } + + const targetPath = path.resolve(targetRoot, path.normalize(entry.path)); + if (targetPath !== targetRoot && !targetPath.startsWith(`${targetRoot}${path.sep}`)) { + throw new Error('Invalid archive entry path'); + } + + if (entry.type === 'directory') { + await mkdir(targetPath, { recursive: true }); + } else if (entry.type === 'file') { + await mkdir(path.dirname(targetPath), { recursive: true }); + + const fileContent = typeof entry.content === 'string' + ? Buffer.from(entry.content, 'base64') + : Buffer.alloc(0); + + await pipeline(Readable.from([fileContent]), createWriteStream(targetPath)); + } else { + throw new Error('Invalid archive entry type'); + } + } + } catch { + throw new Error('ZLIB operation failed'); + } }; await decompressDir();