diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..755c365b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,17 @@ +{ + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "license": "ISC", + "engines": { + "node": ">=24.10.0", + "npm": ">=10.9.2" + } + } + } +} diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..2463070d 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,43 @@ +import readline from 'readline'; + const interactive = () => { - // Write your code here - // Use readline module for interactive CLI - // Support commands: uptime, cwd, date, exit - // Handle Ctrl+C and unknown commands + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + prompt: '> ' + }); + + const exitHandler = () => { + console.log('Goodbye!'); + rl.close(); + process.exit(0); + }; + + rl.on('SIGINT', exitHandler); + rl.on('close', exitHandler); + + rl.prompt(); + + rl.on('line', (line) => { + const cmd = line.trim(); + switch (cmd) { + case 'uptime': + console.log(`Uptime: ${process.uptime().toFixed(2)}s`); + break; + case 'cwd': + console.log(process.cwd()); + break; + case 'date': + console.log(new Date().toISOString()); + break; + case 'exit': + exitHandler(); + return; + default: + console.log('Unknown command'); + } + rl.prompt(); + }); }; -interactive(); +interactive(); \ No newline at end of file diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..2361af55 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,46 @@ +const parseArg = (name, def) => { + const idx = process.argv.indexOf('--' + name); + if (idx !== -1 && process.argv[idx + 1]) { + return process.argv[idx + 1]; + } + return def; +}; + +const isValidHex = (hex) => /^#[0-9a-fA-F]{6}$/.test(hex); + const progress = () => { - // 
Write your code here - // Simulate progress bar from 0% to 100% over ~5 seconds - // Update in place using \r every 100ms - // Format: [████████████████████ ] 67% + const duration = Number(parseArg('duration', 5000)); + const interval = Number(parseArg('interval', 100)); + const length = Number(parseArg('length', 30)); + const color = parseArg('color', null); + const useColor = color && isValidHex(color); + let percent = 0; + let elapsed = 0; + const totalSteps = Math.ceil(duration / interval); + + const colorStart = useColor + ? `\x1b[38;2;${parseInt(color.slice(1, 3), 16)};${parseInt(color.slice(3, 5), 16)};${parseInt(color.slice(5, 7), 16)}m` + : ''; + const colorEnd = useColor ? '\x1b[0m' : ''; + + const timer = setInterval(() => { + percent = Math.min(1, elapsed / duration); + const filled = Math.round(length * percent); + const empty = length - filled; + const bar = + '[' + + (useColor ? colorStart : '') + + '█'.repeat(filled) + + (useColor ? colorEnd : '') + + ' '.repeat(empty) + + `] ${Math.round(percent * 100)}%`; + process.stdout.write('\r' + bar); + elapsed += interval; + if (percent >= 1) { + clearInterval(timer); + process.stdout.write('\nDone!\n'); + } + }, interval); }; -progress(); +progress(); \ No newline at end of file diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..0d399463 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,10 +1,22 @@ +import { spawn } from 'child_process'; + const execCommand = () => { - // Write your code here - // Take command from CLI argument - // Spawn child process - // Pipe child stdout/stderr to parent stdout/stderr - // Pass environment variables - // Exit with same code as child + const cmdStr = process.argv[2]; + if (!cmdStr) { + console.error('No command provided'); + process.exit(1); + } + // Split command into executable and args + const [command, ...args] = cmdStr.match(/(?:[^\s"]+|"[^"]*")+/g).map(s => s.replace(/^"|"$/g, '')); + const child = spawn(command, args, { 
+ stdio: 'inherit', + env: process.env, + shell: process.platform === 'win32' // for Windows compatibility + }); + + child.on('exit', (code) => { + process.exit(code); + }); }; -execCommand(); +execCommand(); \ No newline at end of file diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..580156d0 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,51 @@ +import fs from 'fs/promises'; +import path from 'path'; + +const WORKSPACE = process.cwd(); + +function parseExtArg() { + const extIndex = process.argv.indexOf('--ext'); + let ext = '.txt'; + if (extIndex !== -1 && process.argv[extIndex + 1]) { + ext = process.argv[extIndex + 1]; + if (!ext.startsWith('.')) ext = '.' + ext; + } + return ext; +} + +async function* walk(dir) { + let entries; + try { + entries = await fs.readdir(dir, { withFileTypes: true }); + } catch (e) { + throw new Error('FS operation failed'); + } + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + yield* walk(fullPath); + } else if (entry.isFile()) { + yield fullPath; + } + } +} + const findByExt = async () => { - // Write your code here - // Recursively find all files with specific extension - // Parse --ext CLI argument (default: .txt) + const ext = parseExtArg(); + let files = []; + try { + for await (const file of walk(WORKSPACE)) { + if (path.extname(file) === ext) { + files.push(path.relative(WORKSPACE, file)); + } + } + } catch (e) { + throw new Error('FS operation failed'); + } + files.sort(); + for (const f of files) { + console.log(f); + } }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..fb107512 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,53 @@ +import fs from 'fs/promises'; +import path from 'path'; + +const PARTS_DIR = path.join(process.cwd(), 'workspace', 'parts'); +const MERGED_FILE = path.join(process.cwd(), 'workspace', 'merged.txt'); + +const parseFilesArg = () => { + const idx = 
process.argv.indexOf('--files'); + if (idx !== -1 && process.argv[idx + 1]) { + return process.argv[idx + 1].split(',').map(f => f.trim()).filter(Boolean); + } + return null; +}; + const merge = async () => { - // Write your code here - // Default: read all .txt files from workspace/parts in alphabetical order - // Optional: support --files filename1,filename2,... to merge specific files in provided order - // Concatenate content and write to workspace/merged.txt + let filesToMerge; + try { + const filesArg = parseFilesArg(); + if (filesArg) { + // --files mode + filesToMerge = filesArg.map(f => path.join(PARTS_DIR, f)); + // Check all files exist + await Promise.all(filesToMerge.map(async file => { + try { + await fs.access(file); + } catch { + throw new Error('FS operation failed'); + } + })); + } else { + // Default mode: all .txt files in parts, sorted + let entries; + try { + entries = await fs.readdir(PARTS_DIR, { withFileTypes: true }); + } catch { + throw new Error('FS operation failed'); + } + filesToMerge = entries + .filter(e => e.isFile() && e.name.endsWith('.txt')) + .map(e => path.join(PARTS_DIR, e.name)) + .sort(); + if (filesToMerge.length === 0) throw new Error('FS operation failed'); + } + // Read and concatenate + const contents = await Promise.all(filesToMerge.map(f => fs.readFile(f, 'utf-8'))); + await fs.mkdir(path.dirname(MERGED_FILE), { recursive: true }); + await fs.writeFile(MERGED_FILE, contents.join('')); + } catch (e) { + throw new Error('FS operation failed'); + } }; -await merge(); +await merge(); \ No newline at end of file diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..a88d1885 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,43 @@ +import fs from 'fs/promises'; +import path from 'path'; + +const SNAPSHOT = path.join(process.cwd(), 'snapshot.json'); +const RESTORE_DIR = path.join(process.cwd(), 'workspace_restored'); + const restore = async () => { - // Write your code here - // Read 
snapshot.json - // Treat snapshot.rootPath as metadata only - // Recreate directory/file structure in workspace_restored + // Check if snapshot.json exists + let snapshot; + try { + const data = await fs.readFile(SNAPSHOT, 'utf-8'); + snapshot = JSON.parse(data); + } catch (e) { + throw new Error('FS operation failed'); + } + + // Check if workspace_restored already exists + try { + await fs.access(RESTORE_DIR); + // If no error, directory exists + throw new Error('FS operation failed'); + } catch (e) { + if (e.code !== 'ENOENT') throw new Error('FS operation failed'); + // else, directory does not exist, continue + } + + // Create workspace_restored + await fs.mkdir(RESTORE_DIR); + + // Recreate structure + for (const entry of snapshot.entries) { + const dest = path.join(RESTORE_DIR, entry.path); + if (entry.type === 'directory') { + await fs.mkdir(dest, { recursive: true }); + } else if (entry.type === 'file') { + await fs.mkdir(path.dirname(dest), { recursive: true }); + const content = Buffer.from(entry.content, 'base64'); + await fs.writeFile(dest, content); + } + } }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..e3b5fcd2 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,51 @@ +import fs from 'fs/promises'; +import path from 'path'; + +const WORKSPACE = path.join(process.cwd(), 'workspace'); +const SNAPSHOT = path.join(process.cwd(), 'snapshot.json'); + +async function* walk(dir, base) { + let entries; + try { + entries = await fs.readdir(dir, { withFileTypes: true }); + } catch { + throw new Error('FS operation failed'); + } + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + const relPath = path.relative(base, fullPath).replace(/\\/g, '/'); + if (entry.isDirectory()) { + yield { path: relPath, type: 'directory' }; + yield* walk(fullPath, base); + } else if (entry.isFile()) { + const stat = await fs.stat(fullPath); + const content = await fs.readFile(fullPath); + yield { 
+ path: relPath, + type: 'file', + size: stat.size, + content: content.toString('base64') + }; + } + } +} + const snapshot = async () => { - // Write your code here - // Recursively scan workspace directory - // Write snapshot.json with: - // - rootPath: absolute path to workspace - // - entries: flat array of relative paths and metadata + // Check if workspace exists + try { + await fs.access(WORKSPACE); + } catch { + throw new Error('FS operation failed'); + } + const entries = []; + for await (const entry of walk(WORKSPACE, WORKSPACE)) { + entries.push(entry); + } + const data = { + rootPath: WORKSPACE, + entries + }; + await fs.writeFile(SNAPSHOT, JSON.stringify(data, null, 2)); }; -await snapshot(); +await snapshot(); \ No newline at end of file diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..129dfacd 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,34 @@ +import fs from 'fs/promises'; +import { createReadStream } from 'fs'; +import path from 'path'; +import { createHash } from 'crypto'; + +const CHECKSUMS = path.join(process.cwd(), 'checksums.json'); + const verify = async () => { - // Write your code here - // Read checksums.json - // Calculate SHA256 hash using Streams API - // Print result: filename — OK/FAIL + let checksums; + try { + const data = await fs.readFile(CHECKSUMS, 'utf-8'); + checksums = JSON.parse(data); + } catch { + throw new Error('FS operation failed'); + } + + for (const [filename, expected] of Object.entries(checksums)) { + try { + const hash = createHash('sha256'); + await new Promise((resolve, reject) => { + const stream = createReadStream(path.join(process.cwd(), filename)); + stream.on('error', reject); + stream.on('data', chunk => hash.update(chunk)); + stream.on('end', resolve); + }); + const actual = hash.digest('hex'); + console.log(`${filename} — ${actual === expected ? 
'OK' : 'FAIL'}`); + } catch { + console.log(`${filename} — FAIL`); + } + } }; -await verify(); +await verify(); \ No newline at end of file diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..1d001bf3 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,22 @@ const dynamic = async () => { - // Write your code here - // Accept plugin name as CLI argument - // Dynamically import plugin from plugins/ directory - // Call run() function and print result - // Handle missing plugin case + const pluginName = process.argv[2]; + if (!pluginName) { + console.log('Plugin not found'); + process.exit(1); + } + const pluginPath = `./plugins/${pluginName}.js`; + try { + const plugin = await import(pluginPath); + if (typeof plugin.run === 'function') { + console.log(plugin.run()); + } else { + console.log('Plugin not found'); + process.exit(1); + } + } catch { + console.log('Plugin not found'); + process.exit(1); + } }; -await dynamic(); +await dynamic(); \ No newline at end of file diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..98a13b3c 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,43 @@ +import { Transform, pipeline } from 'stream'; +import process from 'process'; + +const getPattern = () => { + const idx = process.argv.indexOf('--pattern'); + if (idx !== -1 && process.argv[idx + 1]) { + return process.argv[idx + 1]; + } + return ''; +}; + const filter = () => { - // Write your code here - // Read from process.stdin - // Filter lines by --pattern CLI argument - // Use Transform Stream - // Write to process.stdout + const pattern = getPattern(); + if (!pattern) { + process.exit(0); + } + let leftover = ''; + const filterStream = new Transform({ + transform(chunk, encoding, callback) { + const data = leftover + chunk.toString(); + const lines = data.split('\n'); + leftover = lines.pop(); + for (const line of lines) { + if (line.includes(pattern)) { + this.push(line + '\n'); + } + 
} + callback(); + }, + flush(callback) { + if (leftover && leftover.includes(pattern)) { + this.push(leftover + '\n'); + } + callback(); + } + }); + + pipeline(process.stdin, filterStream, process.stdout, (err) => { + if (err) process.exit(1); + }); }; -filter(); +filter(); \ No newline at end of file diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..88e50855 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,30 @@ +import { Transform, pipeline } from 'stream'; +import process from 'process'; + const lineNumberer = () => { - // Write your code here - // Read from process.stdin - // Use Transform Stream to prepend line numbers - // Write to process.stdout + let leftover = ''; + let lineNum = 1; + const numberStream = new Transform({ + transform(chunk, encoding, callback) { + const data = leftover + chunk.toString(); + const lines = data.split('\n'); + leftover = lines.pop(); + for (const line of lines) { + this.push(`${lineNum++} | ${line}\n`); + } + callback(); + }, + flush(callback) { + if (leftover) { + this.push(`${lineNum++} | ${leftover}\n`); + } + callback(); + } + }); + + pipeline(process.stdin, numberStream, process.stdout, (err) => { + if (err) process.exit(1); + }); }; -lineNumberer(); +lineNumberer(); \ No newline at end of file diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..93238ef5 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,57 @@ +import fs from 'fs'; +import fsPromises from 'fs/promises'; +import path from 'path'; + +const getLinesArg = () => { + const idx = process.argv.indexOf('--lines'); + if (idx !== -1 && process.argv[idx + 1]) { + const n = Number(process.argv[idx + 1]); + if (!isNaN(n) && n > 0) return n; + } + return 10; +}; + const split = async () => { - // Write your code here - // Read source.txt using Readable Stream - // Split into chunk_1.txt, chunk_2.txt, etc. 
- // Each chunk max N lines (--lines CLI argument, default: 10) + const LINES_PER_CHUNK = getLinesArg(); + const srcPath = path.join(process.cwd(), 'source.txt'); + let src; + try { + src = fs.createReadStream(srcPath, { encoding: 'utf-8' }); + } catch { + throw new Error('FS operation failed'); + } + + let chunkIdx = 1; + let lineBuffer = []; + let leftover = ''; + let writers = []; + + src.on('data', chunk => { + const data = leftover + chunk; + const lines = data.split('\n'); + leftover = lines.pop(); + for (const line of lines) { + lineBuffer.push(line); + if (lineBuffer.length === LINES_PER_CHUNK) { + const chunkFile = path.join(process.cwd(), `chunk_${chunkIdx++}.txt`); + writers.push(fsPromises.writeFile(chunkFile, lineBuffer.join('\n') + '\n')); + lineBuffer = []; + } + } + }); + + src.on('end', async () => { + if (leftover) lineBuffer.push(leftover); + if (lineBuffer.length > 0) { + const chunkFile = path.join(process.cwd(), `chunk_${chunkIdx++}.txt`); + writers.push(fsPromises.writeFile(chunkFile, lineBuffer.join('\n') + '\n')); + } + await Promise.all(writers); + }); + + src.on('error', () => { + throw new Error('FS operation failed'); + }); }; -await split(); +await split(); \ No newline at end of file diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..f762edb1 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,11 +1,62 @@ +import fs from 'fs/promises'; +import os from 'os'; +import path from 'path'; +import { Worker } from 'worker_threads'; + const main = async () => { - // Write your code here - // Read data.json containing array of numbers - // Split into N chunks (N = CPU cores) - // Create N workers, send one chunk to each - // Collect sorted chunks - // Merge using k-way merge algorithm - // Log final sorted array + const dataPath = path.join(process.cwd(), 'data.json'); + let numbers; + try { + numbers = JSON.parse(await fs.readFile(dataPath, 'utf-8')); + } catch { + throw new Error('FS operation failed'); + } + + const numCores = 
os.cpus().length; + const chunkSize = Math.ceil(numbers.length / numCores); + const chunks = []; + for (let i = 0; i < numCores; i++) { + chunks.push(numbers.slice(i * chunkSize, (i + 1) * chunkSize)); + } + + const results = new Array(numCores); + + await Promise.all( + chunks.map((chunk, idx) => + new Promise((resolve, reject) => { + const worker = new Worker(path.join(__dirname, 'worker.js')); + worker.postMessage(chunk); + worker.on('message', (sortedChunk) => { + results[idx] = sortedChunk; + resolve(); + }); + worker.on('error', reject); + worker.on('exit', (code) => { + if (code !== 0) reject(new Error(`Worker stopped with exit code ${code}`)); + }); + }) + ) + ); + + const pointers = Array(numCores).fill(0); + const merged = []; + while (true) { + let min = Infinity; + let minIdx = -1; + for (let i = 0; i < numCores; i++) { + if (pointers[i] < results[i].length) { + if (results[i][pointers[i]] < min) { + min = results[i][pointers[i]]; + minIdx = i; + } + } + } + if (minIdx === -1) break; + merged.push(min); + pointers[minIdx]++; + } + + console.log(merged); }; -await main(); +await main(); \ No newline at end of file diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..bb6899f8 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -1,9 +1,8 @@ import { parentPort } from 'worker_threads'; -// Receive array from main thread -// Sort in ascending order -// Send back to main thread - -parentPort.on('message', (data) => { - // Write your code here -}); +parentPort.on('message', (numbers) => { + if (Array.isArray(numbers)) { + numbers.sort((a, b) => a - b); + parentPort.postMessage(numbers); + } +}); \ No newline at end of file diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js index 3a3c5089..2a5960e5 100644 --- a/src/zip/compressDir.js +++ b/src/zip/compressDir.js @@ -1,9 +1,64 @@ +import fs from 'fs'; +import fsPromises from 'fs/promises'; +import path from 'path'; +import { pipeline } from 'stream/promises'; +import { 
createBrotliCompress } from 'zlib'; + +const WORKSPACE = path.join(process.cwd(), 'workspace'); +const SRC_DIR = path.join(WORKSPACE, 'toCompress'); +const OUT_DIR = path.join(WORKSPACE, 'compressed'); +const ARCHIVE = path.join(OUT_DIR, 'archive.br'); + +async function* walk(dir, base) { + let entries; + try { + entries = await fsPromises.readdir(dir, { withFileTypes: true }); + } catch { + throw new Error('FS operation failed'); + } + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + const relPath = path.relative(base, fullPath).replace(/\\/g, '/'); + if (entry.isDirectory()) { + yield* walk(fullPath, base); + } else if (entry.isFile()) { + yield { relPath, fullPath }; + } + } +} + const compressDir = async () => { - // Write your code here - // Read all files from workspace/toCompress/ - // Compress entire directory structure into archive.br - // Save to workspace/compressed/ - // Use Streams API + try { + await fsPromises.access(SRC_DIR); + } catch { + throw new Error('FS operation failed'); + } + + await fsPromises.mkdir(OUT_DIR, { recursive: true }); + + const archiveStream = fs.createWriteStream(ARCHIVE); + const brotli = createBrotliCompress(); + + const writer = async () => { + for await (const { relPath, fullPath } of walk(SRC_DIR, SRC_DIR)) { + const stat = await fsPromises.stat(fullPath); + archiveStream.write(relPath + '\n'); + archiveStream.write(stat.size + '\n'); + await pipeline( + fs.createReadStream(fullPath), + archiveStream, + ); + } + archiveStream.end(); + }; + + await pipeline( + async function* () { + await writer(); + }(), + brotli, + fs.createWriteStream(ARCHIVE) + ); }; -await compressDir(); +await compressDir(); \ No newline at end of file diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js index d6e770f6..519c5c33 100644 --- a/src/zip/decompressDir.js +++ b/src/zip/decompressDir.js @@ -1,8 +1,64 @@ +import fs from 'fs'; +import fsPromises from 'fs/promises'; +import path from 'path'; +import { 
pipeline } from 'stream/promises'; +import { createBrotliDecompress } from 'zlib'; +import readline from 'readline'; + +const WORKSPACE = path.join(process.cwd(), 'workspace'); +const COMPRESSED_DIR = path.join(WORKSPACE, 'compressed'); +const ARCHIVE = path.join(COMPRESSED_DIR, 'archive.br'); +const OUT_DIR = path.join(WORKSPACE, 'decompressed'); + const decompressDir = async () => { - // Write your code here - // Read archive.br from workspace/compressed/ - // Decompress and extract to workspace/decompressed/ - // Use Streams API + try { + await fsPromises.access(COMPRESSED_DIR); + await fsPromises.access(ARCHIVE); + } catch { + throw new Error('FS operation failed'); + } + + await fsPromises.mkdir(OUT_DIR, { recursive: true }); + + const brotli = createBrotliDecompress(); + const archiveStream = fs.createReadStream(ARCHIVE); + const decompressedStream = archiveStream.pipe(brotli); + + const rl = readline.createInterface({ input: decompressedStream, crlfDelay: Infinity }); + + let state = 0; + let relPath = ''; + let size = 0; + let buffer = Buffer.alloc(0); + + rl.on('line', async (line) => { + if (state === 0) { + relPath = line; + state = 1; + } else if (state === 1) { + size = parseInt(line, 10); + buffer = Buffer.alloc(0); + state = 2; + } + }); + + decompressedStream.on('data', async (chunk) => { + if (state === 2 && size > 0) { + buffer = Buffer.concat([buffer, chunk]); + if (buffer.length >= size) { + const filePath = path.join(OUT_DIR, relPath); + await fsPromises.mkdir(path.dirname(filePath), { recursive: true }); + await fsPromises.writeFile(filePath, buffer.slice(0, size)); + buffer = buffer.slice(size); + state = 0; + } + } + }); + + await new Promise((resolve, reject) => { + decompressedStream.on('end', resolve); + decompressedStream.on('error', reject); + }); }; -await decompressDir(); +await decompressDir(); \ No newline at end of file