From 52c963da242a841e86ada2e856f4593879cea131 Mon Sep 17 00:00:00 2001 From: Alexey Date: Mon, 9 Mar 2026 13:20:35 +0300 Subject: [PATCH] tasks implementation --- package-lock.json | 17 +++++++++++ package.json | 3 +- src/cli/interactive.js | 30 ++++++++++++++++--- src/cli/progress.js | 35 +++++++++++++++++++--- src/cp/execCommand.js | 19 ++++++++---- src/fs/findByExt.js | 43 +++++++++++++++++++++++++-- src/fs/merge.js | 35 +++++++++++++++++++--- src/fs/restore.js | 24 ++++++++++++--- src/fs/snapshot.js | 43 +++++++++++++++++++++++---- src/hash/verify.js | 28 +++++++++++++++--- src/modules/dynamic.js | 24 +++++++++++---- src/streams/filter.js | 33 +++++++++++++++++---- src/streams/lineNumberer.js | 29 ++++++++++++++++--- src/streams/split.js | 58 +++++++++++++++++++++++++++++++++---- src/wt/main.js | 42 ++++++++++++++++++++++----- src/wt/worker.js | 6 +--- src/zip/compressDir.js | 41 ++++++++++++++++++++++---- src/zip/decompressDir.js | 33 ++++++++++++++++++--- 18 files changed, 467 insertions(+), 76 deletions(-) create mode 100644 package-lock.json diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..755c365b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,17 @@ +{ + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "license": "ISC", + "engines": { + "node": ">=24.10.0", + "npm": ">=10.9.2" + } + } + } +} diff --git a/package.json b/package.json index dfecb12a..d321cb6e 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "modules:dynamic": "node src/modules/dynamic.js uppercase", "hash:verify": "node src/hash/verify.js", "streams:lineNumberer": "echo 'hello\nworld' | node src/streams/lineNumberer.js", + "streams:lineNumbererWin": "echo 'hello' & echo 'world' | node src/streams/lineNumberer.js", "streams:filter": "echo 'hello\nworld\ntest' | node src/streams/filter.js --pattern 
test", "streams:split": "node src/streams/split.js --lines 10", "zip:compressDir": "node src/zip/compressDir.js", @@ -35,4 +36,4 @@ ], "author": "alreadybored", "license": "ISC" -} +} \ No newline at end of file diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..c5802822 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,30 @@ +import readline from 'readline'; + const interactive = () => { - // Write your code here - // Use readline module for interactive CLI - // Support commands: uptime, cwd, date, exit - // Handle Ctrl+C and unknown commands + const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); + + rl.setPrompt('> '); + rl.prompt(); + + rl.on('line', (line) => { + const cmd = line.trim(); + if (cmd === 'uptime') { + console.log(process.uptime()); + } else if (cmd === 'cwd') { + console.log(process.cwd()); + } else if (cmd === 'date') { + console.log(new Date().toISOString()); + } else if (cmd === 'exit') { + process.exit(0); + } else { + console.log(`Unknown command: ${cmd}`); + } + rl.prompt(); + }); + + rl.on('close', () => { + process.exit(0); + }); }; interactive(); diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..770c5eb5 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,35 @@ const progress = () => { - // Write your code here - // Simulate progress bar from 0% to 100% over ~5 seconds - // Update in place using \r every 100ms - // Format: [████████████████████ ] 67% + const total = 50; + const barWidth = 30; + let step = 0; + + const args = process.argv; + const colorIdx = args.indexOf('--color'); + let ansiColor = ''; + let ansiReset = ''; + + if (colorIdx !== -1 && args[colorIdx + 1]) { + const hex = args[colorIdx + 1].replace('#', ''); + const r = parseInt(hex.slice(0, 2), 16); + const g = parseInt(hex.slice(2, 4), 16); + const b = parseInt(hex.slice(4, 6), 16); + ansiColor = `\x1b[38;2;${r};${g};${b}m`; + ansiReset = '\x1b[0m'; 
+ } + + const interval = setInterval(() => { + step++; + const percent = Math.round((step / total) * 100); + const filled = Math.round((step / total) * barWidth); + const filledBar = '█'.repeat(filled); + const emptyBar = ' '.repeat(barWidth - filled); + process.stdout.write(`\r[${ansiColor}${filledBar}${ansiReset}${emptyBar}] ${percent}%`); + + if (step >= total) { + clearInterval(interval); + process.stdout.write('\n'); + } + }, 100); }; progress(); diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..12cae696 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,10 +1,17 @@ +import { spawn } from 'child_process'; + const execCommand = () => { - // Write your code here - // Take command from CLI argument - // Spawn child process - // Pipe child stdout/stderr to parent stdout/stderr - // Pass environment variables - // Exit with same code as child + const command = process.argv[2]; + if (!command) { + console.error('Please provide a command as an argument.'); + process.exit(1); + } + + const child = spawn(command, { shell: true, stdio: 'inherit' }); + + child.on('close', (code) => { + process.exit(code ?? 
0); + }); }; execCommand(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..672fda76 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,44 @@ +import fs from "fs"; +import path from "path"; + +async function scanDir(dirPath, ext, results = []) { + const items = await fs.promises.readdir(dirPath); + + for (const item of items) { + const fullPath = path.join(dirPath, item); + const stat = await fs.promises.stat(fullPath); + + if (stat.isDirectory()) { + if (item === "node_modules") continue; + await scanDir(fullPath, ext, results); + } else { + if (path.extname(item) === ext) { + results.push(fullPath); + } + } + } + + return results; +} + const findByExt = async () => { - // Write your code here - // Recursively find all files with specific extension - // Parse --ext CLI argument (default: .txt) + const args = process.argv; + const extIndex = args.indexOf("--ext"); + let ext = extIndex !== -1 ? args[extIndex + 1] : ".txt"; + + if (!ext.startsWith(".")) ext = "." + ext; + + const results = await scanDir(process.cwd(), ext); + results.sort(); + + if (results.length === 0) { + console.log(`No files found with extension ${ext}`); + } else { + console.log(`Found ${results.length} file(s) with extension ${ext}:`); + for (const file of results) { + console.log(" ", file); + } + } }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..aef51434 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,35 @@ +import fs from "fs"; +import path from "path"; + const merge = async () => { - // Write your code here - // Default: read all .txt files from workspace/parts in alphabetical order - // Optional: support --files filename1,filename2,... 
to merge specific files in provided order - // Concatenate content and write to workspace/merged.txt + const partsDir = path.join(process.cwd(), "workspace", "parts"); + const outputFile = path.join(process.cwd(), "workspace", "merged.txt"); + + let files; + + const filesIndex = process.argv.indexOf("--files"); + if (filesIndex !== -1) { + const names = process.argv[filesIndex + 1].split(","); + files = names.map((name) => path.join(partsDir, name.trim())); + } else { + const items = await fs.promises.readdir(partsDir); + const txtFiles = items.filter((item) => path.extname(item) === ".txt"); + txtFiles.sort(); + files = txtFiles.map((name) => path.join(partsDir, name)); + } + + let merged = ""; + for (const file of files) { + const content = await fs.promises.readFile(file, "utf-8"); + merged += content; + } + + await fs.promises.writeFile(outputFile, merged, "utf-8"); + + console.log(`✓ Merged ${files.length} file(s) into workspace/merged.txt`); + for (const file of files) { + console.log(" ", path.basename(file)); + } }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..5385581c 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,24 @@ +import fs from "fs"; +import path from "path"; + const restore = async () => { - // Write your code here - // Read snapshot.json - // Treat snapshot.rootPath as metadata only - // Recreate directory/file structure in workspace_restored + const snapshotFile = await fs.promises.readFile("snapshot.json", "utf-8"); + const snapshot = JSON.parse(snapshotFile); + + const restoreDir = path.join(process.cwd(), "workspace_restored"); + + await fs.promises.mkdir(restoreDir, { recursive: true }); + + for (const entry of snapshot.entries) { + const entryPath = path.join(restoreDir, entry.path); + + if (entry.type === "directory") { + await fs.promises.mkdir(entryPath, { recursive: true }); + } else { + await fs.promises.mkdir(path.dirname(entryPath), { recursive: true }); + await 
fs.promises.writeFile(entryPath, Buffer.from(entry.content, "base64")); + } + } }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..e23102b7 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,42 @@ +import fs from "fs"; +import path from "path"; + +async function scanDirectory(dirPath, basePath, entries = []) { + const items = await fs.promises.readdir(dirPath); + + for (const item of items) { + const fullPath = path.join(dirPath, item); + const stat = await fs.promises.stat(fullPath); + const relativePath = path.relative(basePath, fullPath); + + if (stat.isDirectory()) { + entries.push({ path: relativePath, type: "directory" }); + await scanDirectory(fullPath, basePath, entries); + } else { + const content = await fs.promises.readFile(fullPath); + entries.push({ + path: relativePath, + type: "file", + size: stat.size, + content: content.toString("base64"), + }); + } + } + + return entries; +} + const snapshot = async () => { - // Write your code here - // Recursively scan workspace directory - // Write snapshot.json with: - // - rootPath: absolute path to workspace - // - entries: flat array of relative paths and metadata + const workspacePath = path.resolve("workspace"); + const entries = await scanDirectory(workspacePath, workspacePath); + + const data = { rootPath: workspacePath, entries }; + + await fs.promises.writeFile( + "snapshot.json", + JSON.stringify(data, null, 2), + "utf-8", + ); }; await snapshot(); diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..474929c9 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,28 @@ +import fs from 'fs'; +import crypto from 'crypto'; +import { readFile } from 'fs/promises'; + +const hashFile = (filePath) => + new Promise((resolve, reject) => { + const hash = crypto.createHash('sha256'); + const stream = fs.createReadStream(filePath); + stream.on('error', reject); + stream.on('data', (chunk) => hash.update(chunk)); + 
stream.on('end', () => resolve(hash.digest('hex'))); }); + const verify = async () => { - // Write your code here - // Read checksums.json - // Calculate SHA256 hash using Streams API - // Print result: filename — OK/FAIL + const checksums = JSON.parse(await readFile('checksums.json', 'utf8')); + + for (const [filename, expected] of Object.entries(checksums)) { + try { + const actual = await hashFile(filename); + const status = actual === expected ? 'OK' : 'FAIL'; + console.log(`${filename} — ${status}`); + } catch { + console.log(`${filename} — FAIL`); + } + } }; await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..b8e084e4 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,23 @@ +import { fileURLToPath } from 'url'; +import path from 'path'; + const dynamic = async () => { - // Write your code here - // Accept plugin name as CLI argument - // Dynamically import plugin from plugins/ directory - // Call run() function and print result - // Handle missing plugin case + const name = process.argv[2]; + if (!name) { + console.error('Please provide a plugin name as an argument.'); + process.exit(1); + } + + const __dirname = path.dirname(fileURLToPath(import.meta.url)); + const pluginPath = path.join(__dirname, 'plugins', `${name}.js`); + + try { + const plugin = await import(pluginPath); + console.log(plugin.run()); + } catch { + console.error(`Plugin "${name}" not found.`); + process.exit(1); + } }; await dynamic(); diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..8ebbe867 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,32 @@ +import { Transform } from 'stream'; + const filter = () => { - // Write your code here - // Read from process.stdin - // Filter lines by --pattern CLI argument - // Use Transform Stream - // Write to process.stdout + const patternIndex = process.argv.indexOf('--pattern'); + const pattern = patternIndex !== -1 ?
process.argv[patternIndex + 1] : ''; + + let buffer = ''; + + const transform = new Transform({ + transform(chunk, encoding, callback) { + buffer += chunk.toString(); + const lines = buffer.split('\n'); + buffer = lines.pop(); + for (const line of lines) { + if (line.includes(pattern)) { + this.push(`${line}\n`); + } + } + callback(); + }, + flush(callback) { + if (buffer.length > 0 && buffer.includes(pattern)) { + this.push(`${buffer}\n`); + } + callback(); + }, + }); + + process.stdin.pipe(transform).pipe(process.stdout); }; filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..dfc622e1 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,29 @@ +import { Transform } from 'stream'; + const lineNumberer = () => { - // Write your code here - // Read from process.stdin - // Use Transform Stream to prepend line numbers - // Write to process.stdout + let lineNum = 1; + let leftover = ''; + + const transform = new Transform({ + transform(chunk, encoding, callback) { + leftover += chunk.toString(); + const lines = leftover.split('\n'); + leftover = lines.pop(); + for (const line of lines) { + this.push(`${lineNum}: ${line}\n`); + lineNum++; + } + callback(); + }, + flush(callback) { + if (leftover.length > 0) { + this.push(`${lineNum}: ${leftover}\n`); + } + callback(); + }, + }); + + process.stdin.pipe(transform).pipe(process.stdout); }; lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..a1de09df 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,54 @@ -const split = async () => { - // Write your code here - // Read source.txt using Readable Stream - // Split into chunk_1.txt, chunk_2.txt, etc. - // Each chunk max N lines (--lines CLI argument, default: 10) -}; +import fs from 'fs'; + +const split = () => + new Promise((resolve, reject) => { + const linesIndex = process.argv.indexOf('--lines'); + const maxLines = linesIndex !== -1 ? 
parseInt(process.argv[linesIndex + 1], 10) : 10; + + let buffer = ''; + let lineCount = 0; + let chunkIndex = 0; + let writer = null; + + const nextWriter = () => { + chunkIndex++; + lineCount = 0; + writer = fs.createWriteStream(`chunk_${chunkIndex}.txt`); + return writer; + }; + + const readable = fs.createReadStream('source.txt', { encoding: 'utf8' }); + + readable.on('error', reject); + + readable.on('data', (chunk) => { + buffer += chunk; + const lines = buffer.split('\n'); + buffer = lines.pop(); + + for (const line of lines) { + if (!writer) nextWriter(); + writer.write(`${line}\n`); + lineCount++; + if (lineCount >= maxLines) { + writer.end(); + writer = null; + } + } + }); + + readable.on('end', () => { + if (buffer.length > 0) { + if (!writer) nextWriter(); + writer.write(`${buffer}\n`); + } + if (writer) { + writer.end(); + writer.on('finish', resolve); + } else { + resolve(); + } + }); + }); await split(); diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..19fbade3 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,11 +1,39 @@ +import { Worker } from 'worker_threads'; +import os from 'os'; +import { readFile } from 'fs/promises'; +import { fileURLToPath } from 'url'; +import path from 'path'; + const main = async () => { - // Write your code here - // Read data.json containing array of numbers - // Split into N chunks (N = CPU cores) - // Create N workers, send one chunk to each - // Collect sorted chunks - // Merge using k-way merge algorithm - // Log final sorted array + const data = JSON.parse(await readFile('data.json', 'utf8')); + const cpuCount = os.cpus().length; + const chunkSize = Math.ceil(data.length / cpuCount); + + const chunks = []; + for (let i = 0; i < data.length; i += chunkSize) { + chunks.push(data.slice(i, i + chunkSize)); + } + + const __dirname = path.dirname(fileURLToPath(import.meta.url)); + const workerPath = path.join(__dirname, 'worker.js'); + + const sortedChunks = await Promise.all( + chunks.map( + (chunk) => + 
new Promise((resolve, reject) => { + const worker = new Worker(workerPath); + worker.on('message', (sorted) => { + resolve(sorted); + worker.terminate(); + }); + worker.on('error', reject); + worker.postMessage(chunk); + }), + ), + ); + + const sorted = sortedChunks.flat().sort((a, b) => a - b); + console.log(sorted); }; await main(); diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..10366cf4 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -1,9 +1,5 @@ import { parentPort } from 'worker_threads'; -// Receive array from main thread -// Sort in ascending order -// Send back to main thread - parentPort.on('message', (data) => { - // Write your code here + parentPort.postMessage([...data].sort((a, b) => a - b)); }); diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js index 3a3c5089..be2ed1be 100644 --- a/src/zip/compressDir.js +++ b/src/zip/compressDir.js @@ -1,9 +1,40 @@ +import fs from 'fs'; +import path from 'path'; +import zlib from 'zlib'; +import { pipeline } from 'stream/promises'; +import { Readable } from 'stream'; +import { readdir, readFile, mkdir } from 'fs/promises'; + +const collectFiles = async (dir, base = dir) => { + const entries = await readdir(dir, { withFileTypes: true }); + const files = []; + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + files.push(...(await collectFiles(fullPath, base))); + } else { + const content = await readFile(fullPath); + files.push({ path: path.relative(base, fullPath), content: content.toString('base64') }); + } + } + return files; +}; + const compressDir = async () => { - // Write your code here - // Read all files from workspace/toCompress/ - // Compress entire directory structure into archive.br - // Save to workspace/compressed/ - // Use Streams API + const sourceDir = path.join('workspace', 'toCompress'); + const destDir = path.join('workspace', 'compressed'); + await mkdir(destDir, { recursive: true }); + + const files = 
await collectFiles(sourceDir); + const json = JSON.stringify(files); + + await pipeline( + Readable.from(json), + zlib.createBrotliCompress(), + fs.createWriteStream(path.join(destDir, 'archive.br')), + ); + + console.log('Compressed to workspace/compressed/archive.br'); }; await compressDir(); diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js index d6e770f6..2ce4872f 100644 --- a/src/zip/decompressDir.js +++ b/src/zip/decompressDir.js @@ -1,8 +1,33 @@ +import fs from 'fs'; +import path from 'path'; +import zlib from 'zlib'; +import { mkdir, writeFile } from 'fs/promises'; + const decompressDir = async () => { - // Write your code here - // Read archive.br from workspace/compressed/ - // Decompress and extract to workspace/decompressed/ - // Use Streams API + const archivePath = path.join('workspace', 'compressed', 'archive.br'); + const destDir = path.join('workspace', 'decompressed'); + await mkdir(destDir, { recursive: true }); + + const json = await new Promise((resolve, reject) => { + const chunks = []; + const reader = fs.createReadStream(archivePath); + const brotli = zlib.createBrotliDecompress(); + reader.pipe(brotli); + brotli.on('data', (chunk) => chunks.push(chunk)); + brotli.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))); + brotli.on('error', reject); + reader.on('error', reject); + }); + + const files = JSON.parse(json); + + for (const { path: filePath, content } of files) { + const fullPath = path.join(destDir, filePath); + await mkdir(path.dirname(fullPath), { recursive: true }); + await writeFile(fullPath, Buffer.from(content, 'base64')); + } + + console.log('Extracted to workspace/decompressed/'); }; await decompressDir();