diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..755c365b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,17 @@ +{ + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "license": "ISC", + "engines": { + "node": ">=24.10.0", + "npm": ">=10.9.2" + } + } + } +} diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..3dfd80a6 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,46 @@ +import { createInterface } from 'node:readline'; + const interactive = () => { - // Write your code here - // Use readline module for interactive CLI - // Support commands: uptime, cwd, date, exit - // Handle Ctrl+C and unknown commands + const { stdin, stdout, cwd } = process; + + const rl = createInterface({ + input: stdin, + output: stdout, + prompt: '> ' + }); + + rl.prompt(); + + rl.on('line', (line) => { + const command = line.trim(); + + switch (command) { + case 'uptime': + console.log(`Uptime: ${process.uptime().toFixed(2)}s`); + break; + case 'cwd': + console.log(`Current directory: ${cwd()}`); + break; + case 'date': + console.log(`Current date and time: ${new Date().toISOString()}`); + break; + case 'exit': + rl.close(); + break; + default: + console.log('Unknown command'); + } + + rl.prompt(); + }).on('close', () => { + console.log('Goodbye!'); + process.exit(0); + }); + + process.on('SIGINT', () => { + console.log('\nGoodbye!'); + process.exit(0); + }); }; interactive(); diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..9a83e52b 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,60 @@ const progress = () => { - // Write your code here - // Simulate progress bar from 0% to 100% over ~5 seconds - // Update in place using \r every 100ms - // Format: [████████████████████ ] 67% + const { stdout, argv } = process; + + let 
duration = 5000; + let interval = 100; + let length = 30; + let color = null; + + for (let i = 2; i < argv.length; i++) { + switch (argv[i]) { + case '--duration': + duration = parseInt(argv[++i], 10) || duration; + break; + case '--interval': + interval = parseInt(argv[++i], 10) || interval; + break; + case '--length': + length = parseInt(argv[++i], 10) || length; + break; + case '--color': + const c = argv[++i]; + if (/^#([0-9A-Fa-f]{6})$/.test(c)) color = c; + break; + default: + console.warn(`Unknown param: ${argv[i]}`); + } + } + + const hexToAnsi = hex => { + const r = parseInt(hex.slice(1, 3), 16); + const g = parseInt(hex.slice(3, 5), 16); + const b = parseInt(hex.slice(5, 7), 16); + return `\x1b[38;2;${r};${g};${b}m`; + }; + + const drawProgress = percent => { + const filledLength = Math.round((percent / 100) * length); + const emptyLength = length - filledLength; + const filled = '█'.repeat(filledLength); + const empty = ' '.repeat(emptyLength); + const coloredFilled = color ? 
`${hexToAnsi(color)}${filled}\x1b[0m` : filled; + stdout.write(`\r[${coloredFilled}${empty}] ${percent}%`); + }; + + const steps = Math.ceil(duration / interval); + let currentStep = 0; + + const timer = setInterval(() => { + currentStep++; + const percent = Math.min(Math.round((currentStep / steps) * 100), 100); + drawProgress(percent); + + if (currentStep >= steps) { + clearInterval(timer); + stdout.write('\nDone!\n'); + } + }, interval); }; progress(); diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..77a2a022 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,10 +1,30 @@ +import { spawn } from 'node:child_process'; + const execCommand = () => { - // Write your code here - // Take command from CLI argument - // Spawn child process - // Pipe child stdout/stderr to parent stdout/stderr - // Pass environment variables - // Exit with same code as child + const { argv, exit, env } = process; + + const commandArg = argv[2]; + + if (!commandArg) { + console.error('Command not specified.'); + exit(1); + } + + const [command, ...args] = commandArg.split(' '); + + const child = spawn(command, args, { + stdio: 'inherit', + env, + }); + + child.on('exit', (code) => { + exit(code); + }); + + child.on('error', (err) => { + console.error('The command execution failed with an error', err.message); + exit(1); + }); }; execCommand(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..d1437e1d 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,46 @@ +import { fileURLToPath } from 'node:url'; +import { join } from 'node:path'; +import { readdir } from 'node:fs/promises'; + const findByExt = async () => { - // Write your code here - // Recursively find all files with specific extension - // Parse --ext CLI argument (default: .txt) + const getDirEntries = async (path) => { + const childElements = await readdir(path, { withFileTypes: true }); + const dirEntries = []; + + for (const element of 
childElements) { + const elementName = element.name; + + if (element.isFile()) { + dirEntries.push(elementName); + continue; + } + + const fullPath = join(path, elementName); + const subDirEntries = await getDirEntries(fullPath); + + dirEntries.push(...subDirEntries.map(entry => join(elementName, entry))); + } + + return dirEntries; + }; + + const DEFAULT_EXT = 'txt'; + + const args = process.argv.slice(2); + const extArgIndex = args.indexOf('--ext'); + const ext = (extArgIndex !== -1 && (extArgIndex + 1) < args.length) ? args[extArgIndex + 1] : DEFAULT_EXT; + + const rootPath = join(fileURLToPath(import.meta.url), '..', '..', '..', 'workspace'); + + try { + const dirEntries = await getDirEntries(rootPath); + const extFilePaths = dirEntries.flat(Infinity).filter(filePath => filePath.endsWith(`.${ext}`)).sort(); + + console.log(extFilePaths.join('\n')); + } catch (error) { + error.message = `FS operation failed\n${error.message}`; + throw error; + } }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..eef3c693 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,47 @@ +import { fileURLToPath } from 'node:url'; +import { join } from 'node:path'; +import { readdir, readFile, writeFile } from 'node:fs/promises'; + const merge = async () => { - // Write your code here - // Default: read all .txt files from workspace/parts in alphabetical order - // Optional: support --files filename1,filename2,... 
to merge specific files in provided order - // Concatenate content and write to workspace/merged.txt + const TEXT_EXT = 'txt'; + + const rootPath = join(fileURLToPath(import.meta.url), '..', '..', '..', 'workspace/parts'); + let fullContent = ''; + + const args = process.argv.slice(2); + const fileNamesArgIndex = args.indexOf('--files'); + + try { + if (fileNamesArgIndex !== -1 && (fileNamesArgIndex + 1) < args.length) { + const fileNames = args[fileNamesArgIndex + 1].split(','); + + for (const name of fileNames) { + fullContent += await readFile(join(rootPath, `${name}.${TEXT_EXT}`)); + } + } else { + let isTextFileExist = false; + const allEntries = (await readdir(rootPath, { withFileTypes: true })).sort((a, b) => a.name.localeCompare(b.name)); + + for (const entry of allEntries) { + if (entry.isFile() && entry.name.endsWith(`.${TEXT_EXT}`)) { + if (!isTextFileExist) { + isTextFileExist = true; + } + + fullContent += await readFile(join(rootPath, entry.name)); + } + } + + if (!isTextFileExist) { + throw new Error('FS operation failed\nThere are no files with the .txt extension in the folder.'); + } + } + } catch (error) { + error.message = `FS operation failed\n${error.message}`; + throw error; + } + + await writeFile(join(rootPath, '..', 'merged.txt'), fullContent); }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..de1e39d7 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,32 @@ +import { fileURLToPath } from 'node:url'; +import { join, dirname } from 'node:path'; +import { readFile, mkdir, writeFile } from 'node:fs/promises'; + const restore = async () => { - // Write your code here - // Read snapshot.json - // Treat snapshot.rootPath as metadata only - // Recreate directory/file structure in workspace_restored + const currentFilePath = fileURLToPath(import.meta.url); + const rootPath = join(currentFilePath, '..', '..', '..', 'workspace_restored'); + const snapshotPath = join(currentFilePath, '..', '..', '..', 'snapshot.json'); + + try { 
+ await mkdir(rootPath); + + const dirSnapshot = JSON.parse(await readFile(snapshotPath, { encoding: 'utf8' })); + + for (const entry of dirSnapshot.entries) { + if (entry.type === 'directory') { + await mkdir(join(rootPath, entry.path), { recursive: true }); + continue; + } + + const fileData = Buffer.from(entry.content, "base64"); + + await mkdir(join(rootPath, dirname(entry.path)), { recursive: true }); + await writeFile(join(rootPath, entry.path), fileData); + } + } catch (error) { + error.message = `FS operation failed\n${error.message}`; + throw error; + } }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..a6822c62 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,55 @@ +import { fileURLToPath } from 'node:url'; +import { join } from 'node:path'; +import { readdir, readFile, lstat, writeFile } from 'node:fs/promises'; + const snapshot = async () => { - // Write your code here - // Recursively scan workspace directory - // Write snapshot.json with: - // - rootPath: absolute path to workspace - // - entries: flat array of relative paths and metadata + const getDirEntries = async (path) => { + const childElements = await readdir(path, { withFileTypes: true }); + const dirEntries = []; + + for (const element of childElements) { + const entry = { path: element.name }; + const fullPath = join(path, entry.path); + + if (element.isFile()) { + const stats = await lstat(fullPath); + + entry.type = 'file'; + entry.size = stats.size; + entry.content = (await readFile(fullPath)).toString('base64'); + + dirEntries.push(entry); + continue; + } + + entry.type = 'directory'; + + const subDirEntries = await getDirEntries(fullPath); + + dirEntries.push([ + entry, + subDirEntries.map(element => ({ + ...element, + path: join(entry.path, element.path), + })), + ]); + } + + return dirEntries; + }; + + try { + const rootPath = join(fileURLToPath(import.meta.url), '..', '..', '..', 'workspace'); + const entries = await 
getDirEntries(rootPath); + + await writeFile( + join(rootPath, '..', 'snapshot.json'), + JSON.stringify({ rootPath, entries: entries.flat(Infinity) }, null, 2) + ); + } catch (error) { + error.message = `FS operation failed\n${error.message}`; + throw error; + } }; await snapshot(); diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..82ade401 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,43 @@ +import { fileURLToPath } from 'node:url'; +import { createReadStream } from 'node:fs'; +import { readFile } from 'node:fs/promises'; +import { createHash } from 'node:crypto'; +import { join } from 'node:path'; + const verify = async () => { - // Write your code here - // Read checksums.json - // Calculate SHA256 hash using Streams API - // Print result: filename — OK/FAIL + const getHash = (filePath) => new Promise((resolve, reject) => { + const hash = createHash('sha256'); + const readStream = createReadStream(filePath); + + readStream.on('error', reject); + + readStream.on('data', (chunk) => hash.update(chunk)); + + readStream.on('end', () => resolve(hash.digest('hex'))); + }); + + const currentFilePath = fileURLToPath(import.meta.url); + const rootPath = join(currentFilePath, '..', '..', '..', 'checksums.json'); + + let checksums; + + try { + checksums = JSON.parse(await readFile(rootPath, 'utf8')); + } catch (error) { + error.message = `FS operation failed\n${error.message}`; + throw error; + } + + for (const [fileName, expectedHash] of Object.entries(checksums)) { + try { + const actualHash = await getHash(join(currentFilePath, '..', '..', '..', fileName)); + const status = actualHash === expectedHash ? 
'OK' : 'FAIL'; + + console.log(`${fileName} — ${status}`); + } catch { + console.log(`${fileName} — FAIL`); + } + } }; -await verify(); +await verify(); \ No newline at end of file diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..2b6a3d7e 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,22 @@ const dynamic = async () => { - // Write your code here - // Accept plugin name as CLI argument - // Dynamically import plugin from plugins/ directory - // Call run() function and print result - // Handle missing plugin case + const { argv, exit } = process; + + const pluginName = argv[2]; + + if (!pluginName) { + console.error('The plugin name is not specified.'); + exit(1); + } + + try { + const plugin = await import(`./plugins/${pluginName}.js`); + const result = await plugin.run(); + + console.log(result); + } catch (error) { + console.error('Plugin with this name not found.'); + exit(1); + } }; await dynamic(); diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..145f721c 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,26 @@ +import { Transform } from 'node:stream'; + const filter = () => { - // Write your code here - // Read from process.stdin - // Filter lines by --pattern CLI argument - // Use Transform Stream - // Write to process.stdout + const { stdin, stdout, argv, exit } = process; + + const args = argv.slice(2); + const patternArgIndex = args.indexOf('--pattern'); + + if (patternArgIndex === -1 || (patternArgIndex + 1) >= args.length) { + console.error('No pattern specified. 
Restart with pattern.'); + exit(1); + } + + const filterStream = new Transform({ + transform(data, _, callback) { + const lines = data.toString().split('\n'); + + this.push(lines.filter(line => line.includes(args[patternArgIndex + 1])).join('\n')); + callback(); + } + }); + + stdin.pipe(filterStream).pipe(stdout); }; filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..94745cfe 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,20 @@ +import { Transform } from 'node:stream'; + const lineNumberer = () => { - // Write your code here - // Read from process.stdin - // Use Transform Stream to prepend line numbers - // Write to process.stdout + const { stdin, stdout } = process; + + let lineNumber = 1; + + const lineNumberStream = new Transform({ + transform(data, _, callback) { + const lines = data.toString().split('\n'); + + this.push(lines.map(line => `${lineNumber++} | ${line}`).join('\n')); + callback(); + } + }); + + stdin.pipe(lineNumberStream).pipe(stdout); }; lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..7983c65e 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,74 @@ +import { fileURLToPath } from 'node:url'; +import { join } from 'node:path'; +import { createReadStream, createWriteStream } from 'node:fs'; +import { Transform } from 'node:stream'; + const split = async () => { - // Write your code here - // Read source.txt using Readable Stream - // Split into chunk_1.txt, chunk_2.txt, etc. 
- // Each chunk max N lines (--lines CLI argument, default: 10) + const { argv, exit } = process; + + const DEFAULT_LINE_LIMIT = 10; + + const currentFilePath = fileURLToPath(import.meta.url); + const rootPath = join(currentFilePath, '..', '..', '..', 'source.txt'); + + const args = argv.slice(2); + const chunkLineLimitArgIndex = args.indexOf('--lines'); + let chunkLineLimit = DEFAULT_LINE_LIMIT; + + if (chunkLineLimitArgIndex !== -1 && (chunkLineLimitArgIndex + 1) < args.length) { + chunkLineLimit = parseInt(args[chunkLineLimitArgIndex + 1], 10); + } + + if (!Number.isInteger(chunkLineLimit) || chunkLineLimit <= 0) { + console.error('Invalid line count. Must be a positive integer.'); + exit(1); + } + + const readStream = createReadStream(rootPath, { encoding: 'utf8' }); + + const writeChunk = (chunkContent, chunkNumber) => { + const writeChunkStream = createWriteStream( + join(currentFilePath, '..', '..', '..', `chunk_${chunkNumber}.txt`), + { flags: 'a' }, + ); + + writeChunkStream.write(chunkContent); + writeChunkStream.end(); + }; + + let chunkNumber = 1; + let chunkLineNumber = 0; + let chunkLines = []; + + const splitStream = new Transform({ + transform(data, _, callback) { + const lines = data.toString().split('\n'); + + for (const line of lines) { + chunkLineNumber++; + chunkLines.push(line); + + if (chunkLineNumber === chunkLineLimit) { + writeChunk(chunkLines.join('\n'), chunkNumber); + + chunkNumber++; + chunkLineNumber = 0; + chunkLines = []; + } + } + + callback(); + }, + flush(callback) { + if (chunkLines.length) { + writeChunk(chunkLines.join('\n'), chunkNumber); + } + + callback(); + } + }); + + readStream.pipe(splitStream); }; await split(); diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..6a7f07c1 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,11 +1,69 @@ +import { fileURLToPath } from 'node:url'; +import { join } from 'node:path'; +import { Worker } from 'node:worker_threads'; +import { cpus } from 'node:os'; +import { 
readFile } from 'node:fs/promises'; + const main = async () => { - // Write your code here - // Read data.json containing array of numbers - // Split into N chunks (N = CPU cores) - // Create N workers, send one chunk to each - // Collect sorted chunks - // Merge using k-way merge algorithm - // Log final sorted array + const currentFilePath = fileURLToPath(import.meta.url); + const rootPath = join(currentFilePath, '..', '..', '..', 'data.json'); + const workerPath = join(currentFilePath, '..', 'worker.js'); + const data = JSON.parse(await readFile(rootPath, 'utf8')); + + if (!Array.isArray(data)) throw new Error('JSON must contain an array'); + + const numCPUs = cpus().length; + const chunkSize = Math.ceil(data.length / numCPUs); + const chunks = []; + + for (let i = 0; i < numCPUs; i++) { + const start = i * chunkSize; + const end = start + chunkSize; + + chunks.push(data.slice(start, end)); + } + + const workers = chunks.map(() => new Worker(workerPath)); + const sortedChunks = await Promise.all( + workers.map((worker, index) => { + return new Promise((resolve, reject) => { + worker.once('message', resolve); + worker.once('error', reject); + worker.postMessage(chunks[index]); + }); + }) + ); + + const finalSorted = mergeSortedChunks(sortedChunks); + + console.log('Final sorted array:', finalSorted); + + workers.forEach(w => w.terminate()); }; +function mergeSortedChunks(arrays) { + const result = []; + const pointers = new Array(arrays.length).fill(0); + + while (true) { + let minVal = Infinity; + let minIdx = -1; + + for (let i = 0; i < arrays.length; i++) { + const ptr = pointers[i]; + if (ptr < arrays[i].length && arrays[i][ptr] < minVal) { + minVal = arrays[i][ptr]; + minIdx = i; + } + } + + if (minIdx === -1) break; + + result.push(minVal); + pointers[minIdx]++; + } + + return result; +} + await main(); diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..e242f135 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -1,9 +1,10 @@ import { 
parentPort } from 'worker_threads'; -// Receive array from main thread -// Sort in ascending order -// Send back to main thread - -parentPort.on('message', (data) => { - // Write your code here +parentPort.on('message', data => { + if (Array.isArray(data)) { + const sortedArray = data.slice().sort((a, b) => a - b); + parentPort.postMessage(sortedArray); + } else { + parentPort.postMessage({ error: 'Data must be an array of numbers' }); + } });