import readline from "readline";

/**
 * Interactive CLI (src/cli/interactive.js).
 * Supported commands: uptime, cwd, date, exit.
 * Handles Ctrl+C and unknown commands, and re-displays the prompt
 * after every handled line.
 */
const interactive = () => {
  console.log(
    "Help:\n\tuptime — prints process uptime in seconds (e.g. Uptime: 12.34s)\n\tcwd — prints the current working directory\n\tdate — prints the current date and time in ISO format\n\texit — prints Goodbye! and terminates the process",
  );

  const read = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
    prompt: "> ",
  });

  read.prompt();

  read.on("line", (line) => {
    const command = line.trim();
    switch (command) {
      case "uptime":
        // bug fix: help text promises the "Uptime: 12.34s" shape.
        console.log(`\tUptime: ${process.uptime().toFixed(2)}s`);
        break;
      case "cwd":
        console.log("\t", process.cwd());
        break;
      case "date":
        // bug fix: help text promises ISO format; a bare Date printed
        // a locale-dependent string.
        console.log("\t", new Date().toISOString());
        break;
      case "exit":
        console.log("\tGoodbye!\n");
        read.close();
        return;
      default:
        console.log("Unknown command");
    }
    // Re-display the prompt so the CLI stays usable after each command.
    read.prompt();
  });

  // Ctrl+C: exit gracefully with the same farewell as `exit`
  // (previously unhandled despite the task requirement).
  read.on("SIGINT", () => {
    console.log("\tGoodbye!\n");
    read.close();
  });
};

interactive();
import readline from "readline";

/**
 * Convert a "#RRGGBB" hex color to a 24-bit ANSI foreground escape.
 * Returns "" (and prints a hint) for anything that is not a valid
 * 6-digit hex color.
 */
function hexToAnsi(hex) {
  const match = /^#([0-9a-f]{6})$/i.exec(String(hex));
  if (!match) {
    console.error("Invalid hex color code. Use format: '#RRGGBB'");
    return "";
  }
  const num = parseInt(match[1], 16);
  const r = (num >> 16) & 255;
  const g = (num >> 8) & 255;
  const b = num & 255;
  return `\x1b[38;2;${r};${g};${b}m`;
}

/**
 * Render an in-place progress bar on stdout.
 * Flags: --duration ms (default 5000), --interval ms (default 100),
 * --length chars (default 30), --color #RRGGBB (default white).
 * Format: [████████            ] 67.0%
 */
const progress = () => {
  const args = process.argv;
  let duration = 5000;
  let interval = 100;
  let length = 30;
  let color = hexToAnsi("#ffffff");

  args.forEach((arg, index) => {
    switch (arg) {
      case "--duration":
        // bug fix: the value was kept as a string; NaN (missing value)
        // now falls back to the default instead of freezing the bar.
        duration = Number.parseInt(args[index + 1], 10) || duration;
        break;
      case "--interval":
        interval = Number.parseInt(args[index + 1], 10) || interval;
        break;
      case "--length":
        length = Number.parseInt(args[index + 1], 10) || length;
        break;
      case "--color":
        color = hexToAnsi(args[index + 1]);
        break;
      default:
        break;
    }
  });

  const steps = Math.ceil(duration / interval);
  const reset = "\x1b[0m"; // bug fix: color was never reset
  let tick = 0;

  const timer = setInterval(() => {
    tick++;
    const percent = Math.min((tick / steps) * 100, 100);
    const filled = Math.round((length * percent) / 100);
    const bar =
      color + "█".repeat(filled) + " ".repeat(length - filled) + reset;

    process.stdout.write(`\r[${bar}] ${percent.toFixed(1)}%`);

    // bug fix: the old done-branch lacked a `return`, so one more bar
    // line was written after "Done".
    if (tick >= steps) {
      clearInterval(timer);
      console.log("\nDone");
    }
  }, interval);
};

progress();
import { spawn } from "child_process";

/**
 * Run the shell command given as the first CLI argument
 * (src/cp/execCommand.js).
 * Wiring: parent stdin → child stdin, child stdout/stderr → parent.
 * Environment variables are passed through; the parent exits with the
 * child's exit code.
 * (The dead, never-called Worker experiment was removed.)
 */
const execCommand = () => {
  const command = process.argv[2];
  if (!command) {
    console.error('No command provided. Usage: node execCommand.js "ls -la"');
    process.exit(1);
  }

  const child = spawn(command, {
    shell: true,
    env: process.env, // pass environment through explicitly
  });

  child.stdout.pipe(process.stdout);
  child.stderr.pipe(process.stderr); // bug fix: stderr was not piped
  process.stdin.pipe(child.stdin); // bug fix: pipe direction was reversed

  // code is null when the child dies from a signal; treat that as 0.
  child.on("close", (code) => process.exit(code ?? 0));
};

execCommand();

import fs from "fs/promises";
import path from "path";

/**
 * Recursively list files under cwd whose extension matches --ext
 * (src/fs/findByExt.js). Prints paths relative to cwd.
 */
const findByExt = async () => {
  const root = process.cwd();
  const filesArray = [];

  async function scanDir(ext, dir) {
    const files = await fs.readdir(dir);
    for (const file of files) {
      const fullPath = path.join(dir, file);
      const stat = await fs.stat(fullPath);
      if (stat.isDirectory()) {
        await scanDir(ext, fullPath);
      } else if (path.extname(file).replace(".", "") === ext) {
        filesArray.push(path.relative(root, fullPath));
      }
    }
  }

  try {
    const index = process.argv.indexOf("--ext");
    // bug fix: the spec says default to "txt" when --ext is absent;
    // the old code threw instead.
    const ext = index >= 0 ? process.argv[index + 1] : "txt";
    await scanDir(ext, root);
    console.log(filesArray);
  } catch (e) {
    console.error("Error\n", "FS operation failed", "\n\n");
  }
};

await findByExt();
import fs from "fs/promises";
import path from "path";

/**
 * Merge .txt files into ./merged.txt (src/fs/merge.js).
 * Default: every .txt found recursively under ./part, alphabetical by
 * basename. Optional: --files a.txt,b.txt merges exactly those files
 * (relative to ./part) in the order provided, per the task description.
 */
const merge = async () => {
  const root = path.join(process.cwd(), "part");

  // Recursively collect absolute paths of all .txt files under dir.
  async function collectTxtFiles(dir) {
    const found = [];
    for (const entry of await fs.readdir(dir)) {
      const fullPath = path.join(dir, entry);
      const stat = await fs.stat(fullPath);
      if (stat.isDirectory()) {
        found.push(...(await collectTxtFiles(fullPath)));
      } else if (path.extname(fullPath) === ".txt") {
        found.push(fullPath);
      }
    }
    return found;
  }

  try {
    let textFiles;
    const flagIndex = process.argv.indexOf("--files");
    if (flagIndex >= 0 && process.argv[flagIndex + 1]) {
      // spec: merge the listed files in the provided order (previously
      // unimplemented).
      textFiles = process.argv[flagIndex + 1]
        .split(",")
        .map((name) => path.join(root, name.trim()));
    } else {
      textFiles = await collectTxtFiles(root);
      textFiles.sort((a, b) =>
        path.basename(a).localeCompare(path.basename(b)),
      );
    }

    if (textFiles.length === 0) throw new Error();

    let result = "";
    for (const file of textFiles) {
      result += (await fs.readFile(file, "utf-8")) + "\n";
    }
    await fs.writeFile(path.join(process.cwd(), "merged.txt"), result);
  } catch {
    console.error("Error:\n", "FS operation failed", "\n");
  }
};

await merge();

/**
 * Recreate the tree described by snapshot.json under ./restore
 * (src/fs/restore.js). snapshot.rootPath is metadata only.
 */
const restore = async () => {
  const rootPath = path.join(process.cwd(), "restore");

  try {
    const data = await fs.readFile("./src/fs/snapshot.json");
    const files = JSON.parse(data).entries;
    await fs.mkdir(rootPath, { recursive: true });

    // Directories first so every file write has a parent; recursive so
    // nested paths and re-runs do not fail on existing dirs (bug fix).
    for (const file of files) {
      if (file.type === "directory") {
        await fs.mkdir(path.join(rootPath, file.path), { recursive: true });
      }
    }

    for (const file of files) {
      if (file.type === "file") {
        // bug fix: snapshot stores file content base64-encoded; decode
        // back to the original bytes instead of writing the base64 text.
        await fs.writeFile(
          path.join(rootPath, file.path),
          Buffer.from(file.content, "base64"),
        );
      }
    }
  } catch (e) {
    console.error("Error\n", "FS operation failed", "\n");
  }
};

await restore();
import fs from "fs";
import path from "path";

/**
 * Recursively scan the current working directory and write
 * ./src/fs/snapshot.json (src/fs/snapshot.js) containing:
 *   - rootPath: absolute path of the scanned root (metadata only)
 *   - entries:  flat array of relative paths with type/size, file
 *               content stored base64-encoded.
 */
const snapshot = async () => {
  const rootPath = process.cwd();
  const entries = [];

  // Depth-first synchronous walk; directories are pushed before their
  // children so a restorer can create parents first.
  function readDirectory(pathDirectory) {
    const files = fs.readdirSync(pathDirectory);
    for (const file of files) {
      const fullPath = path.join(pathDirectory, file);
      const stat = fs.statSync(fullPath);
      if (stat.isFile()) {
        // base64 keeps binary files round-trippable through JSON
        const content = fs.readFileSync(fullPath, { encoding: "base64" });
        entries.push({
          path: path.relative(rootPath, fullPath),
          type: "file",
          size: stat.size,
          content,
        });
      } else {
        entries.push({
          path: path.relative(rootPath, fullPath),
          type: "directory",
        });
        readDirectory(fullPath);
      }
    }
  }

  try {
    readDirectory(rootPath);
    const data = { rootPath, entries };
    // bug fix: the replacer argument must be a function/array or null;
    // "" was silently ignored but is not a valid replacer.
    fs.writeFileSync("./src/fs/snapshot.json", JSON.stringify(data, null, 2));
  } catch (e) {
    console.error("Error\n", "FS operation failed", "\n");
  }
};

await snapshot();
import fs from "fs/promises";
import fs2 from "fs";
import crypto from "crypto";
import path from "path";

/**
 * Verify SHA256 checksums listed in src/hash/checksums.json
 * (src/hash/verify.js). Hashing uses the Streams API.
 * Prints "<file> — OK" / "<file> — FAIL" / "<file> — File does not exist".
 */
const verify = async () => {
  // Stream a file through sha256; resolves {file, hash} or {file, error}.
  function calculateHash(filePath) {
    return new Promise((resolve, reject) => {
      if (!fs2.existsSync(filePath)) {
        resolve({ file: filePath, error: "File does not exist" });
        return;
      }
      const hash = crypto.createHash("sha256");
      const stream = fs2.createReadStream(filePath);
      stream.on("data", (data) => hash.update(data));
      stream.on("end", () =>
        resolve({ file: filePath, hash: hash.digest("hex") }),
      );
      stream.on("error", (err) => reject(err));
    });
  }

  try {
    const content = await fs.readFile("src/hash/checksums.json", "utf-8");
    const files = JSON.parse(content);
    for (const file in files) {
      const fullPath = path.join(process.cwd(), "src/hash", file);
      const result = await calculateHash(fullPath);
      if (result.error) {
        console.log(`${file} — ${result.error}`);
      } else {
        console.log(`${file} — ${result.hash === files[file] ? "OK" : "FAIL"}`);
      }
    }
  } catch (e) {
    console.error("FS operation failed");
  }
};

// bug fix: the promise was left floating (`verify();`), so node could
// report an unhandled rejection and exit before the output completed.
await verify();

/**
 * Dynamically load ./plugins/<name>.js (name = first CLI argument),
 * call its run() export and print the result (src/modules/dynamic.js).
 */
const dynamic = async () => {
  const command = process.argv[2];
  try {
    const module = await import(`./plugins/${command}.js`);
    console.log(module.run());
  } catch {
    // bug fix: the missing-plugin case required by the task was unhandled.
    console.error(`Plugin not found: ${command}`);
  }
};

await dynamic();
import { Transform } from "stream";

/**
 * stdin → stdout filter (src/streams/filter.js): pass through only the
 * lines containing the --pattern value.
 */
const filter = () => {
  // bug fix: parse --pattern once, in scope for both transform() and
  // flush() — the old code declared it inside transform(), so flush()
  // crashed with a ReferenceError on the final buffered line. Also,
  // when --pattern was absent, argv[0] (the node binary path) became
  // the pattern.
  const patternIndex = process.argv.indexOf("--pattern");
  const pattern =
    patternIndex >= 0 ? process.argv[patternIndex + 1] || "" : "";

  let buffer = "";
  const filterTransform = new Transform({
    transform(chunk, encoding, callback) {
      buffer += chunk.toString();
      const lines = buffer.split("\n");
      buffer = lines.pop(); // keep the trailing partial line
      const kept = lines.filter((line) => line.includes(pattern));
      // bug fix: a chunk with no matches used to emit a stray "\n".
      callback(null, kept.length > 0 ? kept.join("\n") + "\n" : "");
    },
    flush(callback) {
      if (buffer && buffer.includes(pattern)) {
        this.push(buffer + "\n");
      }
      callback();
    },
  });

  process.stdin.pipe(filterTransform).pipe(process.stdout);
};

filter();

/**
 * stdin → stdout numbering (src/streams/lineNumberer.js): prepend
 * "N: " to every line using a Transform stream.
 */
const lineNumberer = () => {
  let pending = "";
  let lineNumber = 1;
  const lineTransform = new Transform({
    transform(chunk, encoding, callback) {
      pending += chunk.toString();
      const lines = pending.split("\n");
      pending = lines.pop(); // may be a partial line; emit on flush
      const numbered =
        lines.map((line) => `${lineNumber++}: ${line}`).join("\n") + "\n";
      callback(null, numbered);
    },
    flush(callback) {
      if (pending.length > 0) {
        this.push(`${lineNumber++}: ${pending}\n`);
      }
      callback();
    },
  });

  process.stdin.pipe(lineTransform).pipe(process.stdout);
};

lineNumberer();
import fs from "fs";
import path from "path";

/** Split an array into consecutive slices of at most `size` items. */
const chunkArrays = (arr, size) => {
  const result = [];
  for (let i = 0; i < arr.length; i += size) {
    result.push(arr.slice(i, i + size));
  }
  return result;
};

/**
 * Read src/streams/source.txt and split it into chunk_1.txt,
 * chunk_2.txt, … with at most N lines each (--lines, default 10).
 */
const split = async () => {
  const indexParam = process.argv.indexOf("--lines");
  // bug fixes: when --lines was absent the old code read argv[0] (the
  // node binary path); the value was also left a string, so `i += size`
  // in chunkArrays degenerated into string concatenation and every
  // chunk after the second was wrong; and the documented default is 10,
  // not 5.
  const countLines =
    indexParam >= 0
      ? Number.parseInt(process.argv[indexParam + 1], 10) || 10
      : 10;

  const readStream = fs.createReadStream(
    path.join(process.cwd(), "src", "streams", "source.txt"),
    "utf-8",
  );

  // bug fix: a 'data' chunk can end mid-line and writing per-chunk kept
  // overwriting chunk_1.txt for large files — accumulate, split on end.
  let content = "";
  readStream.on("data", (chunk) => {
    content += chunk;
  });
  readStream.on("end", () => {
    const arrays = chunkArrays(content.split("\n"), countLines);
    for (let i = 0; i < arrays.length; i++) {
      fs.writeFileSync(
        path.join(process.cwd(), "src/streams", `chunk_${i + 1}.txt`),
        arrays[i].join("\n"),
      );
    }
    console.log("File reading finish");
  });
  // bug fix: a missing source.txt used to crash with an unhandled
  // 'error' event; report it in the repo's usual style instead.
  readStream.on("error", () => {
    console.error("FS operation failed");
  });
};

await split();
import { readFile } from "fs/promises";
import path from "path";
import { isMainThread, parentPort, Worker } from "worker_threads";
import os from "os";

/**
 * K-way merge of pre-sorted numeric chunks into one ascending array.
 * Repeatedly picks the smallest head element across all chunks.
 */
function sortArrays(chunks) {
  const result = [];
  const indexes = new Array(chunks.length).fill(0);

  while (true) {
    let minValue = Infinity;
    let minChunk = -1;

    for (let i = 0; i < chunks.length; i++) {
      const idx = indexes[i];
      if (idx < chunks[i].length && chunks[i][idx] < minValue) {
        minValue = chunks[i][idx];
        minChunk = i;
      }
    }

    if (minChunk === -1) break;

    result.push(minValue);
    indexes[minChunk]++;
  }
  return result;
}

/**
 * src/wt/main.js: read data.json, split the array into one chunk per
 * CPU core, sort each chunk in a worker, k-way-merge the results.
 */
const main = async () => {
  if (!isMainThread) return; // worker code lives in src/wt/worker.js

  try {
    const dataJson = await readFile(
      path.join(process.cwd(), "src/wt/data.json"),
      { encoding: "utf-8" },
    );
    const arr = JSON.parse(dataJson).array;
    const cpuCount = os.cpus().length;

    // bug fix: the old slice arithmetic (i*size .. (i+1)*size + extra)
    // duplicated elements once the remainder was consumed; track an
    // explicit offset instead.
    const baseSize = Math.floor(arr.length / cpuCount);
    let rem = arr.length % cpuCount;
    const arrays = [];
    let offset = 0;
    for (let i = 0; i < cpuCount; i++) {
      const size = baseSize + (rem > 0 ? 1 : 0);
      arrays.push(arr.slice(offset, offset + size));
      offset += size;
      rem--;
    }

    const arraysSort = [];
    let completed = 0;
    for (let i = 0; i < arrays.length; i++) {
      const worker = new Worker(path.join(process.cwd(), "src/wt/worker.js"));
      worker.on("message", (msg) => {
        arraysSort[i] = msg;
        completed++;
        // bug fix: workers were never terminated, so their message
        // listeners kept the process alive forever.
        void worker.terminate(); // fire-and-forget shutdown
        if (completed >= arrays.length) {
          console.log(sortArrays(arraysSort));
        }
      });
      worker.postMessage(arrays[i]);
    }
  } catch {
    // bug fix: a missing data.json was an unhandled rejection.
    console.error("FS operation failed");
  }
};

await main();

// ---- src/wt/worker.js ----
// Receives an array from the main thread, sorts ascending, posts it back.
// Guarded so the registration is a no-op when parentPort is null.
if (parentPort) {
  parentPort.on("message", (data) => {
    if (Array.isArray(data)) {
      // copy before sorting — don't mutate the received payload
      parentPort.postMessage([...data].sort((a, b) => a - b));
    }
  });
}
import fs from "fs";
import path from "path";
import zlib from "zlib";
import { pipeline } from "stream/promises";

/**
 * Build a 512-byte ustar header for one archive entry.
 * type "0" = regular file, "5" = directory.
 */
function createTarHeader(name, size, type = "0") {
  const buf = Buffer.alloc(512);
  buf.write(name); // entry name (tar format caps this field at 100 bytes)
  buf.write("0000777", 100); // mode
  buf.write("0000000", 108); // uid
  buf.write("0000000", 116); // gid
  buf.write(size.toString(8).padStart(11, "0") + "\0", 124); // size, octal
  buf.write(
    Math.floor(Date.now() / 1000)
      .toString(8)
      .padStart(11, "0") + "\0",
    136,
  ); // mtime, octal
  buf.fill(" ", 148, 156); // checksum field counts as spaces while summing
  buf.write(type, 156);
  buf.write("ustar", 257); // magic
  buf.write("00", 263); // version

  let sum = 0;
  for (let i = 0; i < 512; i++) sum += buf[i];
  buf.write(sum.toString(8).padStart(6, "0") + "\0 ", 148);

  return buf;
}

/** Depth-first walk yielding { path, size, type, full } entries. */
async function* walk(dir, base = dir) {
  const entries = await fs.promises.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    const rel = path.relative(base, full);

    if (entry.isDirectory()) {
      yield { path: rel + "/", size: 0, type: "5", full };
      yield* walk(full, base);
    } else if (entry.isFile()) {
      const stat = await fs.promises.stat(full);
      yield { path: rel, size: stat.size, type: "0", full };
    }
  }
}

/**
 * Tar ./toCompress and brotli-compress the stream into
 * ./compressed/archive.br using the Streams API (src/zip/compressDir.js).
 */
const compressDir = async () => {
  const sourceDir = path.join(process.cwd(), "toCompress");
  const outputDir = path.join(process.cwd(), "compressed");
  const outputFile = path.join(outputDir, "archive.br");

  if (!fs.existsSync(sourceDir)) {
    throw new Error("FS operation failed");
  }
  fs.mkdirSync(outputDir, { recursive: true });

  const brotli = zlib.createBrotliCompress({
    params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 6 },
  });
  const outStream = fs.createWriteStream(outputFile);
  brotli.pipe(outStream);

  for await (const file of walk(sourceDir)) {
    brotli.write(createTarHeader(file.path, file.size, file.type));

    if (file.type === "0") {
      // { end: false } keeps the compressor open across files
      // (the pointless identity generator in the middle was removed).
      await pipeline(fs.createReadStream(file.full), brotli, { end: false });

      const remainder = file.size % 512;
      if (remainder) {
        brotli.write(Buffer.alloc(512 - remainder)); // tar 512-byte padding
      }
    }
  }

  brotli.write(Buffer.alloc(1024)); // tar end-of-archive marker
  brotli.end();

  await new Promise((resolve, reject) => {
    outStream.on("finish", resolve);
    outStream.on("error", reject);
  });
};

// Consistent with the other tasks: report failure instead of dying
// on an unhandled top-level rejection when toCompress is missing.
try {
  await compressDir();
} catch {
  console.error("FS operation failed");
}