From 4276f471f07b72c4439a2a9f1cb66f7884b29f7a Mon Sep 17 00:00:00 2001 From: natasyo Date: Sat, 7 Mar 2026 17:17:10 +0300 Subject: [PATCH 01/17] feat: snapshot --- package-lock.json | 17 +++++++++++++++++ src/fs/snapshot.js | 39 ++++++++++++++++++++++++++++++++++----- 2 files changed, 51 insertions(+), 5 deletions(-) create mode 100644 package-lock.json diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..755c365b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,17 @@ +{ + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "license": "ISC", + "engines": { + "node": ">=24.10.0", + "npm": ">=10.9.2" + } + } + } +} diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..32b891b1 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,38 @@ +import fs from "fs" +import path from "path" + + const snapshot = async () => { - // Write your code here - // Recursively scan workspace directory - // Write snapshot.json with: - // - rootPath: absolute path to workspace - // - entries: flat array of relative paths and metadata + let entries=[] +const rootPath=process.cwd(); + + function readDirectory(pathDirectory){ + const files=fs.readdirSync(pathDirectory); + for(let file of files){ + const fullPath=path.join(pathDirectory, file); + const stat=fs.statSync(fullPath); + if(stat.isFile()){ + const content=fs.readFileSync(fullPath,{encoding:"utf-8"}); + entries.push({ + path:path.relative(rootPath, fullPath), + type:'file', + size:stat.size, + content + }) + }else{ + entries.push({ + path:path.relative(rootPath, fullPath), + type:"directory"}) + readDirectory(fullPath); + } + } + } + readDirectory(rootPath) + const data={ + rootPath, + entries + } + fs.writeFileSync("./src/fs/data.json",JSON.stringify(data, "",2)) }; await snapshot(); From e1865680ef841bdb699ef2b145779b4147918abf Mon 
Sep 17 00:00:00 2001 From: natasyo Date: Sat, 7 Mar 2026 21:41:23 +0300 Subject: [PATCH 02/17] feat:restore --- src/fs/restore.js | 28 ++++++++++++++++++++++++++++ src/fs/snapshot.js | 4 ++-- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..1d347597 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,36 @@ +import fs from "fs/promises" +import path from "path"; + const restore = async () => { // Write your code here // Read snapshot.json // Treat snapshot.rootPath as metadata only // Recreate directory/file structure in workspace_restored + const root=process.cwd() + const rootPath=path.join(root, 'restore'); + +try{ + const data= await fs.readFile('./src/fs/snapshot.json'); + const files=JSON.parse(data).entries; + await fs.mkdir(rootPath, {recursive:true}) + + for(const file of files){ + if(file.type==='directory'){ + await fs.mkdir(path.join(rootPath, file.path), ) + } + } + + + for(const file of files){ + if(file.type==='file'){ + await fs.writeFile(path.join(rootPath, file.path), file.content) + } + } +} +catch(e){ + console.error("Error\n",e.message,'\n') +} + }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 32b891b1..10b28943 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -12,7 +12,7 @@ const rootPath=process.cwd(); const fullPath=path.join(pathDirectory, file); const stat=fs.statSync(fullPath); if(stat.isFile()){ - const content=fs.readFileSync(fullPath,{encoding:"utf-8"}); + const content=fs.readFileSync(fullPath,{encoding:"base64"}); entries.push({ path:path.relative(rootPath, fullPath), type:'file', @@ -32,7 +32,7 @@ const rootPath=process.cwd(); rootPath, entries } - fs.writeFileSync("./src/fs/data.json",JSON.stringify(data, "",2)) + fs.writeFileSync("./src/fs/snapshot.json",JSON.stringify(data, "",2)) }; await snapshot(); From 66c4e80c99a4d31d548d498ad475393135f1dd97 Mon Sep 17 00:00:00 2001 From: natasyo Date: Sun, 8 Mar 2026 
10:13:57 +0300 Subject: [PATCH 03/17] feat: findByExt --- package.json | 2 +- src/fs/findByExt.js | 32 ++++++++++++++++++++++++++++++++ src/fs/text.txt | 1 + 3 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 src/fs/text.txt diff --git a/package.json b/package.json index dfecb12a..0d24a00e 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "scripts": { "fs:snapshot": "node src/fs/snapshot.js", "fs:restore": "node src/fs/restore.js", - "fs:findByExt": "node src/fs/findByExt.js --ext txt", + "fs:findByExt": "node src/fs/findByExt.js --ext js", "fs:merge": "node src/fs/merge.js", "cli:interactive": "node src/cli/interactive.js", "cli:progress": "node src/cli/progress.js", diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..e7fff930 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,39 @@ +import fs from "fs/promises" +import path from "path"; + const findByExt = async () => { // Write your code here // Recursively find all files with specific extension // Parse --ext CLI argument (default: .txt) + + + const root=process.cwd(); + const filesArray=[] + async function scanDir(ext, dir){ + const files=await fs.readdir(dir); + for(let file of files){ + const fullPath=path.join(dir, file) + const stat=await fs.stat(fullPath) + if(stat.isDirectory()) await scanDir(ext, fullPath) + else{ + if(path.extname(file).replace('.','')===ext){ + filesArray.push(path.relative(root, fullPath)) + } + } + } + } + + try{ + const index=process.argv.indexOf('--ext'); + if(index <0) throw new Error('Not found') + const ext=process.argv[index+1]; + await scanDir(ext, root); + console.log(filesArray) + + }catch(e){ + console.error('Error\n',e.message, '\n\n') + } + }; await findByExt(); diff --git a/src/fs/text.txt b/src/fs/text.txt new file mode 100644 index 00000000..07e5bb93 --- /dev/null +++ b/src/fs/text.txt @@ -0,0 +1 @@ +frtrer \ No newline at end of file From bcde920a3806cc1df4b4d0c2ace7e22aeddb11d1 Mon Sep 17 00:00:00 
2001 From: natasyo Date: Sun, 8 Mar 2026 10:32:59 +0300 Subject: [PATCH 04/17] fix: throw error --- src/fs/findByExt.js | 2 +- src/fs/restore.js | 2 +- src/fs/snapshot.js | 7 ++++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index e7fff930..b4d0123e 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -31,7 +31,7 @@ const findByExt = async () => { console.log(filesArray) }catch(e){ - console.error('Error\n',e.message, '\n\n') + console.error('Error\n',"FS operation failed", '\n\n') } }; diff --git a/src/fs/restore.js b/src/fs/restore.js index 1d347597..b058daff 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -28,7 +28,7 @@ try{ } } catch(e){ - console.error("Error\n",e.message,'\n') + console.error("Error\n","FS operation failed",'\n') } }; diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 10b28943..35890180 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -27,12 +27,17 @@ const rootPath=process.cwd(); } } } - readDirectory(rootPath) + try{ + readDirectory(rootPath) const data={ rootPath, entries } fs.writeFileSync("./src/fs/snapshot.json",JSON.stringify(data, "",2)) + }catch(e){ + console.error("Error\n","FS operation failed",'\n') + } + }; await snapshot(); From e6b0f6302c614c3eb5d4a07fc8ae3d1475ac5007 Mon Sep 17 00:00:00 2001 From: natasyo Date: Sun, 8 Mar 2026 11:07:43 +0300 Subject: [PATCH 05/17] feat: fs:merge --- part/textt.txt | 1 + part/ttt.txt | 1 + src/fs/merge.js | 41 +++++++++++++++++++++++++++++++++++++++++ src/fs/text.txt | 1 - 4 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 part/textt.txt create mode 100644 part/ttt.txt delete mode 100644 src/fs/text.txt diff --git a/part/textt.txt b/part/textt.txt new file mode 100644 index 00000000..ecbaa70c --- /dev/null +++ b/part/textt.txt @@ -0,0 +1 @@ +Text 1 \ No newline at end of file diff --git a/part/ttt.txt b/part/ttt.txt new file mode 100644 index 00000000..21e6e024 --- /dev/null +++ 
b/part/ttt.txt @@ -0,0 +1 @@ +jkkdfkjsdjfkjs \ No newline at end of file diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..88082d6b 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,49 @@ +import fs from "fs/promises" +import path from "path"; + const merge = async () => { // Write your code here // Default: read all .txt files from workspace/parts in alphabetical order // Optional: support --files filename1,filename2,... to merge specific files in provided order // Concatenate content and write to workspace/merged.txt + + const root=path.join(process.cwd(), 'part'); + let textFiles=[] + async function readDirectory(pathDir) { + const files=await fs.readdir(pathDir); + for(const file of files){ + const fullPath=path.join(pathDir, file) + const stat=await fs.stat(fullPath); + if(stat.isDirectory()){ + await readDirectory(fullPath) + } else{ + if(path.extname(fullPath)==='.txt'){ + textFiles.push(fullPath); + } + } + } + + } + try{ + await readDirectory(root); + if(textFiles.length===0){ + throw new Error(); + } + textFiles.sort((file1, file2)=>{ + return path.basename(file1).localeCompare(path.basename(file2)) + }) + let result="" + for (const file of textFiles){ + const content=await fs.readFile(file); + result+=content+'\n' + } + await fs.writeFile(path.join(process.cwd(), "merged.txt"),result) + } + catch{ + console.error('Error:\n','FS operation failed', '\n') + } + + }; await merge(); diff --git a/src/fs/text.txt b/src/fs/text.txt deleted file mode 100644 index 07e5bb93..00000000 --- a/src/fs/text.txt +++ /dev/null @@ -1 +0,0 @@ -frtrer \ No newline at end of file From 3e4d23542a22cca0b6dd7f6d303d84ce9b771edd Mon Sep 17 00:00:00 2001 From: natasyo Date: Sun, 8 Mar 2026 12:17:56 +0300 Subject: [PATCH 06/17] feat: cli:interactive --- src/cli/interactive.js | 32 ++++++++++++++++++++++++++ src/fs/findByExt.js | 45 +++++++++++++++++------------------- src/fs/merge.js | 52 +++++++++++++++++++----------------------- src/fs/restore.js | 43 
++++++++++++++++------------------ 4 files changed, 97 insertions(+), 75 deletions(-) diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..e4e6238d 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,40 @@ +import readline from "readline"; const interactive = () => { // Write your code here // Use readline module for interactive CLI // Support commands: uptime, cwd, date, exit // Handle Ctrl+C and unknown commands + + console.log( + "Help:\n\tuptime — prints process uptime in seconds (e.g. Uptime: 12.34s)\n\tcwd — prints the current working directory\n\tdate — prints the current date and time in ISO format\n\texit — prints Goodbye! and terminates the process", + ); + + const read = readline.createInterface({ + input: process.stdin, + output: process.stdout, + prompt: "> ", + }); + read.prompt(); + read.on("line", (line) => { + const command = line.trim(); + switch (command) { + case "uptime": + console.log(`\tUptime: ${process.uptime()} seconds`); + break; + case "cwd": + console.log("\t", process.cwd()); + break; + case "date": + console.log("\t", new Date()); + break; + case "exit": + console.log("\tGoodbye!\n"); + read.close(); + return; + default: + console.log("Unknown command"); + } + }); }; interactive(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index b4d0123e..54a180c9 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,4 +1,4 @@ -import fs from "fs/promises" +import fs from "fs/promises"; import path from "path"; const findByExt = async () => { @@ -6,34 +6,31 @@ const findByExt = async () => { // Recursively find all files with specific extension // Parse --ext CLI argument (default: .txt) - - const root=process.cwd(); - const filesArray=[] - async function scanDir(ext, dir){ - const files=await fs.readdir(dir); - for(let file of files){ - const fullPath=path.join(dir, file) - const stat=await fs.stat(fullPath) - if(stat.isDirectory()) await scanDir(ext, fullPath) - else{ - 
if(path.extname(file).replace('.','')===ext){ - filesArray.push(path.relative(root, fullPath)) - } + const root = process.cwd(); + const filesArray = []; + async function scanDir(ext, dir) { + const files = await fs.readdir(dir); + for (let file of files) { + const fullPath = path.join(dir, file); + const stat = await fs.stat(fullPath); + if (stat.isDirectory()) await scanDir(ext, fullPath); + else { + if (path.extname(file).replace(".", "") === ext) { + filesArray.push(path.relative(root, fullPath)); } } } + } - try{ - const index=process.argv.indexOf('--ext'); - if(index <0) throw new Error('Not found') - const ext=process.argv[index+1]; - await scanDir(ext, root); - console.log(filesArray) - - }catch(e){ - console.error('Error\n',"FS operation failed", '\n\n') + try { + const index = process.argv.indexOf("--ext"); + if (index < 0) throw new Error("Not found"); + const ext = process.argv[index + 1]; + await scanDir(ext, root); + console.log(filesArray); + } catch (e) { + console.error("Error\n", "FS operation failed", "\n\n"); } - }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index 88082d6b..38b8252b 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,4 +1,4 @@ -import fs from "fs/promises" +import fs from "fs/promises"; import path from "path"; const merge = async () => { @@ -7,43 +7,39 @@ const merge = async () => { // Optional: support --files filename1,filename2,... 
to merge specific files in provided order // Concatenate content and write to workspace/merged.txt - const root=path.join(process.cwd(), 'part'); - let textFiles=[] + const root = path.join(process.cwd(), "part"); + let textFiles = []; async function readDirectory(pathDir) { - const files=await fs.readdir(pathDir); - for(const file of files){ - const fullPath=path.join(pathDir, file) - const stat=await fs.stat(fullPath); - if(stat.isDirectory()){ - await readDirectory(fullPath) - } else{ - if(path.extname(fullPath)==='.txt'){ - textFiles.push(fullPath); + const files = await fs.readdir(pathDir); + for (const file of files) { + const fullPath = path.join(pathDir, file); + const stat = await fs.stat(fullPath); + if (stat.isDirectory()) { + await readDirectory(fullPath); + } else { + if (path.extname(fullPath) === ".txt") { + textFiles.push(fullPath); } } } - } - try{ + try { await readDirectory(root); - if(textFiles.length===0){ + if (textFiles.length === 0) { throw new Error(); } - textFiles.sort((file1, file2)=>{ - return path.basename(file1).localeCompare(path.basename(file2)) - }) - let result="" - for (const file of textFiles){ - const content=await fs.readFile(file); - result+=content+'\n' + textFiles.sort((file1, file2) => { + return path.basename(file1).localeCompare(path.basename(file2)); + }); + let result = ""; + for (const file of textFiles) { + const content = await fs.readFile(file); + result += content + "\n"; } - await fs.writeFile(path.join(process.cwd(), "merged.txt"),result) + await fs.writeFile(path.join(process.cwd(), "merged.txt"), result); + } catch { + console.error("Error:\n", "FS operation failed", "\n"); } - catch{ - console.error('Error:\n','FS operation failed', '\n') - } - - }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index b058daff..9c0fb728 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,4 +1,4 @@ -import fs from "fs/promises" +import fs from "fs/promises"; import path from "path"; const restore = 
async () => { @@ -6,31 +6,28 @@ const restore = async () => { // Read snapshot.json // Treat snapshot.rootPath as metadata only // Recreate directory/file structure in workspace_restored - const root=process.cwd() - const rootPath=path.join(root, 'restore'); + const root = process.cwd(); + const rootPath = path.join(root, "restore"); -try{ - const data= await fs.readFile('./src/fs/snapshot.json'); - const files=JSON.parse(data).entries; - await fs.mkdir(rootPath, {recursive:true}) + try { + const data = await fs.readFile("./src/fs/snapshot.json"); + const files = JSON.parse(data).entries; + await fs.mkdir(rootPath, { recursive: true }); - for(const file of files){ - if(file.type==='directory'){ - await fs.mkdir(path.join(rootPath, file.path), ) - } - } - - - for(const file of files){ - if(file.type==='file'){ - await fs.writeFile(path.join(rootPath, file.path), file.content) - } - } -} -catch(e){ - console.error("Error\n","FS operation failed",'\n') -} + for (const file of files) { + if (file.type === "directory") { + await fs.mkdir(path.join(rootPath, file.path)); + } + } + for (const file of files) { + if (file.type === "file") { + await fs.writeFile(path.join(rootPath, file.path), file.content); + } + } + } catch (e) { + console.error("Error\n", "FS operation failed", "\n"); + } }; await restore(); From f55c9470ab92351115e373a4ac84171ca28c7ab1 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 16:56:30 +0300 Subject: [PATCH 07/17] feat: cli:progress --- src/cli/progress.js | 63 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..f6e0dc9d 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,69 @@ +import readline from "readline"; + +function hexToAnsi(hex) { + try { + const match = hex.match(/^#([0-9a-f]{6})$/i); + if (!match) return ""; + + const num = parseInt(match[1], 16); + + const r = (num >> 16) & 255; + const g = 
(num >> 8) & 255; + const b = num & 255; + + return `\x1b[38;2;${r};${g};${b}m`; + } catch { + console.error("Invalid hex color code. Use format: '#RRGGBB'"); + return ""; + } +} + const progress = () => { // Write your code here // Simulate progress bar from 0% to 100% over ~5 seconds // Update in place using \r every 100ms // Format: [████████████████████ ] 67% -}; + const args = process.argv; + let duration = 5000; + let interval = 100; + let length = 30; + let color = hexToAnsi("#ffffff"); + args.forEach((arg, index) => { + switch (arg) { + case "--duration": + duration = args[index + 1]; + break; + case "--interval": + interval = parseInt(args[index + 1]); + break; + case "--length": + length = parseInt(args[index + 1]); + break; + case "--color": + color = hexToAnsi(args[index + 1]); + break; + default: + break; + } + }); + + let progress = 0; + const steps = Math.ceil(duration / interval); + const timer = setInterval(() => { + progress++; + const percent = Math.min((progress / steps) * 100, 100); + const filledLength = Math.round((length * percent) / 100); + if (progress > steps) { + const bar = color + "█".repeat(filledLength); + console.log("\nDone"); + clearInterval(timer); + } + + const bar = + color + "█".repeat(filledLength) + " ".repeat(length - filledLength); + + process.stdout.write(`\r[${bar}] ${percent.toFixed(1)}%`); + }, interval); +}; progress(); From 8d12116ab5a132683a8ad486551bc76fbe493e94 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 19:43:47 +0300 Subject: [PATCH 08/17] feat: modules:dynamic --- package.json | 2 +- src/modules/dynamic.js | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index dfecb12a..9d77b1bc 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,7 @@ "fs:merge": "node src/fs/merge.js", "cli:interactive": "node src/cli/interactive.js", "cli:progress": "node src/cli/progress.js", - "modules:dynamic": "node src/modules/dynamic.js uppercase", + 
"modules:dynamic": "node src/modules/dynamic.js reverse", "hash:verify": "node src/hash/verify.js", "streams:lineNumberer": "echo 'hello\nworld' | node src/streams/lineNumberer.js", "streams:filter": "echo 'hello\nworld\ntest' | node src/streams/filter.js --pattern test", diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..79b71dad 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -4,6 +4,10 @@ const dynamic = async () => { // Dynamically import plugin from plugins/ directory // Call run() function and print result // Handle missing plugin case + + const command = process.argv[2]; + const module = await import(`./plugins/${command}.js`); + console.log(module.run()); }; await dynamic(); From 57f17530186bf64fd658da4bffc949eb0fbba5c8 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 20:32:37 +0300 Subject: [PATCH 09/17] feat: dynamic --- src/hash/file1.txt | 0 src/hash/file2.txt | 0 src/hash/verify.js | 23 +++++++++++++++++++++++ src/modules/dynamic.js | 4 ++++ 4 files changed, 27 insertions(+) create mode 100644 src/hash/file1.txt create mode 100644 src/hash/file2.txt diff --git a/src/hash/file1.txt b/src/hash/file1.txt new file mode 100644 index 00000000..e69de29b diff --git a/src/hash/file2.txt b/src/hash/file2.txt new file mode 100644 index 00000000..e69de29b diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..53703c6d 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,31 @@ +import fs from "fs/promises"; +import fs2 from "fs"; +import crypto from "crypto"; +import path from "path"; + +function calculateHash(filePath) { + return new Promise((resolve, reject) => { + if (fs2.existsSync(filePath)) { + resolve({ file: filePath, error: "File does not exist" }); + } + }); +} + const verify = async () => { // Write your code here // Read checksums.json // Calculate SHA256 hash using Streams API // Print result: filename — OK/FAIL + try { + const content = await 
fs.readFile("src/hash/checksums.json", "utf-8"); + const files = JSON.parse(content); + for (const file in files) { + const fullPath = path.join(process.cwd(), "src/hash", file); + await calculateHash(fullPath); + } + } catch (e) { + console.error("FS operation failed"); + } }; await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..79b71dad 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -4,6 +4,10 @@ const dynamic = async () => { // Dynamically import plugin from plugins/ directory // Call run() function and print result // Handle missing plugin case + + const command = process.argv[2]; + const module = await import(`./plugins/${command}.js`); + console.log(module.run()); }; await dynamic(); From 96235ae37c8d1d3db521940fec62023dceaf5447 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 20:53:41 +0300 Subject: [PATCH 10/17] feat: hash:verify --- src/hash/verify.js | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/src/hash/verify.js b/src/hash/verify.js index 53703c6d..dff243d6 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -3,29 +3,46 @@ import fs2 from "fs"; import crypto from "crypto"; import path from "path"; -function calculateHash(filePath) { - return new Promise((resolve, reject) => { - if (fs2.existsSync(filePath)) { - resolve({ file: filePath, error: "File does not exist" }); - } - }); -} - const verify = async () => { // Write your code here // Read checksums.json // Calculate SHA256 hash using Streams API // Print result: filename — OK/FAIL + + function calculateHash(filePath) { + return new Promise((resolve, reject) => { + if (!fs2.existsSync(filePath)) { + resolve({ file: filePath, error: "File does not exist" }); + return; + } + const hash = crypto.createHash("sha256"); + const stream = fs2.createReadStream(filePath); + stream.on("data", (data) => hash.update(data)); + stream.on("end", () => { + const result = 
hash.digest("hex"); + resolve({ file: filePath, hash: result }); + }); + stream.on("error", (err) => reject(err)); + }); + } + try { const content = await fs.readFile("src/hash/checksums.json", "utf-8"); const files = JSON.parse(content); for (const file in files) { const fullPath = path.join(process.cwd(), "src/hash", file); - await calculateHash(fullPath); + const result = await calculateHash(fullPath); + if (result.error) { + console.log(`${file} — ${result.error}`); + } else { + const expectedHash = files[file]; + console.log( + `${file} — ${result.hash === expectedHash ? "OK" : "FAIL"}`, + ); + } } } catch (e) { console.error("FS operation failed"); } }; - -await verify(); +verify(); From 9b77c5e88ae798076d686a8bc4cbcdbafb3511e5 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 22:43:13 +0300 Subject: [PATCH 11/17] feat: strams line --- src/streams/lineNumberer.js | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..d91200ce 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,30 @@ +import { Transform } from "stream"; + const lineNumberer = () => { // Write your code here // Read from process.stdin // Use Transform Stream to prepend line numbers // Write to process.stdout + let buffer = ""; + let lineNumber = 1; + const lineTransform = new Transform({ + transform(chunk, encoding, callback) { + buffer += chunk.toString(); + const lines = buffer.split("\n"); + buffer = lines.pop(); + const result = + lines.map((line, index) => `${lineNumber++}: ${line}`).join("\n") + + "\n"; + callback(null, result); + }, + flush(callback) { + if (buffer.length > 0) { + this.push(`${lineNumber++}: ${buffer}\n`); + } + callback(); + }, + }); + process.stdin.pipe(lineTransform).pipe(process.stdout); }; lineNumberer(); From d0bf03dc14d0f36f1b5477a68d329e6c6f327ad7 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 23:00:19 +0300 
Subject: [PATCH 12/17] feat: streams:filter --- src/streams/filter.js | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..2c26113f 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,32 @@ +import { Transform } from "stream"; + const filter = () => { // Write your code here // Read from process.stdin // Filter lines by --pattern CLI argument // Use Transform Stream // Write to process.stdout + + let buffer = ""; + const filterTransform = new Transform({ + transform(chunk, encoding, callback) { + buffer += chunk.toString(); + const lines = buffer.split("\n"); + buffer = lines.pop(); + const patternIndex = process.argv.indexOf("--pattern"); + const pattern = process.argv[patternIndex + 1] || ""; + const result = + lines.filter((line) => line.includes(pattern)).join("\n") + "\n"; + callback(null, result); + }, + flush(callback) { + if (buffer && buffer.includes(pattern)) { + this.push(buffer + "\n"); + } + callback(); + }, + }); + process.stdin.pipe(filterTransform).pipe(process.stdout); }; filter(); From 96bd3217311db0fed0939d6e2aa21f69ec68fbd8 Mon Sep 17 00:00:00 2001 From: Natallia Date: Sun, 8 Mar 2026 23:26:11 +0300 Subject: [PATCH 13/17] feat: streams:split --- src/streams/split.js | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..b4ecdd86 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,40 @@ +import fs from "fs"; +import path from "path"; + +const chunkArrays = (arr, size) => { + const result = []; + for (let i = 0; i < arr.length; i += size) { + result.push(arr.slice(i, i + size)); + } + return result; +}; + const split = async () => { // Write your code here // Read source.txt using Readable Stream // Split into chunk_1.txt, chunk_2.txt, etc. 
// Each chunk max N lines (--lines CLI argument, default: 10) + + const readStream = fs.createReadStream( + path.join(process.cwd(), "src", "streams", "source.txt"), + "utf-8", + ); + readStream.on("data", (chunk) => { + const indexParam = process.argv.indexOf("--lines"); + const countLines = process.argv[indexParam + 1] || 5; + const lines = chunk.split("\n"); + const arrays = chunkArrays(lines, countLines); + for (let i = 0; i < arrays.length; i++) { + fs.writeFileSync( + path.join(process.cwd(), "src/streams", `chunk_${i + 1}.txt`), + arrays[i].join("\n"), + ); + } + console.log(arrays); + }); + readStream.on("end", () => { + console.log("File reading finish"); + }); }; await split(); From f82c2da8db8aff023cea9a4d0de6fd2ad284b49d Mon Sep 17 00:00:00 2001 From: Natallia Date: Mon, 9 Mar 2026 20:41:30 +0300 Subject: [PATCH 14/17] feat: compressed --- compressed/archive.br | Bin 0 -> 166 bytes src/zip/compressDir.js | 95 +++++++++++++++++++++++++-- toCompress/directory1/dir3/text4.txt | 0 toCompress/directory1/text1.txt | 1 + toCompress/directory1/text2.txt | 1 + toCompress/directory2/text3.txt | 1 + 6 files changed, 93 insertions(+), 5 deletions(-) create mode 100644 compressed/archive.br create mode 100644 toCompress/directory1/dir3/text4.txt create mode 100644 toCompress/directory1/text1.txt create mode 100644 toCompress/directory1/text2.txt create mode 100644 toCompress/directory2/text3.txt diff --git a/compressed/archive.br b/compressed/archive.br new file mode 100644 index 0000000000000000000000000000000000000000..2907d0a50d718db4f2d23e3aebe306354144deb3 GIT binary patch literal 166 zcmV;X09pSV{}%wnmK!M=lH*}OEXXu@H|L!Wp3~s~XS^vDZ7TI&`QQ+V$6FLdyB%t6 zv { - // Write your code here - // Read all files from workspace/toCompress/ - // Compress entire directory structure into archive.br - // Save to workspace/compressed/ - // Use Streams API + const sourceDir = path.join(process.cwd(), "toCompress"); + const outputDir = path.join(process.cwd(), "compressed"); + 
const outputFile = path.join(outputDir, "archive.br"); + + if (!fs.existsSync(sourceDir)) { + throw new Error("FS operation failed"); + } + + fs.mkdirSync(outputDir, { recursive: true }); + + const brotli = zlib.createBrotliCompress({ + params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 6 }, + }); + + const outStream = fs.createWriteStream(outputFile); + brotli.pipe(outStream); + + for await (const file of walk(sourceDir)) { + brotli.write(createTarHeader(file.path, file.size, file.type)); + + if (file.type === "0") { + await pipeline( + fs.createReadStream(file.full), + async function* (source) { + for await (const chunk of source) yield chunk; + }, + brotli, + { end: false }, + ); + + const remainder = file.size % 512; + if (remainder) { + brotli.write(Buffer.alloc(512 - remainder)); + } + } + } + + brotli.write(Buffer.alloc(1024)); + brotli.end(); + + return new Promise((resolve, reject) => { + outStream.on("finish", () => resolve()); + outStream.on("error", reject); + }); }; await compressDir(); diff --git a/toCompress/directory1/dir3/text4.txt b/toCompress/directory1/dir3/text4.txt new file mode 100644 index 00000000..e69de29b diff --git a/toCompress/directory1/text1.txt b/toCompress/directory1/text1.txt new file mode 100644 index 00000000..0f0c75fb --- /dev/null +++ b/toCompress/directory1/text1.txt @@ -0,0 +1 @@ +dfgdfg \ No newline at end of file diff --git a/toCompress/directory1/text2.txt b/toCompress/directory1/text2.txt new file mode 100644 index 00000000..9d0e3afc --- /dev/null +++ b/toCompress/directory1/text2.txt @@ -0,0 +1 @@ +dfghdfghfdgh \ No newline at end of file diff --git a/toCompress/directory2/text3.txt b/toCompress/directory2/text3.txt new file mode 100644 index 00000000..ed125237 --- /dev/null +++ b/toCompress/directory2/text3.txt @@ -0,0 +1 @@ +dfgdfgsdf \ No newline at end of file From 59b4e6f6b13560dfcf0787125bd5c712bf2938f2 Mon Sep 17 00:00:00 2001 From: Natallia Date: Tue, 10 Mar 2026 18:34:52 +0300 Subject: [PATCH 15/17] feat: Worker 
// K-way merge of individually sorted numeric chunks into a single
// ascending array: repeatedly take the smallest value among the
// current heads of all chunks until every chunk is exhausted.
function sortArrays(chunks) {
  const merged = [];
  // One read cursor per chunk, all starting at the first element.
  const cursors = chunks.map(() => 0);

  for (;;) {
    let minVal = Infinity;
    let best = -1;

    // Find the chunk whose current head is the smallest; ties go to
    // the earliest chunk (strict < comparison).
    chunks.forEach((chunk, i) => {
      const pos = cursors[i];
      if (pos < chunk.length && chunk[pos] < minVal) {
        minVal = chunk[pos];
        best = i;
      }
    });

    // No chunk had a remaining element: merge complete.
    if (best === -1) break;

    merged.push(minVal);
    cursors[best] += 1;
  }

  return merged;
}
const main = async () => {
  // Read data.json, split the array into one chunk per CPU core,
  // sort each chunk in a worker thread, then k-way merge the sorted
  // chunks and log the final sorted array.

  if (!isMainThread) {
    // Preserved worker-side branch from the original implementation.
    parentPort.postMessage("hello");
    console.log("no main");
    return;
  }

  const dataJson = await readFile(
    path.join(process.cwd(), "src/wt/data.json"),
    { encoding: "utf-8" },
  );
  const arr = JSON.parse(dataJson).array;
  const cpuCount = os.cpus().length;

  // BUG FIX: the old slicing kept `i * sizeArr` as the start offset
  // while extending the end for the remainder, producing OVERLAPPING
  // chunks (duplicated elements) whenever arr.length % cpuCount > 0.
  // Track an explicit running offset instead.
  const baseSize = Math.floor(arr.length / cpuCount);
  let rem = arr.length % cpuCount;
  const chunks = [];
  let offset = 0;
  for (let i = 0; i < cpuCount; i++) {
    const size = baseSize + (rem > 0 ? 1 : 0);
    if (rem > 0) rem--;
    chunks.push(arr.slice(offset, offset + size));
    offset += size;
  }

  // One worker per chunk; collect each sorted chunk in order.
  const sortedChunks = await Promise.all(
    chunks.map(
      (chunk) =>
        new Promise((resolve, reject) => {
          const worker = new Worker(
            path.join(process.cwd(), "src/wt/worker.js"),
          );
          worker.once("message", (msg) => {
            resolve(msg);
            // BUG FIX: workers were never terminated, keeping the
            // process alive after the result was printed.
            worker.terminate();
          });
          // BUG FIX: a failing worker used to hang the program; now it
          // rejects and surfaces the error.
          worker.once("error", reject);
          worker.postMessage(chunk);
        }),
    ),
  );

  console.log(sortArrays(sortedChunks));
};

await main();
const execCommand = () => {
  // Run the command given as the first CLI argument in a shell child
  // process, wire the child's stdio to this process, pass the
  // environment through, and exit with the child's exit code.

  const command = process.argv[2];
  if (!command) {
    console.error('No command provided. Usage: node execCommand.js "ls -la"');
    process.exit(1);
  }

  const child = spawn(command, {
    shell: true,
    env: process.env, // pass environment variables to the child
  });

  child.stdout.pipe(process.stdout);
  // BUG FIX: stderr was not forwarded at all, so child errors vanished.
  child.stderr.pipe(process.stderr);
  // BUG FIX: the pipe direction was reversed — the old code did
  // child.stdin.pipe(process.stdin), which reads FROM the child's
  // input stream; parent input must flow INTO the child.
  process.stdin.pipe(child.stdin);

  child.on("close", (code) => {
    // `code` is null when the child was killed by a signal;
    // process.exit(null) would report success, so treat it as failure.
    process.exit(code ?? 1);
  });
};

execCommand();