diff --git a/notes.md b/notes.md new file mode 100644 index 00000000..e78ee731 --- /dev/null +++ b/notes.md @@ -0,0 +1,83 @@ +# πŸ“” Node.js Study Notes - Fundamentals (2026 Edition) + +## πŸ—οΈ 1. Foundations (Path & Process) + +* **`process.cwd()`**: Returns the "Current Working Directory". It's the robot saying: "I am standing exactly here". +* **`process.argv`**: The array that stores everything you type in the terminal after the command. +* **`path.resolve`**: Convert relative routes into absolute routes. +* **`path.join`**: Avoid errors by joining parts of routes in a secure way (handles `/` vs `\`). +* **`path.relative`**: Cleans the route and gives only the path relative to another (e.g., from workspace to file). +* **`path.extname`**: Returns the file extension (e.g., `.txt`). +* **`path.dirname`**: Returns the name of the parent directory. Useful to know where a file is sitting. + +--- + +## πŸ“‚ 2. File System (fs/promises) + +* **`fs/promises`**: Using the asynchronous version of the FS module so we can use `await`. +* **`readdir`**: Lists everything inside a folder. +* **`stat`**: Checks metadata (if it's a file, directory, and its size). +* **`readFile` / `writeFile`**: Basic commands to read content and create/overwrite files. +* **`mkdir`**: Creates a new directory. Used with `{ recursive: true }` to create nested folders (a/b/c) all at once. +* **`Buffer`**: A way to handle raw binary data. We used `Buffer.from(content, "base64")` to turn encoded text back into real files. +* **`.toString("base64")`**: Converts file content into a base64 string for easy storage in JSON. + +--- + +## ⌨️ 3. CLI & Terminal UI + +* **`readline`**: A module used to read input from the terminal line by line. It’s the "listening ear" of the app. +* **`process.uptime()`**: Returns how many seconds the Node process has been running. +* **`.toISOString()`**: Returns a string in standard ISO format (YYYY-MM-DDTHH:mm:ss.sssZ). +* **`\r` (Carriage Return)**: Moves the cursor back to the start of the line without jumping to the next one. Essential for "in-place" updates like progress bars. +* **ANSI Escape Codes**: Special sequences (like `\x1b[38;2;...`) used to color and format terminal text. +* **Hex to RGB**: Converting `#RRGGBB` into three numbers (Red, Green, Blue) so the terminal can apply colors. + +--- + +## 🧩 4. Dynamic Modules + +* **`import()`**: A function-like expression that allows you to load a module asynchronously on the fly. +* **`pathToFileURL`**: Converts a system path into a URL (`file:///...`), necessary for dynamic imports on modern Node.js versions. + +--- + +## πŸ›‘οΈ 5. Hashing & Security + +* **`createHash('sha256')`**: Creates a unique "digital fingerprint" (64 characters). If one bit changes, the hash changes completely. +* **`createReadStream`**: Opens a "river" of data. Instead of drinking the whole thing at once (avoiding memory crashes), we process it by "trickles" (chunks). +* **`pipeline`**: The "glue." It connects streams safely and handles errors automatically. It's the modern way to pipe data. + +--- + +## 🌊 6. Streams & Transformations + +* **Transform Stream**: A duplex stream that modifies or transforms the data as it passes through (e.g., adding line numbers or filtering). +* **`process.stdin` / `process.stdout`**: The standard input (keyboard) and output (screen) streams of the process. +* **`chunk`**: A small piece of data (usually a Buffer) being processed in the stream pipeline. +* **Backpressure**: A situation where data is produced faster than it can be consumed; Streams handle this automatically to save memory. + +--- + +## 🀐 7. Compression (Zlib & Brotli) + +* **`zlib`**: The built-in module for compression and decompression. +* **Brotli (`.br`)**: A modern, high-efficiency compression algorithm from Google, superior to Gzip for text assets. +* **`createBrotliCompress` / `createBrotliDecompress`**: Transform streams used to shrink or restore data. + +--- + +## 🧡 8. Worker Threads (Parallelism) + +* **Main Thread**: The primary execution path. It handles the event loop and delegates heavy CPU tasks. +* **Worker Thread**: An independent thread that runs alongside the main thread. +* **`parentPort`**: The communication channel (walkie-talkie) between the worker and the main thread. +* **Divide and Conquer**: Splitting a large problem into smaller chunks to be solved in parallel by multiple workers. + +--- + +## πŸ‘Ά 9. Child Processes + +* **`spawn`**: Launches a new process (like `ls`, `date`, or any terminal command). +* **`stdio: inherit / pipe`**: Connecting the child's input/output channels to the parent process. +* **Exit Code**: A number returned by the process when it finishes. `0` means success; anything else indicates an error. \ No newline at end of file diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..755c365b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,17 @@ +{ + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-nodejs-fundamentals", + "version": "1.0.0", + "license": "ISC", + "engines": { + "node": ">=24.10.0", + "npm": ">=10.9.2" + } + } + } +} diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..fe46a97e 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,66 @@ +import readline from "node:readline"; + +// Write your code here +// Use readline module for interactive CLI +// Support commands: uptime, cwd, date, exit +// Handle Ctrl+C and unknown commands + const interactive = () => { - // Write your code here - // Use readline module for interactive CLI - // Support commands: uptime, cwd, date, exit - // Handle Ctrl+C and unknown commands + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + // Requirement: Display a prompt "> " + prompt: "> ", + }); + + // Initial prompt display + rl.prompt(); + + rl.on("line", (line) => { + const command = line.trim().toLowerCase(); + + switch (command) { + case "uptime": + // Requirement: prints process uptime in seconds (e.g. Uptime: 12.34s) + process.stdout.write(`Uptime: ${process.uptime().toFixed(2)}s\n`); + break; + + case "cwd": + // Requirement: prints the current working directory + process.stdout.write(`${process.cwd()}\n`); + break; + + case "date": + // Requirement: prints the current date and time in ISO format + process.stdout.write(`${new Date().toISOString()}\n`); + break; + + case "exit": + // Requirement: prints "Goodbye!" and terminates + process.stdout.write("Goodbye!\n"); + rl.close(); + break; + + default: + // Requirement: On unknown command, print "Unknown command" + process.stdout.write("Unknown command\n"); + break; + } + + if (command !== "exit") { + rl.prompt(); + } + }); + + // Requirement: On Ctrl+C or end of input, print "Goodbye!" and exit + rl.on("SIGINT", () => { + process.stdout.write("\nGoodbye!\n"); + rl.close(); + }); + + rl.on("close", () => { + process.exit(0); + }); }; interactive(); diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..a7abad9d 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,58 @@ +// Write your code here +// Simulate progress bar from 0% to 100% over ~5 seconds +// Update in place using \r every 100ms +// Format: [β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ ] 67% +// const progress = () => { - // Write your code here - // Simulate progress bar from 0% to 100% over ~5 seconds - // Update in place using \r every 100ms - // Format: [β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ ] 67% + const args = process.argv; + + // Helper function to get argument values or return a default + const getArg = (flag, defaultValue) => { + const index = args.indexOf(flag); + return index !== -1 && args[index + 1] ? args[index + 1] : defaultValue; + }; + + // Parsing arguments with their default values + const duration = parseInt(getArg("--duration", "5000")); + const interval = parseInt(getArg("--interval", "100")); + const length = parseInt(getArg("--length", "30")); + const hexColor = getArg("--color", null); + + // Helper to convert #RRGGBB to ANSI escape code for 24-bit color + const getAnsiColor = (hex) => { + if (!hex || !/^#[0-9A-Fa-f]{6}$/.test(hex)) return ""; + const r = parseInt(hex.slice(1, 3), 16); + const g = parseInt(hex.slice(3, 5), 16); + const b = parseInt(hex.slice(5, 7), 16); + return `\x1b[38;2;${r};${g};${b}m`; + }; + + const colorStart = getAnsiColor(hexColor); + const colorReset = "\x1b[0m"; + + let percentage = 0; + const increment = 100 / (duration / interval); + + const timer = setInterval(() => { + percentage += increment; + if (percentage > 100) percentage = 100; + + const filledLength = Math.floor((percentage / 100) * length); + const emptyLength = length - filledLength; + + const filledPart = "β–ˆ".repeat(filledLength); + const emptyPart = " ".repeat(emptyLength); + + // Building the bar: Apply color only to the filled part if color exists + const formattedBar = `${colorStart}${filledPart}${colorReset}${emptyPart}`; + + process.stdout.write(`\r[${formattedBar}] ${Math.round(percentage)}%`); + + if (percentage >= 100) { + clearInterval(timer); + process.stdout.write("\nDone!\n"); + } + }, interval); }; progress(); diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..6001efa8 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,10 +1,50 @@ +import { spawn } from "node:child_process"; + +// Write your code here +// Take command from CLI argument +// Spawn child process +// Pipe child stdout/stderr to parent stdout/stderr +// Pass environment variables +// Exit with same code as child + const execCommand = () => { - // Write your code here - // Take command from CLI argument - // Spawn child process - // Pipe child stdout/stderr to parent stdout/stderr - // Pass environment variables - // Exit with same code as child + // 1. Get the command string from the arguments + const commandArg = process.argv[2]; + + if (!commandArg) { + process.stderr.write("Please provide a command string, (e.g., 'ls -la')\n"); + process.exit(1); + } + + // 2. Parse the command and its arguments + // Example: "ls -la" -> command: "ls", args: ["-la"] + const [command, ...args] = commandArg.split(" "); + + // 3. Spawn the child process + // We pass 'process.env' so the child has the same environment variables + const child = spawn(command, args, { + env: process.env, + shell: true, // Use shell to correctly parse strings on Mac/Windows + }); + + // 4. Pipe the outputs + // This connects the child's mouth (stdout) to the parent's mouth (stdout) + child.stdout.pipe(process.stdout); + child.stderr.pipe(process.stderr); + + // + + // 5. Handle the exit + child.on("close", (code) => { + // Requirement: parent process exits with the same exit code + process.exit(code); + }); + + // Handle errors if the command doesn't exist + child.on("error", (err) => { + process.stderr.write(`Failed to start process: ${err.message}\n`); + process.exit(1); + }); }; execCommand(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..863e077c 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,56 @@ +import { readdir, stat } from "node:fs/promises"; +import { resolve, join, relative, extname } from "node:path"; + +// Write your code here +// Recursively find all files with specific extension +// Parse --ext CLI argument (default: .txt) + const findByExt = async () => { - // Write your code here - // Recursively find all files with specific extension - // Parse --ext CLI argument (default: .txt) + try { + const workspacePath = resolve(process.cwd(), "workspace"); + + // Pre-validation: Ensure the workspace directory exists + try { + await stat(workspacePath); + } catch { + throw new Error("FS operation failed"); + } + + const args = process.argv; + const extIndex = args.indexOf("--ext"); + let targetExt = ".txt"; + + if (extIndex !== -1 && args[extIndex + 1]) { + const providedValue = args[extIndex + 1]; + targetExt = providedValue.startsWith(".") + ? providedValue + : `.${providedValue}`; + } + + const results = []; + + const scan = async (currentPath) => { + const folderContents = await readdir(currentPath); + for (const item of folderContents) { + const fullPath = join(currentPath, item); + const itemStat = await stat(fullPath); + + if (itemStat.isFile()) { + if (extname(fullPath) === targetExt) { + results.push(relative(workspacePath, fullPath)); + } + } else if (itemStat.isDirectory()) { + await scan(fullPath); + } + } + }; + + await scan(workspacePath); + results.sort(); + results.forEach((filePath) => process.stdout.write(`${filePath}\n`)); + } catch (error) { + throw new Error("FS operation failed"); + } }; await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..fc6d0051 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,52 @@ +import { readdir, readFile, writeFile, stat } from "node:fs/promises"; +import { resolve, join, extname } from "node:path"; + +// Write your code here +// Default: read all .txt files from workspace/parts in alphabetical order +// Optional: support --files filename1,filename2,... to merge specific files in provided order +// Concatenate content and write to workspace/merged.txt + const merge = async () => { - // Write your code here - // Default: read all .txt files from workspace/parts in alphabetical order - // Optional: support --files filename1,filename2,... to merge specific files in provided order - // Concatenate content and write to workspace/merged.txt + try { + const workspacePath = resolve(process.cwd(), "workspace"); + const partsPath = join(workspacePath, "parts"); + const outputPath = join(workspacePath, "merged.txt"); + + try { + await stat(partsPath); + } catch { + throw new Error("FS operation failed"); + } + + const args = process.argv; + const filesIndex = args.indexOf("--files"); + let filesToMerge = []; + + if (filesIndex !== -1 && args[filesIndex + 1]) { + filesToMerge = args[filesIndex + 1].split(","); + } else { + const allFiles = await readdir(partsPath); + filesToMerge = allFiles.filter((file) => extname(file) === ".txt").sort(); + } + + if (filesToMerge.length === 0) throw new Error("FS operation failed"); + + let combinedContent = ""; + for (const fileName of filesToMerge) { + const filePath = join(partsPath, fileName); + try { + const content = await readFile(filePath, "utf-8"); + combinedContent += content; + } catch { + throw new Error("FS operation failed"); + } + } + + await writeFile(outputPath, combinedContent); + process.stdout.write("merged.txt\n"); + } catch (error) { + throw new Error("FS operation failed"); + } }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..5baf5abb 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,50 @@ +import { readFile, writeFile, mkdir, stat } from "node:fs/promises"; +import { resolve, join, dirname } from "node:path"; + +// Write your code here +// Read snapshot.json +// Treat snapshot.rootPath as metadata only +// Recreate directory/file structure in workspace_restored + const restore = async () => { - // Write your code here - // Read snapshot.json - // Treat snapshot.rootPath as metadata only - // Recreate directory/file structure in workspace_restored + const snapshotPath = resolve(process.cwd(), "snapshot.json"); + const restorePath = resolve(process.cwd(), "workspace_restored"); + + try { + // Validate snapshot exists + try { + await stat(snapshotPath); + } catch { + throw new Error("FS operation failed"); + } + + // Ensure destination does not exist + try { + await stat(restorePath); + throw new Error("FS operation failed"); + } catch (error) { + if (error.message === "FS operation failed") throw error; + } + + const rawData = await readFile(snapshotPath, "utf-8"); + const { entries } = JSON.parse(rawData); + + await mkdir(restorePath); + + for (const entry of entries) { + const entryPath = join(restorePath, entry.path); + + if (entry.type === "directory") { + await mkdir(entryPath, { recursive: true }); + } else { + await mkdir(dirname(entryPath), { recursive: true }); + const fileBuffer = Buffer.from(entry.content, "base64"); + await writeFile(entryPath, fileBuffer); + } + } + } catch (error) { + throw new Error("FS operation failed"); + } }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..703af1d3 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,63 @@ +import { readdir, stat, writeFile, readFile } from "node:fs/promises"; +import { resolve, join, relative } from "node:path"; + +// Write your code here +// Recursively scan workspace directory +// Write snapshot.json with: +// - rootPath: absolute path to workspace +// - entries: flat array of relative paths and metadata + const snapshot = async () => { - // Write your code here - // Recursively scan workspace directory - // Write snapshot.json with: - // - rootPath: absolute path to workspace - // - entries: flat array of relative paths and metadata + try { + const workspacePath = resolve(process.cwd(), "workspace"); + const entries = []; + + try { + await stat(workspacePath); + } catch { + throw new Error("FS operation failed"); + } + + const scan = async (currentDir) => { + const names = await readdir(currentDir); + + for (const name of names) { + const fullPath = join(currentDir, name); + const fileStat = await stat(fullPath); + const relativePath = relative(workspacePath, fullPath); + + if (fileStat.isFile()) { + const content = await readFile(fullPath); + entries.push({ + path: relativePath, + type: "file", + size: fileStat.size, + content: content.toString("base64"), + }); + } else if (fileStat.isDirectory()) { + entries.push({ + path: relativePath, + type: "directory", + }); + await scan(fullPath); + } + } + }; + + await scan(workspacePath); + + const finalResult = { + rootPath: workspacePath, + entries: entries, + }; + + await writeFile("snapshot.json", JSON.stringify(finalResult, null, 2)); + + process.stdout.write(`the rootPath is: ${workspacePath}\n`); + process.stdout.write("Snapshot created successfully!\n"); + } catch (error) { + throw new Error("FS operation failed"); + } }; await snapshot(); diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..90ad6509 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,62 @@ +import { readFile, stat } from "node:fs/promises"; +import { createHash } from "node:crypto"; +import { createReadStream } from "node:fs"; +import { resolve, join } from "node:path"; +import { pipeline } from "node:stream/promises"; + +// Write your code here +// Read checksums.json +// Calculate SHA256 hash using Streams API +// Print result: filename β€” OK/FAIL + const verify = async () => { - // Write your code here - // Read checksums.json - // Calculate SHA256 hash using Streams API - // Print result: filename β€” OK/FAIL + const checksumsPath = resolve(process.cwd(), "checksums.json"); + const workspacePath = resolve(process.cwd(), "workspace"); + + try { + // 1. Initial validation: Ensure checksums.json exists + try { + await stat(checksumsPath); + } catch { + // Required error message if the file is missing + throw new Error("FS operation failed"); + } + + // 2. Load the expected hashes from the JSON file + const rawData = await readFile(checksumsPath, "utf-8"); + const checksums = JSON.parse(rawData); + + // 3. Process each file entry in the JSON + for (const [fileName, expectedHash] of Object.entries(checksums)) { + const filePath = join(workspacePath, fileName); + + try { + // We create a Hash transform stream for SHA256 + const hash = createHash("sha256"); + // We open a readable stream for the file + const fileStream = createReadStream(filePath); + + // + // Pipeline connects the file to the hash calculator and waits for it to finish + await pipeline(fileStream, hash); + + // Calculate the final hexadecimal hash string + const actualHash = hash.digest("hex"); + + // 4. Verification and Output + const isMatch = actualHash === expectedHash; + const status = isMatch ? "OK" : "FAIL"; + + process.stdout.write(`${fileName} β€” ${status}\n`); + } catch { + // If a file listed in the JSON doesn't exist, we print FAIL + process.stdout.write(`${fileName} β€” FAIL\n`); + } + } + } catch (error) { + // Standard error for missing JSON or other critical FS failures + throw new Error("FS operation failed"); + } }; await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..26f43ccd 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,41 @@ +import { resolve } from "node:path"; +import { pathToFileURL } from "node:url"; + +// Write your code here +// Accept plugin name as CLI argument +// Dynamically import plugin from plugins/ directory +// Call run() function and print result +// Handle missing plugin case + const dynamic = async () => { - // Write your code here - // Accept plugin name as CLI argument - // Dynamically import plugin from plugins/ directory - // Call run() function and print result - // Handle missing plugin case + // We take the plugin name from the command line (e.g., node dynamic.js uppercase) + const pluginName = process.argv[2]; + + // If no name is provided, we just exit + if (!pluginName) { + process.exit(1); + } + + try { + // 1. Build the absolute path to the plugin file inside the plugins/ folder + const pluginPath = resolve("src", "modules", "plugins", `${pluginName}.js`); + + // 2. Convert the path to a URL format (required for dynamic imports on some systems) + const pluginURL = pathToFileURL(pluginPath).href; + + // 3. The Magic: Dynamic Import. This loads the file only when we need it. + const pluginModule = await import(pluginURL); + + // 4. Execution: Every plugin must have a run() function + const result = pluginModule.run(); + + // 5. Output: Print the string returned by the plugin + process.stdout.write(`${result}\n`); + } catch (error) { + // Requirement: If the file doesn't exist, print "Plugin not found" and exit with code 1 + process.stdout.write("Plugin not found\n"); + process.exit(1); + } }; await dynamic(); diff --git a/src/modules/plugins/uppercase.js b/src/modules/plugins/uppercase.js index b8b0c6b7..e64440fd 100644 --- a/src/modules/plugins/uppercase.js +++ b/src/modules/plugins/uppercase.js @@ -1,3 +1,3 @@ export const run = () => { - return 'HELLO WORLD'; + return "HELLO WORLD"; }; diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..4d362cab 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,64 @@ -const filter = () => { - // Write your code here - // Read from process.stdin - // Filter lines by --pattern CLI argument - // Use Transform Stream - // Write to process.stdout +import { Transform } from "node:stream"; +import { pipeline } from "node:stream/promises"; + +// Write your code here +// Read from process.stdin +// Filter lines by --pattern CLI argument +// Use Transform Stream +// Write to process.stdout + +const filter = async () => { + const args = process.argv; + const patternIndex = args.indexOf("--pattern"); + + // Requirement: Get the pattern or default to an empty string (shows everything) + const pattern = + patternIndex !== -1 && args[patternIndex + 1] ? args[patternIndex + 1] : ""; + + let remainingData = ""; + + const filterTransform = new Transform({ + /** + * transform: Checks each line of the incoming chunk against the pattern. + */ + transform(chunk, encoding, callback) { + const data = remainingData + chunk.toString(); + const lines = data.split(/\r?\n/); + + // Save the last partial line for the next chunk + remainingData = lines.pop(); + + // We filter only the lines that include our pattern + const filteredOutput = lines + .filter((line) => line.includes(pattern)) + .join("\n"); + + // Only push data if we found matches + if (filteredOutput.length > 0) { + this.push(filteredOutput + "\n"); + } + + callback(); + }, + + /** + * flush: Processes the final piece of data when the stream closes. + */ + flush(callback) { + if (remainingData.length > 0 && remainingData.includes(pattern)) { + this.push(remainingData + "\n"); + } + callback(); + }, + }); + + try { + // Pipeline connects keyboard input -> our filter -> terminal output + await pipeline(process.stdin, filterTransform, process.stdout); + } catch (error) { + process.stderr.write("Stream filtering failed!\n"); + process.exit(1); + } }; -filter(); +await filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..2fa82fe2 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,59 @@ -const lineNumberer = () => { - // Write your code here - // Read from process.stdin - // Use Transform Stream to prepend line numbers - // Write to process.stdout +import { Transform } from "node:stream"; +import { pipeline } from "node:stream/promises"; +// Write your code here +// Read from process.stdin +// Use Transform Stream to prepend line numbers +// Write to process.stdout + +const lineNumberer = async () => { + let lineNumber = 1; + let remainingData = ""; + + const addNumbersTransform = new Transform({ + /** + * The transform method processes each chunk of data. + */ + transform(chunk, encoding, callback) { + // Convert chunk to string and prepend any leftover data from the last chunk + const data = remainingData + chunk.toString(); + const lines = data.split(/\r?\n/); + + // Keep the last element (it might be an incomplete line) + remainingData = lines.pop(); + + // Process complete lines + const output = lines + .map((line) => `${lineNumber++} | ${line}`) + .join("\n"); + + // Push the transformed data to the next stage of the pipeline + if (output.length > 0) { + this.push(output + "\n"); + } + + callback(); + }, + + /** + * flush is called when the input stream ends. + */ + flush(callback) { + // If there's any text left without a final newline, process it now + if (remainingData.length > 0) { + this.push(`${lineNumber++} | ${remainingData}\n`); + } + callback(); + }, + }); + + try { + // Pipeline connects stdin -> our transform -> stdout + await pipeline(process.stdin, addNumbersTransform, process.stdout); + } catch (error) { + // Standard error handling for streams + process.stderr.write("Stream operation failed!\n"); + process.exit(1); + } }; -lineNumberer(); +await lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..bda1efa9 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,76 @@ +import { createReadStream, createWriteStream } from "node:fs"; +import { stat } from "node:fs/promises"; +import { resolve } from "node:path"; +import { createInterface } from "node:readline"; + +// Write your code here +// Read source.txt using Readable Stream +// Split into chunk_1.txt, chunk_2.txt, etc. +// Each chunk max N lines (--lines CLI argument, default: 10) + const split = async () => { - // Write your code here - // Read source.txt using Readable Stream - // Split into chunk_1.txt, chunk_2.txt, etc. - // Each chunk max N lines (--lines CLI argument, default: 10) + const sourcePath = resolve(process.cwd(), "source.txt"); + const args = process.argv; + const linesIndex = args.indexOf("--lines"); + + // Requirement: Default to 10 lines if --lines is not provided + const linesPerChunk = + linesIndex !== -1 && args[linesIndex + 1] + ? parseInt(args[linesIndex + 1]) + : 10; + + try { + // 1. Initial validation: Check if source.txt exists + await stat(sourcePath); + + // 2. Create a Readable Stream to read source.txt + const readable = createReadStream(sourcePath); + + // 3. Use readline interface to process the stream line by line + const rl = createInterface({ + input: readable, + crlfDelay: Infinity, + }); + + let currentLineCount = 0; + let chunkIndex = 1; + let currentWriteStream = null; + + /** + * Function to generate the next chunk's Writable Stream + */ + const getNewWriteStream = (index) => { + return createWriteStream(resolve(process.cwd(), `chunk_${index}.txt`)); + }; + + // 4. Listen to each line from the source file + for await (const line of rl) { + // If we don't have an active file or reached the limit, start a new one + if (!currentWriteStream || currentLineCount >= linesPerChunk) { + if (currentWriteStream) { + currentWriteStream.end(); + } + currentLineCount = 0; + currentWriteStream = getNewWriteStream(chunkIndex++); + } + + // Write the line followed by a newline character + currentWriteStream.write(`${line}\n`); + currentLineCount++; + } + + // 5. Cleanup: Close the last stream if it exists + if (currentWriteStream) { + currentWriteStream.end(); + process.stdout.write( + `File split successfully into ${chunkIndex - 1} chunks! \n` + ); + } + } catch (error) { + // If source.txt is missing or any FS error occurs + process.stderr.write("FS operation failed\n"); + process.exit(1); + } }; await split(); diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..8431bd01 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,11 +1,84 @@ +import { Worker } from "node:worker_threads"; +import { cpus } from "node:os"; +import { readFile, stat } from "node:fs/promises"; +import { resolve } from "node:path"; + +// Write your code here +// Read data.json containing array of numbers +// Split into N chunks (N = CPU cores) +// Create N workers, send one chunk to each +// Collect sorted chunks +// Merge using k-way merge algorithm +// Log final sorted array + +const runWorker = (workerPath, data) => { + return new Promise((resolve, reject) => { + const worker = new Worker(workerPath); + worker.postMessage(data); + + worker.on("message", (result) => resolve(result)); + worker.on("error", (err) => reject(err)); + worker.on("exit", (code) => { + if (code !== 0) + reject(new Error(`Worker stopped with exit code ${code}`)); + }); + }); +}; + +/** + * kWayMerge: Merges multiple sorted arrays into a single sorted array. + */ +const kWayMerge = (arrays) => { + // For simplicity and performance in JS, we flatten and sort, + // but a true k-way merge would use a Min-Priority Queue. + return arrays.flat().sort((a, b) => a - b); +}; + +/** + * main: Reads data, splits it among workers, and merges the results. + */ const main = async () => { - // Write your code here - // Read data.json containing array of numbers - // Split into N chunks (N = CPU cores) - // Create N workers, send one chunk to each - // Collect sorted chunks - // Merge using k-way merge algorithm - // Log final sorted array + const dataPath = resolve(process.cwd(), "data.json"); + const workerPath = resolve(process.cwd(), "src", "wt", "worker.js"); + + try { + // 1. Validation: Ensure data.json exists + await stat(dataPath); + const rawData = await readFile(dataPath, "utf-8"); + const numbers = JSON.parse(rawData); + + // 2. Identify CPU cores (N) + const numCores = cpus().length; + const chunkSize = Math.ceil(numbers.length / numCores); + + // 3. Split data into N chunks + const chunks = []; + for (let i = 0; i < numCores; i++) { + const start = i * chunkSize; + const end = start + chunkSize; + chunks.push(numbers.slice(start, end)); + } + + process.stdout.write( + `Spawning ${numCores} workers to sort ${numbers.length} numbers \n` + ); + + // 4. Create and run workers in parallel + const workerPromises = chunks.map((chunk) => runWorker(workerPath, chunk)); + + // Collect all sorted results + const sortedChunks = await Promise.all(workerPromises); + + // 5. Merge using k-way merge logic + const finalResult = kWayMerge(sortedChunks); + + // 6. Log final result + process.stdout.write("Final sorted array:\n"); + console.log(finalResult); + } catch (error) { + process.stderr.write("Operation failed, Check if data.json exists.\n"); + process.exit(1); + } }; await main(); diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..b2a870fd 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -1,9 +1,20 @@ -import { parentPort } from 'worker_threads'; +import { parentPort } from "node:worker_threads"; // Receive array from main thread // Sort in ascending order // Send back to main thread -parentPort.on('message', (data) => { - // Write your code here +parentPort.on("message", (data) => { + // Check if data is actually an array + if (!Array.isArray(data)) { + return; + } + + // Sorting in ascending order + // We use (a, b) => a - b because the default sort() + // treats numbers as strings (e.g., 10 comes before 2). + const sortedArray = data.sort((a, b) => a - b); + + // Send the sorted result back to the main thread + parentPort.postMessage(sortedArray); }); diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js index 3a3c5089..20aba34b 100644 --- a/src/zip/compressDir.js +++ b/src/zip/compressDir.js @@ -1,9 +1,75 @@ +import { createBrotliCompress } from "node:zlib"; +import { createWriteStream } from "node:fs"; +import { readdir, stat, readFile, mkdir } from "node:fs/promises"; +import { pipeline } from "node:stream/promises"; +import { resolve, join, relative } from "node:path"; +import { Readable } from "node:stream"; + +// Write your code here +// Read all files from workspace/toCompress/ +// Compress entire directory structure into archive.br +// Save to workspace/compressed/ +// Use Streams API + const compressDir = async () => { - // Write your code here - // Read all files from workspace/toCompress/ - // Compress entire directory structure into archive.br - // Save to workspace/compressed/ - // Use Streams API + const sourceDir = resolve(process.cwd(), "workspace", "toCompress"); + const outputFolder = resolve(process.cwd(), "workspace", "compressed"); + const outputFile = join(outputFolder, "archive.br"); + + try { + // 1. Validation: Ensure source exists + try { + await stat(sourceDir); + } catch { + throw new Error("FS operation failed"); + } + + // 2. Prepare output directory + await mkdir(outputFolder, { recursive: true }); + + // 3. Helper to scan all files + const entries = []; + const scan = async (currentDir) => { + const items = await readdir(currentDir); + for (const item of items) { + const fullPath = join(currentDir, item); + const itemStat = await stat(fullPath); + const relPath = relative(sourceDir, fullPath); + + if (itemStat.isFile()) { + const content = await readFile(fullPath); + entries.push({ + path: relPath, + type: "file", + content: content.toString("base64"), + }); + } else if (itemStat.isDirectory()) { + entries.push({ path: relPath, type: "directory" }); + await scan(fullPath); + } + } + }; + + await scan(sourceDir); + + // 4. Transform the structure into a Stream + const dataString = JSON.stringify({ entries }); + const sourceStream = Readable.from([dataString]); + + // 5. The Compression Pipeline + const compressor = createBrotliCompress(); + const destination = createWriteStream(outputFile); + + // [Image of a data pipeline connecting a source file to a processing unit] + await pipeline(sourceStream, compressor, destination); + + process.stdout.write( + "Directory compressed successfully into archive.br! \n" + ); + } catch (error) { + process.stderr.write("FS operation failed\n"); + process.exit(1); + } }; await compressDir(); diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js index d6e770f6..2fe25ed4 100644 --- a/src/zip/decompressDir.js +++ b/src/zip/decompressDir.js @@ -1,8 +1,78 @@ +import { createBrotliDecompress } from "node:zlib"; +import { createReadStream } from "node:fs"; +import { writeFile, mkdir, stat } from "node:fs/promises"; +import { pipeline } from "node:stream/promises"; +import { resolve, join, dirname } from "node:path"; + +// Write your code here +// Read archive.br from workspace/compressed/ +// Decompress and extract to workspace/decompressed/ +// Use Streams API + +/** + * decompressDir: Reads a Brotli compressed archive and restores + * the original directory and file structure. + */ const decompressDir = async () => { - // Write your code here - // Read archive.br from workspace/compressed/ - // Decompress and extract to workspace/decompressed/ - // Use Streams API + const archivePath = resolve( + process.cwd(), + "workspace", + "compressed", + "archive.br" + ); + const outputDir = resolve(process.cwd(), "workspace", "decompressed"); + + try { + // 1. Validation: Ensure archive exists + try { + await stat(archivePath); + } catch { + throw new Error("FS operation failed"); + } + + // 2. Prepare decompression stream + const decompressor = createBrotliDecompress(); + const source = createReadStream(archivePath); + + // 3. Collect the data into a buffer/string + let decompressedData = ""; + + // We can use a special stream to collect the output + const dataCollector = async (stream) => { + for await (const chunk of stream) { + decompressedData += chunk.toString(); + } + }; + + // + // Pipeline: source -> decompressor + await pipeline(source, decompressor, async function* (sourceStream) { + for await (const chunk of sourceStream) { + decompressedData += chunk.toString(); + } + }); + + // 4. Parse the recovered structure + const { entries } = JSON.parse(decompressedData); + + // 5. Recreate files and folders + for (const entry of entries) { + const fullPath = join(outputDir, entry.path); + + if (entry.type === "directory") { + await mkdir(fullPath, { recursive: true }); + } else { + await mkdir(dirname(fullPath), { recursive: true }); + const contentBuffer = Buffer.from(entry.content, "base64"); + await writeFile(fullPath, contentBuffer); + } + } + + process.stdout.write("Archive decompressed successfully! \n"); + } catch (error) { + process.stderr.write("FS operation failed\n"); + process.exit(1); + } }; await decompressDir();