diff --git a/src/cli/interactive.js b/src/cli/interactive.js index d0e3e0d9..71de0741 100644 --- a/src/cli/interactive.js +++ b/src/cli/interactive.js @@ -1,8 +1,49 @@ +import readline from 'readline'; + + const interactive = () => { // Write your code here // Use readline module for interactive CLI // Support commands: uptime, cwd, date, exit // Handle Ctrl+C and unknown commands + + const rl = readline.createInterface( + process.stdin, process.stdout); + const startTime = performance.now(); + rl.setPrompt(`>`); + rl.prompt(); + rl.on('line', (cmd) => { + switch (cmd) { + case 'cwd': + console.log(process.cwd()) + break; + case 'uptime': + upTime(startTime) + break; + case 'date': + printDate() + break; + case 'exit': + process.exit(0) + default: + console.log(`Unknown command`) + } rl.prompt(); + }); }; +function printDate() { + const now = new Date(); + console.log(now.toISOString()) +} + +function upTime (startTime) { + const endTime = performance.now(); + const elapsedTime = ((endTime - startTime) / 1000).toFixed(2); + console.log(`Uptime: ${elapsedTime}`) +} + interactive(); + +process.on('exit', (code) => { + console.log(`Goodbye!`) +}); diff --git a/src/cli/progress.js b/src/cli/progress.js index 3e060763..73bee4bd 100644 --- a/src/cli/progress.js +++ b/src/cli/progress.js @@ -1,8 +1,68 @@ + const progress = () => { // Write your code here // Simulate progress bar from 0% to 100% over ~5 seconds // Update in place using \r every 100ms // Format: [████████████████████ ] 67% + let duration = 5000 + let interval = 100 + let length = 30 + + let st = 0; + let color = '\x1b[37m' + + const colorIdx = process.argv.indexOf("--color") + if (colorIdx > -1 && colorIdx < process.argv.length - 1) { + const rgb = hexToRgb(process.argv[colorIdx + 1]) + if (rgb != null) color = rgb + } + const durationIdx = process.argv.indexOf("--duration") + if (durationIdx > -1 && durationIdx < process.argv.length - 1) { + duration = parseInt(process.argv[durationIdx + 1]) + } + + const intervalIdx = 
process.argv.indexOf("--interval") + if (intervalIdx > -1 && intervalIdx < process.argv.length - 1) { + interval = parseInt(process.argv[intervalIdx + 1]) + } + + const lengthIdx = process.argv.indexOf("--length") + if (lengthIdx > -1 && lengthIdx < process.argv.length - 1) { + length = parseInt(process.argv[lengthIdx + 1]) + if (length < 1) { + length = 30 + } + } + + setInterval(() => { + updateProgress(st, duration, length, color); + st += interval; + }, interval); + + setTimeout(() => { + console.log(`\nDone!`); + process.exit(0); + }, duration + interval) +}; +const hexToRgb = (hex) => { + const r = parseInt(hex.slice(1, 3), 16); + const g = parseInt(hex.slice(3, 5), 16); + const b = parseInt(hex.slice(5, 7), 16); + if (isNaN(r) || isNaN(g) || isNaN(b)) { + return null + } + return `\x1b[38;2;${r};${g};${b}m`; }; +function updateProgress(progress, total, length, color) { + const percentage = Math.floor((progress / total) * 100); + + const filledBar = `${color}${('█'.repeat((progress / total) * length))}`; + const emptyBar = ' '.repeat(length - (progress / total) * length); + + process.stdout.clearLine(); + process.stdout.cursorTo(0); + process.stdout.write("\x1b[37m[" + filledBar + emptyBar +"\x1b[37m] "+ percentage+ "%"); +} + progress(); diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js index 34a89c8d..078a3f55 100644 --- a/src/cp/execCommand.js +++ b/src/cp/execCommand.js @@ -1,3 +1,5 @@ +import {spawn} from "child_process" + const execCommand = () => { // Write your code here // Take command from CLI argument @@ -5,6 +7,31 @@ const execCommand = () => { // Pipe child stdout/stderr to parent stdout/stderr // Pass environment variables // Exit with same code as child + + if (process.argv.length != 3) { + console.log("command not passed") + process.exit(1) + } + + const [command, ...args] = process.argv[2].split(' '); + + const child = spawn(command, args, { + env: process.env, + stdio: ['inherit', 'pipe', 'pipe'], + }); + + 
child.stdout.pipe(process.stdout); + child.stderr.pipe(process.stderr); + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error(`Failed to start child process: ${err.message}`); + process.exit(1); + }); }; + execCommand(); diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js index 24f06cb8..089c22fc 100644 --- a/src/fs/findByExt.js +++ b/src/fs/findByExt.js @@ -1,7 +1,34 @@ +import os from 'os'; +import { readdir } from 'node:fs/promises'; + const findByExt = async () => { // Write your code here // Recursively find all files with specific extension // Parse --ext CLI argument (default: .txt) + let ext = ".txt" + if (process.argv.length > 1) { + const idx = process.argv.indexOf("--ext") + if (idx > -1 && idx < process.argv.length-1) { + ext = process.argv[idx + 1] + } + } + + const sep = (os.platform() == 'win32') ? '\\' : '/'; + let files + + try { + files = await readdir('workspace', { withFileTypes: true, recursive: true }); + } catch (error) { + console.log('FS operation failed') + process.exit(1) + } + + let filteredFiles = files.filter(f => f.isFile() && f.name.endsWith(ext)).map(f => f.parentPath + sep + f.name) + filteredFiles.sort() + for (let fileName of filteredFiles) { + console.log(fileName) + } }; + await findByExt(); diff --git a/src/fs/merge.js b/src/fs/merge.js index cb8e0d8f..83aa22fa 100644 --- a/src/fs/merge.js +++ b/src/fs/merge.js @@ -1,8 +1,59 @@ +import os from 'os'; +import { readdir, readFile, writeFile } from 'node:fs/promises'; + const merge = async () => { // Write your code here // Default: read all .txt files from workspace/parts in alphabetical order // Optional: support --files filename1,filename2,... to merge specific files in provided order // Concatenate content and write to workspace/merged.txt + + const sep = (os.platform() == 'win32') ? 
'\\' : '/'; + const workspace = 'workspace' + sep + "parts"; + let files + + try { + files = await readdir(workspace, { withFileTypes: true, recursive: true }); + } catch (error) { + console.log('FS operation failed') + process.exit(1) + } + + let filenames = null + if (process.argv.length > 1) { + const idx = process.argv.indexOf("--files") + if (idx > -1 && idx < process.argv.length-1) { + filenames = process.argv[idx + 1].split(",") + } + } + + const contents = [] + const promises = files.filter(f => f.name.endsWith(".txt") && + (filenames == null || filenames != null && filenames.indexOf(f.name.replace(".txt", "")) > -1)) + .map( f => readFile(f.parentPath + sep + f.name, {encoding: "utf-8"}).then(d => contents.push([f.name, d]))); + + await Promise.all(promises); + + // check all found + if (filenames != null) { + const filteredNames = contents.map(c => c[0]) + const missedCount = filenames.filter(f => filteredNames.indexOf(f + '.txt') == -1).length + if (missedCount > 0) { + console.log('FS operation failed') + process.exit(1) + } + } + + // merge + let concate = ""; + if (contents.length > 0) { + if (filenames == null) { contents.sort() } else { contents.sort((a, b) => filenames.indexOf(a[0].replace(".txt", "")) - filenames.indexOf(b[0].replace(".txt", ""))) } // --files: keep caller-provided order, otherwise alphabetical + for (let [_, content] of contents) { + concate += content + } + // merged content is written to workspace/merged.txt below (stdout dump removed) + } + const mergedFileName = 'workspace' + sep + 'merged.txt' + await writeFile(mergedFileName, concate, 'utf8') }; await merge(); diff --git a/src/fs/restore.js b/src/fs/restore.js index 96ae1ffb..a9d81180 100644 --- a/src/fs/restore.js +++ b/src/fs/restore.js @@ -1,8 +1,58 @@ +import { readFile, writeFile, mkdir } from 'node:fs/promises'; +import os from 'os'; + const restore = async () => { - // Write your code here - // Read snapshot.json - // Treat snapshot.rootPath as metadata only - // Recreate directory/file structure in workspace_restored + let sep = (os.platform() == 'win32') ? 
'\\' : '/'; + let fileName = 'snapshot.json'; + // read content + let snapshot; + try { + snapshot = await readFile(fileName, { encoding: 'utf8' }) + } catch (err) { + console.log('FS operation failed') + process.exit(1) + } + + try { + snapshot = JSON.parse(snapshot) + } catch (err) { + console.log('Json content expected') + process.exit(1) + } + + // validate structure + if (snapshot.rootPath == undefined) { + console.log('wrong json structure: rootPath is not declared') + process.exit(1) + } + + if (snapshot.entries == undefined) { + console.log('wrong json structure: entries is not declared') + process.exit(1) + } + + if (snapshot.entries == undefined || snapshot.entries.length == 0) { + return + } + + const basePath = 'workspace_restored' + try { + await mkdir(basePath) + } catch (err) { + console.log('FS operation failed') + process.exit(1) + } + + const promises = snapshot.entries + .filter(f => f.type == 'directory') + .map(d => mkdir(basePath + sep + d.path, {recursive: true})) + await Promise.all(promises); + + + const filePromises = snapshot.entries + .filter(f => f.type == 'file') + .map(f => writeFile(basePath + sep + f.path, Buffer.from(f.content, 'base64'))) // decode base64 to raw bytes; atob + 'utf8' corrupted non-ASCII content + await Promise.all(filePromises); }; await restore(); diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js index 050103d3..22802fe4 100644 --- a/src/fs/snapshot.js +++ b/src/fs/snapshot.js @@ -1,9 +1,48 @@ +import { readdir, readFile, writeFile } from 'node:fs/promises'; +import os from 'os'; + const snapshot = async () => { // Write your code here // Recursively scan workspace directory // Write snapshot.json with: // - rootPath: absolute path to workspace // - entries: flat array of relative paths and metadata + // Check if the file is readable. + + const userHomeDir = os.homedir(); + let sep = (os.platform() == 'win32') ? 
'\\' : '/'; + let workspace = 'workspace'; + + let files + try { + files = await readdir(workspace, { withFileTypes: true, recursive: true }); + } catch (error) { + console.log('FS operation failed') + process.exit(1) + } + + var fileContents = [] + var promises = [] + + for (const file of files) { + + let relativePath = file.parentPath + sep + file.name + const relativeFilePath = relativePath.replace(workspace + sep, '') + // console.log(file); + if (file.isDirectory()) { + fileContents.push({ path: relativeFilePath, type: 'directory' }) + } else { + const promise = readFile(relativePath) + .then(data => { + fileContents.push({path: relativeFilePath, type: 'file', size: data.length, content: Buffer.from(data).toString('base64')}) + }) + promises.push(promise) + } + } + await Promise.all(promises); + // prepare final object + let shapshotContent = JSON.stringify({ rootPath: process.cwd() + sep + workspace, entries: fileContents }) + await writeFile('snapshot.json', shapshotContent, 'utf8'); }; await snapshot(); diff --git a/src/hash/verify.js b/src/hash/verify.js index 7f1e8961..33c1648a 100644 --- a/src/hash/verify.js +++ b/src/hash/verify.js @@ -1,8 +1,49 @@ +import { readFile } from 'node:fs/promises'; +import { createHash } from 'node:crypto'; +import { createReadStream } from 'node:fs'; + + const verify = async () => { // Write your code here // Read checksums.json // Calculate SHA256 hash using Streams API // Print result: filename — OK/FAIL + + + let content + try { + content = await readFile('checksums.json', { encoding: 'utf-8' }).then(c => JSON.parse(c)) + } catch (err) { + console.log('FS operation failed') + process.exit(1) + } + if (content != null) { + const promises = Object.keys(content) + .map(k => compareHashes(k, content[k])) + + await Promise.all(promises); + } }; +function compareHashes(fileName, hash) { + return getFileChecksum(fileName).then(actual => { + let res = 'OK' + if (actual != hash) { + res = 'FAIL' + } + 
console.log(`${fileName} - ${res}`) + }) +} + +function getFileChecksum(filePath) { + return new Promise((resolve, reject) => { + const hash = createHash('sha256'); + const stream = createReadStream(filePath); + + stream.on('data', (data) => hash.update(data)); + stream.on('end', () => resolve(hash.digest('hex'))); + stream.on('error', (err) => reject(err)); + }); +} + await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..b86be810 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,35 @@ +import { readdir } from 'node:fs/promises'; +import { fileURLToPath } from 'url'; +import { dirname, sep } from 'path'; + const dynamic = async () => { // Write your code here // Accept plugin name as CLI argument // Dynamically import plugin from plugins/ directory // Call run() function and print result // Handle missing plugin case + if (process.argv.length < 3) { + console.log('Plugin name should be passed in') + process.exit(1) + } + const __filename = fileURLToPath(import.meta.url); + const __dirname = dirname(__filename); + const modules = await readdir(__dirname + sep + 'plugins'); + const fileName = process.argv[2] + + if (modules.indexOf(fileName + '.js') == -1) { + console.log('Plugin not found') + process.exit(1) + } + + await import(__dirname + sep + 'plugins' + sep + fileName + '.js') + .then((module) => { + console.log(module.run()); + }) + .catch((err) => { + console.error("Failed to load module", err); + }); + }; await dynamic(); diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..ced7460d 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,51 @@ +import { + Transform, +} from 'node:stream'; + + const filter = () => { // Write your code here // Read from process.stdin // Filter lines by --pattern CLI argument // Use Transform Stream // Write to process.stdout + + + const patternIdx = process.argv.indexOf("--pattern") + if (patternIdx == -1 || patternIdx == 
process.argv.length - 1) { + console.log('Pattern parameter should be passed in') + process.exit(1) + } + + const strLiteral = getLiteral(process.argv[patternIdx+1]); + // debug dump of the compiled RegExp removed (it polluted the filtered stdout stream) + + const transformer = new Transform({ + transform(chunk, encoding, callback) { + const str = chunk.toString() + if (strLiteral.test(str)) { // NOTE(review): tests the whole chunk, not individual lines - confirm input is line-buffered + this.push(str); + } + callback(); + }, + }); + process.stdin.pipe(transformer).pipe(process.stdout); }; +function getLiteral(patternStr) { + // 1. Extract pattern (between slashes) and flags (after last slash) + const match = patternStr.match(/^\/(.*)\/([a-z]*)$/); + let strLiteral + if (match) { + const pattern = match[1]; + const flags = match[2]; + + // 2. Create the RegExp object + strLiteral = new RegExp(pattern, flags); + } else { + strLiteral = new RegExp(patternStr); + } + return strLiteral +} + filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..3f57c132 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,34 @@ +import { + Transform, +} from 'node:stream'; + const lineNumberer = () => { // Write your code here // Read from process.stdin // Use Transform Stream to prepend line numbers // Write to process.stdout + + + const transformer = new Transform({ + transform(chunk, encoding, callback) { + this.push(numerate(chunk.toString())); + callback(); + }, + }); + + process.stdin.pipe(transformer).pipe(process.stdout); }; +function numerate(content) { // NOTE(review): numbering restarts per chunk - acceptable for line-buffered stdin, wrong for large piped input + const parts = content.split('\n') + let result = '' + for (let i = 0; i < parts.length; i++) { + result += `${i + 1} | ${parts[i]}` + if (i != parts.length - 1) { + result += '\n' + } + } + return result +} + lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..9690af74 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,101 @@ +import { Transform } from 'stream'; +import { createReadStream, createWriteStream, writeFileSync, existsSync } from 
'node:fs'; + + const split = async () => { // Write your code here // Read source.txt using Readable Stream // Split into chunk_1.txt, chunk_2.txt, etc. // Each chunk max N lines (--lines CLI argument, default: 10) + const maxLines = parseLinesArg(); + + if (!existsSync("source.txt")) { + console.error(`Error: source.txt not found`); + process.exit(1); + } + + const readable = createReadStream("source.txt", { encoding: 'utf-8' }); + const splitter = new LineSplitter(maxLines); + + readable + .pipe(splitter); + + readable.on('error', (err) => { + console.error(`Read error: ${err.message}`); + process.exit(1); + }); }; + + +function parseLinesArg() { + const args = process.argv.slice(2); + const idx = args.indexOf('--lines'); + + if (idx !== -1 && args[idx + 1] !== undefined) { + const parsed = parseInt(args[idx + 1], 10); + if (Number.isNaN(parsed) || parsed <= 0) { + console.error('Error: --lines must be a positive integer'); + process.exit(1); + } + return parsed; + } + + return 10; +} + +class LineSplitter extends Transform { + constructor(maxLines) { + super(); + this.maxLines = maxLines; + this.lineBuffer = []; // accumulates complete lines + this.remainder = ''; // incomplete trailing text from previous chunk + this.chunkIndex = 1; // current output file number + } + + _transform(chunk, encoding, callback) { + const data = this.remainder + chunk.toString(); + const lines = data.split('\n'); + + // Last element is either '' (if data ended with \n) or an + // incomplete line — save it for the next _transform call. 
+ this.remainder = lines.pop(); + + for (const line of lines) { + this.lineBuffer.push(line); + + if (this.lineBuffer.length === this.maxLines) { + this._writeChunk(); + } + } + + callback(); + } + + + _flush(callback) { + if (this.remainder.length > 0) { + this.lineBuffer.push(this.remainder); + this.remainder = ''; + } + + if (this.lineBuffer.length > 0) { + this._writeChunk(); + } + + callback(); + } + + _writeChunk() { + const fileName = `chunk_${this.chunkIndex}.txt`; + const content = this.lineBuffer.join('\n') + '\n'; + + writeFileSync(fileName, content); + console.log(`${fileName} (${this.lineBuffer.length} lines)`); + + this.lineBuffer = []; + this.chunkIndex++; + } +} + await split(); diff --git a/src/wt/main.js b/src/wt/main.js index d7d21f0c..9b517e7a 100644 --- a/src/wt/main.js +++ b/src/wt/main.js @@ -1,3 +1,10 @@ +import { Worker } from 'worker_threads'; +import os from 'os'; +import fs from 'fs'; +import { fileURLToPath } from 'url'; +import { dirname, join} from 'path'; +//import path from 'path'; + const main = async () => { // Write your code here // Read data.json containing array of numbers @@ -6,6 +13,81 @@ const main = async () => { // Collect sorted chunks // Merge using k-way merge algorithm // Log final sorted array + + const raw = fs.readFileSync('data.json', 'utf-8'); + const numbers = JSON.parse(raw); + + if (!Array.isArray(numbers) || numbers.length === 0) { + throw new Error('data.json must contain a non-empty array of numbers'); + } + + const cpuCount = os.cpus().length; + + const chunks = split(numbers, cpuCount); + + const sortedChunks = await Promise.all( + chunks.map((chunk) => sortChunkInWorker(chunk)) + ); + + // K-way merge + console.log(kWayMerge(sortedChunks)); + // process.exit(0) }; +function split(array, n) { + const chunks = [] + const size = Math.ceil(array.length / n) + for (let i = 0; i < array.length; i += size) { + chunks.push(array.slice(i, i + size)) + } + return chunks +} + +function kWayMerge(sortedArrays) { + 
const pointers = new Array(sortedArrays.length).fill(0); + const totalLength = sortedArrays.reduce((sum, arr) => sum + arr.length, 0); + const result = new Array(totalLength); + + for (let i = 0; i < totalLength; i++) { + let minVal = Infinity; + let minIdx = -1; + + for (let k = 0; k < sortedArrays.length; k++) { + if ( + pointers[k] < sortedArrays[k].length && + sortedArrays[k][pointers[k]] < minVal + ) { + minVal = sortedArrays[k][pointers[k]]; + minIdx = k; + } + } + + result[i] = minVal; + pointers[minIdx]++; + } + + return result; +} + +function sortChunkInWorker(chunk) { + const __filename = fileURLToPath(import.meta.url); + const __dirname = dirname(__filename); + + return new Promise((resolve, reject) => { + const worker = new Worker(join(__dirname, 'worker.js')) + + worker.on('message', (sorted) => resolve(sorted)) + + worker.on('error', (err) => reject(err)) + + worker.on('exit', (code) => { + if (code != 0) reject(new Error(`Worker exited with code ${code}`)) + }) + + worker.postMessage(chunk) + }) +} + + + await main(); diff --git a/src/wt/worker.js b/src/wt/worker.js index 15f42fc8..c8caee22 100644 --- a/src/wt/worker.js +++ b/src/wt/worker.js @@ -1,9 +1,10 @@ import { parentPort } from 'worker_threads'; - // Receive array from main thread // Sort in ascending order // Send back to main thread parentPort.on('message', (data) => { // Write your code here + const sorted = [...data].sort((a, b) => a - b) + parentPort.postMessage(sorted) }); diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js index 3a3c5089..7ce2cf92 100644 --- a/src/zip/compressDir.js +++ b/src/zip/compressDir.js @@ -1,9 +1,82 @@ +import { createReadStream, createWriteStream } from 'node:fs'; +import { access, constants, mkdir, unlink, readdir, readFile } from 'node:fs/promises'; +import { createBrotliCompress } from 'node:zlib'; +import { pipeline } from 'node:stream'; +import { sep } from 'path'; + const compressDir = async () => { // Write your code here // Read all files 
from workspace/toCompress/ // Compress entire directory structure into archive.br // Save to workspace/compressed/ // Use Streams API + const folderName = 'workspace' + sep + 'toCompress' + + return access(folderName, constants.F_OK).then(d => { + return folderSnapshot(folderName) + }).then(() => { + return compressBr(`${folderName}_snapshot.json`) + }).catch(e => { + console.log(e) + throw new Error('FS operation failed'); + }) }; +async function folderSnapshot(folderName) { + var fileContents = [] + let files + try { + files = await readdir(folderName, { withFileTypes: true, recursive: true }); + } catch (error) { + console.log('FS operation failed', error) + process.exit(1) + } + + var promises = [] + for (const file of files) { + let relativePath = file.parentPath + sep + file.name + const relativeFilePath = relativePath.replace(folderName + sep, '') + if (file.isDirectory()) { + fileContents.push({ path: relativeFilePath, type: 'd' }) + } else { + const promise = readFile(relativePath) + .then(data => { + fileContents.push({ path: relativeFilePath, type: 'f', content: Buffer.from(data).toString('base64') }) + }) + promises.push(promise) + } + } + await Promise.all(promises) + + let shapshotContent = JSON.stringify(fileContents) + const output = createWriteStream(`${folderName}_snapshot.json`); + output.write(shapshotContent); + + return new Promise((resolve, reject) => { + output.on('finish', () => resolve('Ok')); + output.on('error', reject); + output.end(); + }); +} + +async function compressBr(tmpFile) { + const source = createReadStream(tmpFile); + return mkdir('workspace' + sep + 'compressed', { recursive: true }) + .then(f => { + const destination = createWriteStream('workspace' + sep + 'compressed/archive.br'); + const compressor = createBrotliCompress(); + pipeline(source, compressor, destination, (err) => { + if (err) { + console.log(err) + throw new Error('Something wrong happend while compressing') + } + }); + return Promise.resolve('Ok') + 
}).finally(err => { + return unlink(tmpFile) // NOTE(review): finally() receives no error arg, and this may delete tmpFile before the compress pipeline finishes reading it - prefer stream/promises pipeline, then unlink + }); +} + + + await compressDir(); diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js index d6e770f6..a8422bfd 100644 --- a/src/zip/decompressDir.js +++ b/src/zip/decompressDir.js @@ -1,8 +1,71 @@ +import { createReadStream, createWriteStream } from 'node:fs'; +import { access, constants, mkdir, unlink, readFile, writeFile } from 'node:fs/promises'; +import { createBrotliDecompress } from 'node:zlib'; +import { pipeline } from 'node:stream/promises'; +import { sep } from 'path'; + const decompressDir = async () => { // Write your code here // Read archive.br from workspace/compressed/ // Decompress and extract to workspace/decompressed/ // Use Streams API + + // uncompress br, + // restore structures + const archiveFile = `workspace${sep}compressed${sep}archive.br` + const snapshotDestination = `workspace${sep}decompressed_snapshot.json` + return access(archiveFile, constants.F_OK) + // .catch(e => { + // //console.log(e) + // throw new Error('FS operation failed'); + // }) + .then(f => { + return deCompress(archiveFile, snapshotDestination) + }).then(p => { + return readFile(snapshotDestination, { encoding: 'utf8' }) + }).then(snapshotContent => { + return restore(snapshotContent) + }) + .finally(() => unlink(snapshotDestination)) + .catch(e => { + throw new Error('FS operation failed'); + }) }; +async function restore(snapshot) { + let entries + try { + entries = JSON.parse(snapshot) + } catch (err) { + console.log('Json content expected, please recreate archive') + process.exit(1) + } + + if (entries == undefined || entries.length == 0) { + return Promise.resolve('Ok') + } + + const basePath = 'workspace' + sep + 'decompressed' + + return mkdir(basePath, { recursive: true }) + .then(() => { + const promises = entries + .filter(f => f.type == 'd') + .map(d => mkdir(basePath + sep + d.path, { recursive: true })) + return Promise.all(promises); // restore all folders + }).then(() => { + 
const filePromises = entries + .filter(f => f.type == 'f') + .map(f => writeFile(basePath + sep + f.path, Buffer.from(f.content, 'base64'))) // decode base64 to raw bytes; atob + 'utf8' corrupted non-ASCII content + return Promise.all(filePromises); // restore all files + }) +} + +async function deCompress(archiveFile, snapshotDestination) { + const source = createReadStream(archiveFile); + const destination = createWriteStream(snapshotDestination); + const decompressor = createBrotliDecompress(); + return await pipeline(source, decompressor, destination); +} + await decompressDir();