diff --git a/package.json b/package.json
index 3108c6c969..5253a6db02 100644
--- a/package.json
+++ b/package.json
@@ -40,5 +40,8 @@
   "bugs": {
     "url": "https://github.com/AlreadyBored/node-nodejs-basics/issues"
   },
-  "homepage": "https://github.com/AlreadyBored/node-nodejs-basics#readme"
+  "homepage": "https://github.com/AlreadyBored/node-nodejs-basics#readme",
+  "devDependencies": {
+    "cross-env": "^10.1.0"
+  }
 }
diff --git a/src/cli/args.js b/src/cli/args.js
index 9e3622f791..73553c4647 100644
--- a/src/cli/args.js
+++ b/src/cli/args.js
@@ -1,5 +1,14 @@
+import { cliConstants } from '../common/constants.js';
+import { cliPrintArgument } from '../common/helpers.js';
+
 const parseArgs = () => {
-    // Write your code here
+  const { DELIMITER, RE_DELIMITER, SLICE_FROM, JOINER } = cliConstants.parseArgs;
+  process.argv
+    .join(DELIMITER)
+    .split(RE_DELIMITER)
+    .slice(SLICE_FROM)
+    .map((string) => string.split(DELIMITER))
+    .forEach((pair) => cliPrintArgument(pair, JOINER));
 };
 
 parseArgs();
diff --git a/src/cli/env.js b/src/cli/env.js
index e3616dc8e7..9e3f0073dd 100644
--- a/src/cli/env.js
+++ b/src/cli/env.js
@@ -1,5 +1,11 @@
+import { cliPrintArgument } from '../common/helpers.js';
+import { cliConstants } from '../common/constants.js';
+
 const parseEnv = () => {
-    // Write your code here
+  const { PATTERN, JOINER } = cliConstants.env;
+  for (const pair of Object.entries(process.env)) {
+    if (PATTERN.test(pair[0])) cliPrintArgument(pair, JOINER);
+  }
 };
 
 parseEnv();
diff --git a/src/common/constants.js b/src/common/constants.js
new file mode 100644
index 0000000000..0562a35e50
--- /dev/null
+++ b/src/common/constants.js
@@ -0,0 +1,102 @@
+const fsConstants = {
+  fsCreate: {
+    FILE_NAME: 'fresh.txt',
+  },
+  fsCopy: {
+    FOLDER_NAME: 'files',
+    COPY_POSTFIX: '_copy',
+    CP_OPTIONS: { recursive: true },
+  },
+  fsRename: {
+    OLD_NAME: 'wrongFilename.txt',
+    NEW_NAME: 'properFilename.md',
+  },
+  fsDelete: {
+    FILE_NAME: 'fileToRemove.txt',
+  },
+  fsRead: {
+    FILE_NAME: 'fileToRead.txt',
+  },
+  FS_PATHS: {
+    PATH_TO_FILES: 'src/fs/files/',
+    PATH_TO_FS: 'src/fs/',
+  },
+  FS_CONDITIONS: {
+    PRESENT: true,
+    ABSENT: false,
+  },
+  FS_ERRORS: {
+    ERROR_MESSAGE: 'FS operation failed',
+  },
+};
+const cliConstants = {
+  parseArgs: {
+    SLICE_FROM: 1,
+    DELIMITER: ' ',
+    RE_DELIMITER: /\s\-{2}/,
+    JOINER: ' is ',
+  },
+  env: {
+    PATTERN: /^RSS_/,
+    JOINER: '=',
+  },
+};
+const hashConstants = {
+  FILE_NAME: 'fileToCalculateHashFor.txt',
+  PATH_TO_FILE: 'src/hash/files/',
+  ALGORITHM: 'sha256',
+  ENCODING: 'hex',
+};
+const streamsConstants = {
+  STREAMS_PATHS: {
+    PATH_TO_FILE: 'src/streams/files/',
+  },
+  read: {
+    FILE_NAME: fsConstants.fsRead.FILE_NAME,
+  },
+  write: {
+    FILE_NAME: 'fileToWrite.txt',
+  },
+  transform: {
+    ENCODING: 'utf-8',
+  },
+  NEW_LINE: '\n',
+};
+const zipConstants = {
+  ZIP_PATHS: {
+    PATH_TO_FILE: 'src/zip/files',
+  },
+  compress: {
+    FILE_NAME: 'fileToCompress.txt',
+    ARCHIVE_NAME: 'archive.gz',
+  },
+};
+const wtConstants = {
+  WT_PATHS: {
+    PATH_TO_WORKER: 'src/wt/',
+    WORKER_NAME: 'worker.js',
+  },
+  FIB_BASE_N: 10,
+  RESULTS: {
+    SUCCESS: {
+      status: 'resolved',
+    },
+    ERROR: {
+      status: 'error',
+      data: null,
+    },
+  },
+};
+const cpConstants = {
+  CP_PATHS: {
+    PATH_TO_FILE: 'src/cp/files/',
+    SCRIPT_NAME: 'script.js',
+  },
+  INTERPRETER: 'node',
+  TEST_ARGS: ['firstArg', null, 4, undefined],
+  OPTIONS: {
+    stdio: 'inherit',
+  },
+};
+
+export { fsConstants, cliConstants, hashConstants, streamsConstants, zipConstants, wtConstants, cpConstants };
diff --git a/src/common/functionDescriptors.js b/src/common/functionDescriptors.js
new file mode 100644
index 0000000000..c01688c513
--- /dev/null
+++ b/src/common/functionDescriptors.js
@@ -0,0 +1,122 @@
+import { fsPathResolver } from './helpers.js';
+import { cpus } from 'node:os';
+import { fsConstants, hashConstants, streamsConstants, zipConstants, wtConstants, cpConstants } from './constants.js';
+
+const makeFsDescriptors = () => {
+  const { FS_CONDITIONS, FS_PATHS, fsCopy, fsCreate, fsDelete, fsRead, fsRename } = fsConstants;
+  const { PATH_TO_FILES, PATH_TO_FS } = FS_PATHS;
+  const pathToFilesBuilder = fsPathResolver(PATH_TO_FILES);
+  const pathToFSBuilder = fsPathResolver(PATH_TO_FS);
+  return {
+    create: {
+      text: 'I am fresh and young',
+      fileDescriptor: {
+        fullPath: pathToFilesBuilder(fsCreate.FILE_NAME),
+        condition: FS_CONDITIONS.PRESENT,
+      },
+    },
+    copy: {
+      sourceFileDescriptor: {
+        fullPath: pathToFSBuilder(fsCopy.FOLDER_NAME),
+        condition: FS_CONDITIONS.ABSENT,
+      },
+      destinationFileDescriptor: {
+        fullPath: pathToFSBuilder(fsCopy.FOLDER_NAME.concat(fsCopy.COPY_POSTFIX)),
+        condition: FS_CONDITIONS.PRESENT,
+      },
+      options: fsCopy.CP_OPTIONS,
+    },
+    rename: {
+      oldFileDescriptor: {
+        fullPath: pathToFilesBuilder(fsRename.OLD_NAME),
+        condition: FS_CONDITIONS.ABSENT,
+      },
+      newFileDescriptor: {
+        fullPath: pathToFilesBuilder(fsRename.NEW_NAME),
+        condition: FS_CONDITIONS.PRESENT,
+      },
+    },
+    delete: {
+      fileDescriptor: {
+        fullPath: pathToFilesBuilder(fsDelete.FILE_NAME),
+        condition: FS_CONDITIONS.ABSENT,
+      },
+    },
+    read: {
+      fileDescriptor: {
+        fullPath: pathToFilesBuilder(fsRead.FILE_NAME),
+        condition: FS_CONDITIONS.ABSENT,
+      },
+    },
+    list: {
+      directoryDescriptor: {
+        fullPath: pathToFilesBuilder(),
+        condition: FS_CONDITIONS.ABSENT,
+      },
+    },
+  };
+};
+const makeHashDescriptors = () => {
+  const { PATH_TO_FILE, FILE_NAME, ENCODING, ALGORITHM } = hashConstants;
+  const pathToHashBuilder = fsPathResolver(PATH_TO_FILE);
+  return {
+    fullPath: pathToHashBuilder(FILE_NAME),
+    algorithm: ALGORITHM,
+    encoding: ENCODING,
+  };
+};
+const makeStreamsDescriptors = () => {
+  const { PATH_TO_FILE } = streamsConstants.STREAMS_PATHS;
+  const { read, NEW_LINE, write } = streamsConstants;
+  const pathToStreamsBuilder = fsPathResolver(PATH_TO_FILE);
+  return {
+    read: {
+      fullPath: pathToStreamsBuilder(read.FILE_NAME),
+      inputEnd: NEW_LINE,
+    },
+    write: {
+      fullPath: pathToStreamsBuilder(write.FILE_NAME),
+    },
+  };
+};
+const makeZipDescriptors = () => {
+  const { PATH_TO_FILE } = zipConstants.ZIP_PATHS;
+  const { FILE_NAME, ARCHIVE_NAME } = zipConstants.compress;
+  const pathToZIPBuilder = fsPathResolver(PATH_TO_FILE);
+  return {
+    compress: {
+      fullPathToFile: pathToZIPBuilder(FILE_NAME),
+      fullPathToArchive: pathToZIPBuilder(ARCHIVE_NAME),
+    },
+  };
+};
+const makeWtDescriptors = () => {
+  const { PATH_TO_WORKER, WORKER_NAME } = wtConstants.WT_PATHS;
+  const { FIB_BASE_N, RESULTS } = wtConstants;
+  const pathToWorkerBuilder = fsPathResolver(PATH_TO_WORKER);
+  return {
+    fullPathToWorker: pathToWorkerBuilder(WORKER_NAME),
+    baseN: FIB_BASE_N,
+    success: RESULTS.SUCCESS,
+    error: RESULTS.ERROR,
+    workersCount: cpus().length,
+  };
+};
+const makeCpDescriptors = () => {
+  const { PATH_TO_FILE, SCRIPT_NAME } = cpConstants.CP_PATHS;
+  const pathToCpBuilder = fsPathResolver(PATH_TO_FILE);
+  const { INTERPRETER, TEST_ARGS, OPTIONS } = cpConstants;
+  return {
+    interpreter: INTERPRETER,
+    argsArr: [pathToCpBuilder(SCRIPT_NAME)].concat(TEST_ARGS),
+    options: OPTIONS,
+  };
+};
+const fsDescriptors = makeFsDescriptors();
+const hashDescriptor = makeHashDescriptors();
+const streamsDescriptors = makeStreamsDescriptors();
+const zipDescriptors = makeZipDescriptors();
+const wtDescriptors = makeWtDescriptors();
+const cpDescriptor = makeCpDescriptors();
+
+export { fsDescriptors, hashDescriptor, streamsDescriptors, zipDescriptors, wtDescriptors, cpDescriptor };
diff --git a/src/common/helpers.js b/src/common/helpers.js
new file mode 100644
index 0000000000..fa7630f841
--- /dev/null
+++ b/src/common/helpers.js
@@ -0,0 +1,28 @@
+import { access } from 'node:fs/promises';
+import { resolve } from 'node:path';
+import { fsConstants } from './constants.js';
+
+const fsPathExists = async (path) => {
+  try {
+    await access(path);
+    return true;
+  } catch {
+    return false;
+  }
+};
+const fsChecker = async (...descriptors) => {
+  const { ERROR_MESSAGE } = fsConstants.FS_ERRORS;
+  for (const { fullPath, condition } of descriptors) {
+    if ((await fsPathExists(fullPath)) === condition) throw new Error(ERROR_MESSAGE);
+  }
+};
+const fsPathResolver =
+  (basePath) =>
+  (...pathParts) =>
+    resolve(resolve(basePath), ...pathParts);
+const cliPrintArgument = (argument, joiner) => {
+  const [property, value] = argument;
+  console.log(`${property}${joiner}${value}`);
+};
+
+export { fsChecker, fsPathResolver, cliPrintArgument };
diff --git a/src/cp/cp.js b/src/cp/cp.js
index 72c6addc9c..3190998fc2 100644
--- a/src/cp/cp.js
+++ b/src/cp/cp.js
@@ -1,6 +1,11 @@
+import { spawn } from 'node:child_process';
+import { cpDescriptor } from '../common/functionDescriptors.js';
+
+const { argsArr } = cpDescriptor;
 const spawnChildProcess = async (args) => {
-    // Write your code here
+  const { interpreter, options } = cpDescriptor;
+  spawn(interpreter, args, options);
 };
 
 // Put your arguments in function call to test this functionality
-spawnChildProcess( /* [someArgument1, someArgument2, ...] */);
+spawnChildProcess(argsArr);
diff --git a/src/fs/copy.js b/src/fs/copy.js
index e226075b4c..c71e11a382 100644
--- a/src/fs/copy.js
+++ b/src/fs/copy.js
@@ -1,5 +1,13 @@
+import { cp } from 'node:fs/promises';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const copy = async () => {
-    // Write your code here
+  const { sourceFileDescriptor, destinationFileDescriptor, options } = fsDescriptors.copy;
+  await fsChecker(sourceFileDescriptor, destinationFileDescriptor);
+  const { fullPath: source } = sourceFileDescriptor;
+  const { fullPath: destination } = destinationFileDescriptor;
+  await cp(source, destination, options);
 };
 
 await copy();
diff --git a/src/fs/create.js b/src/fs/create.js
index 6ede285599..ce8351b7e6 100644
--- a/src/fs/create.js
+++ b/src/fs/create.js
@@ -1,5 +1,11 @@
+import { appendFile } from 'node:fs/promises';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const create = async () => {
-    // Write your code here
+  const { fileDescriptor, text } = fsDescriptors.create;
+  await fsChecker(fileDescriptor);
+  await appendFile(fileDescriptor.fullPath, text);
 };
 
 await create();
diff --git a/src/fs/delete.js b/src/fs/delete.js
index a70b13766c..9df4d21240 100644
--- a/src/fs/delete.js
+++ b/src/fs/delete.js
@@ -1,5 +1,11 @@
+import { rm } from 'node:fs/promises';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const remove = async () => {
-    // Write your code here
+  const { fileDescriptor } = fsDescriptors.delete;
+  await fsChecker(fileDescriptor);
+  await rm(fileDescriptor.fullPath);
 };
 
 await remove();
diff --git a/src/fs/list.js b/src/fs/list.js
index 0c0fa21f7e..7f34e0cac7 100644
--- a/src/fs/list.js
+++ b/src/fs/list.js
@@ -1,5 +1,15 @@
+import { readdir } from 'node:fs/promises';
+import { parse } from 'node:path';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const list = async () => {
-    // Write your code here
+  const { directoryDescriptor } = fsDescriptors.list;
+  await fsChecker(directoryDescriptor);
+  for (const file of await readdir(directoryDescriptor.fullPath)) {
+    const { name } = parse(file);
+    console.log(name);
+  }
 };
 
 await list();
diff --git a/src/fs/read.js b/src/fs/read.js
index e3938be563..25df6fb983 100644
--- a/src/fs/read.js
+++ b/src/fs/read.js
@@ -1,5 +1,11 @@
+import { readFile } from 'node:fs/promises';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const read = async () => {
-    // Write your code here
+  const { fileDescriptor } = fsDescriptors.read;
+  await fsChecker(fileDescriptor);
+  console.log(await readFile(fileDescriptor.fullPath, { encoding: 'utf-8' }));
 };
 
 await read();
diff --git a/src/fs/rename.js b/src/fs/rename.js
index b1d65b0c86..8b645f2e3e 100644
--- a/src/fs/rename.js
+++ b/src/fs/rename.js
@@ -1,5 +1,13 @@
+import { rename as renameFile } from 'node:fs/promises';
+import { fsChecker } from '../common/helpers.js';
+import { fsDescriptors } from '../common/functionDescriptors.js';
+
 const rename = async () => {
-    // Write your code here
+  const { oldFileDescriptor, newFileDescriptor } = fsDescriptors.rename;
+  await fsChecker(oldFileDescriptor, newFileDescriptor);
+  const { fullPath: oldFilePath } = oldFileDescriptor;
+  const { fullPath: newFilePath } = newFileDescriptor;
+  await renameFile(oldFilePath, newFilePath);
 };
 
 await rename();
diff --git a/src/hash/calcHash.js b/src/hash/calcHash.js
index e37c17ed62..aa56c9198c 100644
--- a/src/hash/calcHash.js
+++ b/src/hash/calcHash.js
@@ -1,5 +1,21 @@
+import { createHash } from 'node:crypto';
+import { createReadStream } from 'node:fs';
+import { hashDescriptor } from '../common/functionDescriptors.js';
+import { stdout } from 'node:process';
+
 const calculateHash = async () => {
-    // Write your code here
+  const { fullPath, encoding, algorithm } = hashDescriptor;
+  const hash = createHash(algorithm);
+  hash.setEncoding(encoding);
+  const input = createReadStream(fullPath);
+  input.pipe(hash).pipe(stdout);
+  hash.on('end', () => {
+    stdout.write('\n');
+  });
+  input.on('error', (err) => {
+    console.error(`Error reading file: ${err.message}`);
+    process.exit(1);
+  });
 };
 
 await calculateHash();
diff --git a/src/modules/cjsToEsm.cjs b/src/modules/cjsToEsm.cjs
deleted file mode 100644
index 089bd2db13..0000000000
--- a/src/modules/cjsToEsm.cjs
+++ /dev/null
@@ -1,34 +0,0 @@
-const path = require('node:path');
-const { release, version } = require('node:os');
-const { createServer: createServerHttp } = require('node:http');
-
-require('./files/c.cjs');
-
-const random = Math.random();
-
-const unknownObject = random > 0.5 ? require('./files/a.json') : require('./files/b.json');
-
-console.log(`Release ${release()}`);
-console.log(`Version ${version()}`);
-console.log(`Path segment separator is "${path.sep}"`);
-
-console.log(`Path to current file is ${__filename}`);
-console.log(`Path to current directory is ${__dirname}`);
-
-const myServer = createServerHttp((_, res) => {
-  res.end('Request accepted');
-});
-
-const PORT = 3000;
-
-console.log(unknownObject);
-
-myServer.listen(PORT, () => {
-  console.log(`Server is listening on port ${PORT}`);
-  console.log('To terminate it, use Ctrl+C combination');
-});
-
-module.exports = {
-  unknownObject,
-  myServer,
-};
diff --git a/src/modules/esm.mjs b/src/modules/esm.mjs
new file mode 100644
index 0000000000..e0430254f6
--- /dev/null
+++ b/src/modules/esm.mjs
@@ -0,0 +1,33 @@
+import { sep } from 'node:path';
+import { release, version } from 'node:os';
+
+import { createServer as createServerHttp } from 'node:http';
+
+import './files/c.cjs';
+
+const random = Math.random();
+
+export const unknownObject =
+  random > 0.5
+    ? (await import('./files/a.json', { with: { type: 'json' } })).default
+    : (await import('./files/b.json', { with: { type: 'json' } })).default;
+
+console.log(`Release ${release()}`);
+console.log(`Version ${version()}`);
+console.log(`Path segment separator is "${sep}"`);
+
+console.log(`Path to current file is ${import.meta.filename}`);
+console.log(`Path to current directory is ${import.meta.dirname}`);
+
+export const myServer = createServerHttp((_, res) => {
+  res.end('Request accepted');
+});
+
+const PORT = 3000;
+
+console.log(unknownObject);
+
+myServer.listen(PORT, () => {
+  console.log(`Server is listening on port ${PORT}`);
+  console.log('To terminate it, use Ctrl+C combination');
+});
diff --git a/src/streams/read.js b/src/streams/read.js
index e3938be563..b3f765330e 100644
--- a/src/streams/read.js
+++ b/src/streams/read.js
@@ -1,5 +1,13 @@
+import { createReadStream } from 'node:fs';
+import { streamsDescriptors } from '../common/functionDescriptors.js';
+import { stdout } from 'node:process';
+
 const read = async () => {
-    // Write your code here
+  const { fullPath, inputEnd } = streamsDescriptors.read;
+  const input = createReadStream(fullPath);
+  input.pipe(stdout);
+  input.on('end', () => stdout.write(inputEnd));
+  input.on('error', (err) => console.error(err.message));
 };
 
 await read();
diff --git a/src/streams/transform.js b/src/streams/transform.js
index 9e6c15fe84..4963e32b65 100644
--- a/src/streams/transform.js
+++ b/src/streams/transform.js
@@ -1,5 +1,15 @@
+import { stdin, stdout } from 'node:process';
+import { streamsConstants } from '../common/constants.js';
+
 const transform = async () => {
-    // Write your code here
+  const { NEW_LINE } = streamsConstants;
+  const { ENCODING } = streamsConstants.transform;
+  stdin.setEncoding(ENCODING);
+  stdin.on('data', (chunk) => {
+    const text = chunk.toString().trimEnd();
+    const reversedText = text.split('').reverse().join('');
+    stdout.write(`${reversedText}${NEW_LINE}`);
+  });
 };
 
 await transform();
diff --git a/src/streams/write.js b/src/streams/write.js
index 84aa11e7cb..58a36fa20b 100644
--- a/src/streams/write.js
+++ b/src/streams/write.js
@@ -1,5 +1,12 @@
+import { stdin } from 'node:process';
+import { createWriteStream } from 'node:fs';
+import { streamsDescriptors } from '../common/functionDescriptors.js';
+
 const write = async () => {
-    // Write your code here
+  const { fullPath } = streamsDescriptors.write;
+  const output = createWriteStream(fullPath);
+  stdin.pipe(output);
+  output.on('error', (err) => console.error(err.message));
 };
 
 await write();
diff --git a/src/wt/main.js b/src/wt/main.js
index e2ef054d41..61d0485151 100644
--- a/src/wt/main.js
+++ b/src/wt/main.js
@@ -1,5 +1,21 @@
+import { Worker } from 'node:worker_threads';
+import { wtDescriptors } from '../common/functionDescriptors.js';
+
 const performCalculations = async () => {
-    // Write your code here
+  const { fullPathToWorker, error, success, baseN, workersCount } = wtDescriptors;
+  const tasks = [];
+  for (let i = 0; i < workersCount; i++) {
+    const task = new Promise((resolve) => {
+      const worker = new Worker(fullPathToWorker, { workerData: baseN + i });
+      worker.on('message', (data) => resolve({ ...success, data }));
+      worker.on('error', () => resolve(error));
+      worker.on('exit', (code) => {
+        if (code !== 0) resolve(error);
+      });
+    });
+    tasks.push(task);
+  }
+  console.log(await Promise.all(tasks));
 };
 
 await performCalculations();
diff --git a/src/wt/worker.js b/src/wt/worker.js
index 405595394d..2f5c550689 100644
--- a/src/wt/worker.js
+++ b/src/wt/worker.js
@@ -1,8 +1,8 @@
-// n should be received from main thread
-const nthFibonacci = (n) => n < 2 ? n : nthFibonacci(n - 1) + nthFibonacci(n - 2);
+import { parentPort, workerData } from 'node:worker_threads';
+
+const nthFibonacci = (n) => (n < 2 ? n : nthFibonacci(n - 1) + nthFibonacci(n - 2));
 
 const sendResult = () => {
-    // This function sends result of nthFibonacci computations to main thread
+  parentPort.postMessage(nthFibonacci(workerData));
 };
-
 sendResult();
diff --git a/src/zip/compress.js b/src/zip/compress.js
index d55209587e..d9249c459b 100644
--- a/src/zip/compress.js
+++ b/src/zip/compress.js
@@ -1,5 +1,14 @@
+import { createReadStream, createWriteStream } from 'node:fs';
+import { createGzip } from 'node:zlib';
+import { pipeline } from 'node:stream/promises';
+import { zipDescriptors } from '../common/functionDescriptors.js';
+
 const compress = async () => {
-    // Write your code here
+  const { fullPathToFile, fullPathToArchive } = zipDescriptors.compress;
+  const input = createReadStream(fullPathToFile);
+  const gzip = createGzip();
+  const output = createWriteStream(fullPathToArchive);
+  await pipeline(input, gzip, output);
 };
 
 await compress();
diff --git a/src/zip/decompress.js b/src/zip/decompress.js
index 8aaf26c8a4..ae66b6e4e2 100644
--- a/src/zip/decompress.js
+++ b/src/zip/decompress.js
@@ -1,5 +1,11 @@
+import { createReadStream, createWriteStream } from 'node:fs';
+import { createGunzip } from 'node:zlib';
+import { pipeline } from 'node:stream/promises';
+import { zipDescriptors } from '../common/functionDescriptors.js';
+
 const decompress = async () => {
-    // Write your code here
+  const { fullPathToArchive, fullPathToFile } = zipDescriptors.compress;
+  await pipeline(createReadStream(fullPathToArchive), createGunzip(), createWriteStream(fullPathToFile));
 };
 
 await decompress();