Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions src/commands.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { countHandler } from './commands/count.js';
import { hashHandler } from './commands/hash.js';
import { hashCompareHandler } from './commands/hashCompare.js';
import { csvToJsonHandler } from './commands/csvToJson.js';
import { jsonToCsvHandler } from './commands/jsonToCsv.js';
import { upHandler, cdHandler, lsHandler } from './navigation.js';
import { logStatsHandler } from './commands/logStats.js';
import { encryptHandler } from './commands/encrypt.js';
import { decryptHandler } from './commands/decrypt.js';


/**
 * Maps CLI command names (as typed by the user) to their handler functions.
 * Every handler receives the raw argument list and returns a promise.
 * Frozen so the command table cannot be mutated at runtime.
 */
export const COMMAND_HANDLERS_MAP = Object.freeze({
  up: upHandler,
  cd: cdHandler,
  ls: lsHandler,
  'csv-to-json': csvToJsonHandler,
  'json-to-csv': jsonToCsvHandler,
  count: countHandler,
  hash: hashHandler,
  'hash-compare': hashCompareHandler,
  'log-stats': logStatsHandler,
  encrypt: encryptHandler,
  decrypt: decryptHandler,
});
48 changes: 48 additions & 0 deletions src/commands/count.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@

import { createReadStream } from 'node:fs';
import { argParser } from '../utils/argParser.js';
import { LINE_SEPARATOR, WORD_SEPARATOR } from '../constants.js';
import { validateFileExtention } from '../utils/validateFileExtention.js';


// Splits a string on WORD_SEPARATOR and drops empty tokens (produced by
// consecutive separators or leading/trailing whitespace).
const getWordsFromString = (str) => str.split(WORD_SEPARATOR).filter(w => w.length > 0);
// Sums an array of numbers.
const getSum = (arr) => arr.reduce((acc, wc) => acc + wc, 0);

/**
 * `count` command: prints line, word, and character counts for a .txt file.
 *
 * Streams the file so arbitrarily large inputs are processed with
 * constant memory. Each line separator counts as exactly 1 character.
 *
 * @param {string[]} args - raw CLI arguments; must contain `--input <path>`.
 * @throws {Error} when the input is missing or is not a .txt file.
 */
export const countHandler = async (args) => {
  const parsedArgs = argParser(args, {
    input: { type: 'path', required: true },
  });

  validateFileExtention(parsedArgs.input, '.txt');

  const readableStream = createReadStream(parsedArgs.input, { encoding: 'utf-8' });

  let buffer = '';
  let linesCount = 0;
  let wordsCount = 0;
  let charsCount = 0;

  for await (const chunk of readableStream) {
    buffer += chunk;

    const lines = buffer.split(LINE_SEPARATOR);
    // Keep the (possibly incomplete) last line for the next chunk FIRST,
    // so its characters and words are not counted twice. (Counting before
    // popping double-counted the leftover on every chunk boundary.)
    buffer = lines.pop();

    linesCount += lines.length;
    // Each completed line contributes its own characters plus exactly one
    // line separator, counted as 1 char regardless of separator length.
    charsCount += getSum(lines.map((line) => line.length)) + lines.length;
    wordsCount += getSum(lines.map((line) => getWordsFromString(line).length));
  }

  // The trailing buffer is the final line (counted even when empty);
  // it has no separator after it.
  linesCount++;
  charsCount += buffer.length;

  if (buffer.length) {
    wordsCount += getWordsFromString(buffer).length;
  }

  console.log(`Lines: ${linesCount}`);
  console.log(`Words: ${wordsCount}`);
  console.log(`Characters: ${charsCount}`);
};
81 changes: 81 additions & 0 deletions src/commands/csvToJson.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
import { Transform } from 'node:stream';
import { fileConfersionHandler } from '../utils/fileConversionHandler.js';
import { LINE_SEPARATOR } from '../constants.js';


const EXTRA_HEADER_NAME = 'Extra';
const INDENT = 2;

/**
 * Creates a Transform stream that converts CSV text into a pretty-printed
 * JSON array of objects.
 *
 * The first non-empty CSV line is treated as the header row; every
 * following row becomes one JSON object. Rows with more cells than headers
 * get synthetic `Extra1`, `Extra2`, ... keys.
 */
const getCsvToJsonTransformSteam = () => {
  let transformBuffer = '';
  let headers = [];
  let isFirstDataRow = true;
  let isArrayOpen = false;

  // Prefix for a data row: bare indentation for the first row,
  // ",\n" + indentation for every subsequent one.
  const getDataPrefix = () => {
    if (isFirstDataRow) {
      isFirstDataRow = false;
      return ' '.repeat(INDENT);
    }
    return `,\n${' '.repeat(INDENT)}`;
  };

  const transformDataToJsonString = (data) => {
    const obj = {};
    let unknownHeaderCounter = 1;
    data.forEach((value, i) => {
      // Fall back to a synthetic header when the row is wider than the
      // header line (or the header cell was empty).
      const header = headers[i] || `${EXTRA_HEADER_NAME}${unknownHeaderCounter++}`;
      obj[header] = value;
    });
    return JSON.stringify(obj);
  };

  // Handles one complete CSV line; `stream` is the Transform instance.
  const processLine = (stream, line) => {
    if (!line.trim()) {
      return;
    }

    const parsedLineData = line.split(',');

    if (!headers.length) {
      headers = parsedLineData.map((h) => h.trim());
      stream.push('[\n');
      isArrayOpen = true;
    } else {
      stream.push(`${getDataPrefix()}${transformDataToJsonString(parsedLineData)}`);
    }
  };

  return new Transform({
    transform(chunk, _, callback) {
      transformBuffer += chunk;

      const lines = transformBuffer.split(LINE_SEPARATOR);
      // Keep the last (possibly incomplete) line for the next chunk.
      transformBuffer = lines.pop();

      lines.forEach((line) => processLine(this, line));

      callback();
    },
    flush(callback) {
      // A file without a trailing newline leaves its last line in the
      // buffer; process it the same way as any other line (previously a
      // leftover line was emitted as data even when no header/`[` had
      // been written yet).
      if (transformBuffer.trim()) {
        processLine(this, transformBuffer);
      }
      transformBuffer = '';

      // Always emit a syntactically valid JSON array, even for empty input.
      if (!isArrayOpen) {
        this.push('[');
      }
      this.push('\n]\n');
      callback();
    },
  });
};

/**
 * `csv-to-json` command: converts a .csv input file into a .json output file.
 *
 * @param {string[]} args - raw CLI arguments, forwarded to the shared
 *   file-conversion handler.
 */
export const csvToJsonHandler = async (args) => {
  return fileConfersionHandler(args, '.csv', '.json', getCsvToJsonTransformSteam());
};
53 changes: 53 additions & 0 deletions src/commands/decrypt.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { createDecipheriv, scrypt } from 'node:crypto';
import { createReadStream, createWriteStream } from 'node:fs';
import { open, stat } from 'node:fs/promises';
import { pipeline } from 'node:stream/promises';
import { promisify } from 'node:util';
import { SALT_SIZE, IV_SIZE, AUTH_TAG_SIZE, KEY_SIZE, ENCRYPTION_ALGORITHM } from '../constants.js';
import { argParser } from '../utils/argParser.js';


/**
 * Reads the salt, IV, and auth tag out of an encrypted file.
 *
 * Encrypted file layout: [salt][iv][ciphertext][authTag].
 *
 * @param {string} inputFilePath - path to the encrypted file.
 * @param {number} size - total file size in bytes (from fs.stat).
 * @returns {Promise<{salt: Buffer, iv: Buffer, authTag: Buffer}>}
 * @throws {Error} when the file is too small to contain the header and
 *   trailer, or when any of the reads comes back short (truncated file).
 */
const getDecryptEntities = async (inputFilePath, size) => {
  // Reject files that cannot possibly contain salt + iv + authTag; without
  // this check the authTag read position would be negative.
  if (size < SALT_SIZE + IV_SIZE + AUTH_TAG_SIZE) {
    throw new Error('Input file is too small to be a valid encrypted file');
  }

  const handle = await open(inputFilePath, 'r');

  const salt = Buffer.alloc(SALT_SIZE);
  const iv = Buffer.alloc(IV_SIZE);
  const authTag = Buffer.alloc(AUTH_TAG_SIZE);

  try {
    const saltRead = await handle.read(salt, 0, SALT_SIZE, 0);
    const ivRead = await handle.read(iv, 0, IV_SIZE, SALT_SIZE);
    const tagRead = await handle.read(authTag, 0, AUTH_TAG_SIZE, size - AUTH_TAG_SIZE);

    // Guard against short reads so a corrupt file fails loudly instead of
    // decrypting with zero-padded parameters.
    if (
      saltRead.bytesRead !== SALT_SIZE ||
      ivRead.bytesRead !== IV_SIZE ||
      tagRead.bytesRead !== AUTH_TAG_SIZE
    ) {
      throw new Error('Failed to read encryption metadata from input file');
    }
  } finally {
    await handle.close();
  }

  return {
    salt,
    iv,
    authTag,
  };
};

/**
 * `decrypt` command: decrypts a file produced by the `encrypt` command.
 *
 * Re-derives the key from the password and the stored salt, then streams
 * the ciphertext (everything between the [salt][iv] header and the
 * trailing auth tag) through the decipher into the output file.
 *
 * @param {string[]} args - raw CLI arguments: `--input`, `--output`,
 *   `--password` (all required).
 */
export const decryptHandler = async (args) => {
  const { input, output, password } = argParser(args, {
    input: { type: 'path', required: true },
    output: { type: 'path', required: true },
    password: { type: 'string', required: true },
  });

  const { size } = await stat(input);
  const { salt, iv, authTag } = await getDecryptEntities(input, size);

  const scryptAsync = promisify(scrypt);
  const key = await scryptAsync(password, salt, KEY_SIZE);

  const decipher = createDecipheriv(ENCRYPTION_ALGORITHM, key, iv);
  decipher.setAuthTag(authTag);

  // Ciphertext sits between the header and the auth tag; `end` is inclusive.
  const ciphertextStart = SALT_SIZE + IV_SIZE;
  const ciphertextEnd = size - AUTH_TAG_SIZE - 1;

  await pipeline(
    createReadStream(input, { start: ciphertextStart, end: ciphertextEnd }),
    decipher,
    createWriteStream(output),
  );
};
32 changes: 32 additions & 0 deletions src/commands/encrypt.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import { createCipheriv, scrypt, randomBytes } from 'node:crypto';
import { createReadStream, createWriteStream } from 'node:fs';
import { pipeline, finished } from 'node:stream/promises';
import { promisify } from 'node:util';
import { ENCRYPTION_ALGORITHM, IV_SIZE, KEY_SIZE, SALT_SIZE } from '../constants.js';
import { argParser } from '../utils/argParser.js'

/**
 * `encrypt` command: encrypts a file with a password-derived key.
 *
 * Output file layout: [salt][iv][ciphertext][authTag], which is exactly
 * what the `decrypt` command expects to find.
 *
 * @param {string[]} args - raw CLI arguments: `--input`, `--output`,
 *   `--password` (all required).
 */
export const encryptHandler = async (args) => {
  const { input, output, password } = argParser(args, {
    input: { type: 'path', required: true },
    output: { type: 'path', required: true },
    password: { type: 'string', required: true },
  });

  const salt = randomBytes(SALT_SIZE);
  const iv = randomBytes(IV_SIZE);
  const scryptAsync = promisify(scrypt);
  const key = await scryptAsync(password, salt, KEY_SIZE);
  const cipher = createCipheriv(ENCRYPTION_ALGORITHM, key, iv);

  const outputStream = createWriteStream(output);

  // Header: salt + IV are needed to re-derive the key when decrypting.
  outputStream.write(salt);
  outputStream.write(iv);

  // `end: false` keeps the output open so the auth tag can be appended
  // after the ciphertext.
  await pipeline(createReadStream(input), cipher, outputStream, { end: false });

  // The auth tag only becomes available once the cipher is finalized.
  outputStream.write(cipher.getAuthTag());
  outputStream.end();

  await finished(outputStream);
};
34 changes: 34 additions & 0 deletions src/commands/hash.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import { createHash } from 'node:crypto';
import { pipeline } from 'node:stream/promises';
import { Readable } from 'node:stream';
import { createReadStream, createWriteStream } from 'node:fs';
import { argParser } from '../utils/argParser.js';
import { SUPPORTED_ALGORYTHMS } from '../constants.js';


/**
 * `hash` command: computes and prints the digest of a file.
 *
 * @param {string[]} args - raw CLI arguments: `--input <path>` (required),
 *   `--algorithm <name>` (default 'sha256'), `--save` (writes the digest
 *   to `<input>.<algorithm>`).
 * @throws {Error} when the requested algorithm is not supported.
 */
export const hashHandler = async (args) => {
  const parsedArgs = argParser(args, {
    input: { type: 'path', required: true },
    algorithm: { type: 'string', default: 'sha256' },
    save: { type: 'boolean', default: false },
  });

  if (!SUPPORTED_ALGORYTHMS.includes(parsedArgs.algorithm)) {
    throw new Error('Algorithm is not supported');
  }

  const hash = createHash(parsedArgs.algorithm);

  // Feed the file through hash.update() chunk by chunk. Using the Hash as
  // the pipeline destination can stall: pipeline() waits for the Hash's
  // readable side to drain, and nothing ever reads it.
  for await (const chunk of createReadStream(parsedArgs.input)) {
    hash.update(chunk);
  }

  const hashedValue = hash.digest('hex');

  if (parsedArgs.save) {
    const outputPath = `${parsedArgs.input}.${parsedArgs.algorithm}`;

    // Wrap the digest in an array so it is emitted as one chunk
    // (Readable.from(string) yields one chunk per character).
    await pipeline(Readable.from([hashedValue]), createWriteStream(outputPath));
  }

  console.log(`${parsedArgs.algorithm}: ${hashedValue}`);
};
35 changes: 35 additions & 0 deletions src/commands/hashCompare.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import { createHash } from 'node:crypto';
import { pipeline } from 'node:stream/promises';
import { readFile } from 'node:fs/promises';
import { createReadStream } from 'node:fs';
import { argParser } from '../utils/argParser.js';
import { SUPPORTED_ALGORYTHMS } from '../constants.js';
import { validateFileExtention } from '../utils/validateFileExtention.js';


/**
 * `hash-compare` command: verifies a file against a previously saved digest.
 *
 * The saved-digest file extension must match the algorithm (e.g. `.sha256`).
 * Prints 'OK' on match, 'MISMATCH' otherwise.
 *
 * @param {string[]} args - raw CLI arguments: `--input <path>`,
 *   `--hash <path>` (both required), `--algorithm <name>` (default 'sha256').
 * @throws {Error} when the requested algorithm is not supported.
 */
export const hashCompareHandler = async (args) => {
  const parsedArgs = argParser(args, {
    input: { type: 'path', required: true },
    hash: { type: 'path', required: true },
    algorithm: { type: 'string', default: 'sha256' },
  });

  if (!SUPPORTED_ALGORYTHMS.includes(parsedArgs.algorithm)) {
    throw new Error('Algorithm is not supported');
  }

  validateFileExtention(parsedArgs.hash, `.${parsedArgs.algorithm.toLowerCase()}`);

  const hash = createHash(parsedArgs.algorithm);

  // Consume the file with hash.update() directly. Using the Hash as the
  // pipeline destination can stall, because pipeline() waits for the
  // Hash's readable side to drain and nothing ever reads it.
  for await (const chunk of createReadStream(parsedArgs.input)) {
    hash.update(chunk);
  }

  const hashedValue = hash.digest('hex');

  const savedHash = await readFile(parsedArgs.hash, { encoding: 'utf-8' });

  const isValid = hashedValue === savedHash.trim().toLowerCase();

  // Fixed stray leading space in the mismatch message.
  console.log(isValid ? 'OK' : 'MISMATCH');
};
39 changes: 39 additions & 0 deletions src/commands/jsonToCsv.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import { Transform } from 'node:stream';
import { fileConfersionHandler } from '../utils/fileConversionHandler.js';


/**
 * Creates a Transform stream that converts a JSON array of flat objects
 * into CSV text.
 *
 * The header row is the union of keys across all rows (in first-seen
 * order); missing values become empty cells. Cells containing commas,
 * quotes, or newlines are quoted per RFC 4180 so the output stays
 * parseable.
 */
const getJsonToCsvTransformSteam = () => {
  let jsonStringBuffer = '';

  // Quote a cell only when it contains CSV metacharacters.
  const toCsvCell = (value) => {
    const str = String(value);
    return /[",\n\r]/.test(str) ? `"${str.replaceAll('"', '""')}"` : str;
  };

  return new Transform({
    transform(chunk, _, callback) {
      // JSON cannot be parsed incrementally here, so buffer the whole input.
      jsonStringBuffer += chunk;
      callback();
    },
    flush(callback) {
      try {
        const jsonData = JSON.parse(jsonStringBuffer);
        jsonStringBuffer = '';
        if (!Array.isArray(jsonData)) {
          throw new Error('Invalid json type');
        }

        // Union of keys across all rows, preserving first-seen order.
        const headerSet = new Set();
        jsonData.forEach((row) => Object.keys(row).forEach((h) => headerSet.add(h)));
        const headers = Array.from(headerSet);

        this.push(`${headers.map(toCsvCell).join(',')}\n`);

        jsonData.forEach((row) => {
          // `??` (not `||`) so valid falsy values like 0 and false survive.
          const valuesInHeadersOrder = headers.map((header) => row[header] ?? '');
          this.push(`${valuesInHeadersOrder.map(toCsvCell).join(',')}\n`);
        });

        callback();
      } catch (error) {
        // Route parse/shape errors through the stream instead of throwing
        // synchronously inside _flush.
        callback(error);
      }
    },
  });
};

/**
 * `json-to-csv` command: converts a .json input file into a .csv output file.
 *
 * @param {string[]} args - raw CLI arguments, forwarded to the shared
 *   file-conversion handler.
 */
export const jsonToCsvHandler = async (args) => {
  return fileConfersionHandler(args, '.json', '.csv', getJsonToCsvTransformSteam());
};
Loading