diff --git a/README.md b/README.md index f2be878..45fac40 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ -# @jy95/i18n-tools [![codecov](https://codecov.io/gh/jy95/i18n-tools/branch/master/graph/badge.svg?token=PQDE2R2GYR)](https://codecov.io/gh/jy95/i18n-tools) +# @jy95/i18n-tools [![codecov](https://codecov.io/gh/jy95/i18n-tools/branch/master/graph/badge.svg?token=PQDE2R2GYR)](https://codecov.io/gh/jy95/i18n-tools) [![Codacy Badge](https://app.codacy.com/project/badge/Grade/95593519673143d6a1e475c1d2c4332c)](https://www.codacy.com/gh/jy95/i18n-tools) CLI to make common operations around i18n files simpler. -- 👩‍💻 Export i18n files into something else (xlsx, ...) -- ✨ Turn a file (xlsx, ...) to i18n file(s) +- 👩‍💻 Export i18n files into something else (xlsx, csv, ...) +- ✨ Turn a file (xlsx, csv, ...) to i18n file(s) - 📜 Compare at least two i18n files and generate a report - ... diff --git a/package-lock.json b/package-lock.json index 2c8e47e..d33879d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16331,9 +16331,9 @@ } }, "typescript": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.5.tgz", - "integrity": "sha512-DqQgihaQ9cUrskJo9kIyW/+g0Vxsk8cDtZ52a3NGh0YNTfpUSArXSohyUGnvbPazEPLu398C0UxmKSOrPumUzA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.3.tgz", + "integrity": "sha512-4xfscpisVgqqDfPaJo5vkd+Qd/ItkoagnHpufr+i2QCHBsNYp+G7UAoyFl8aPtx879u38wPV65rZ8qbGZijalA==", "dev": true }, "uglify-js": { diff --git a/package.json b/package.json index fb09b76..82fbd28 100644 --- a/package.json +++ b/package.json @@ -70,11 +70,11 @@ "@types/yargs": "^17.0.2", "fsify": "^4.0.2", "husky": "^7.0.2", + "semantic-release": "^17.4.7", "size-limit": "^5.0.3", "tsdx": "^0.14.1", "tslib": "^2.3.1", - "typescript": "^4.3.5", - "semantic-release": "^17.4.7" + "typescript": "^4.4.3" }, "dependencies": { "exceljs": "^4.2.1", diff --git a/src/checks/export/index.ts b/src/checks/export/index.ts index c03c9f3..3a54e0c 100644 --- a/src/checks/export/index.ts +++ b/src/checks/export/index.ts @@ -2,4 +2,8 @@ export * from "./export_common_checks"; // check for xlsx sub command -export * as XLSX from "./export_xlsx_checks"; \ No newline at end of file +export * as XLSX from "./export_xlsx_checks"; + +// check for csv sub command +// as it is identical (at that time) to xlsx, simply re-export same module +export * as CSV from "./export_xlsx_checks"; \ No newline at end of file diff --git a/src/checks/import/index.ts b/src/checks/import/index.ts index aac6539..e6ab476 100644 --- a/src/checks/import/index.ts +++ b/src/checks/import/index.ts @@ -2,4 +2,8 @@ export * from "./import_common_checks"; // check for xlsx sub command -export * as XLSX from "./import_xlsx_checks"; \ No newline at end of file +export * as XLSX from "./import_xlsx_checks"; + +// check for csv sub command +// as it is identical (at that time) to xlsx, simply re-export same module +export * as CSV from "./import_xlsx_checks"; \ No newline at end of file diff --git a/src/cmds/export.ts b/src/cmds/export.ts index 4f2fbf9..949c65a 100644 --- a/src/cmds/export.ts +++ b/src/cmds/export.ts @@ -1,5 +1,6 @@ // export command import export_xlsx from './export_cmds/export_xlsx'; +import export_csv from './export_cmds/export_csv'; // named exports export const command = 'export '; @@ -10,6 +11,7 @@ export const builder = function(y: any) { y // commandDir doesn't work very well in Typescript .command(export_xlsx) + .command(export_csv) ); }; /* 
istanbul ignore next */
diff --git a/src/cmds/export_cmds/export_csv.ts b/src/cmds/export_cmds/export_csv.ts
new file mode 100644
index 0000000..345243a
--- /dev/null
+++ b/src/cmds/export_cmds/export_csv.ts
@@ -0,0 +1,139 @@
+// for fs ops
+import path from 'path';
+import Excel from 'exceljs';
+
+// common fct
+import { merge_i18n_files, setUpCommonsOptions } from './export_commons';
+import { parsePathToJSON } from '../../middlewares/middlewares';
+
+// checks import
+import { resolveChecksInOrder, EXPORT_CHECKS } from '../../checks/index';
+
+// For typing
+// eslint-disable-next-line
+import { Argv } from "yargs";
+import { CSVExportArguments, I18N_Merged_Data } from '../../types/exportTypes';
+
+// checks for this command
+const CHECKS = [...EXPORT_CHECKS.CHECKS, ...EXPORT_CHECKS.CSV.CHECKS];
+
+// named exports
+export const command = 'to_csv';
+export const description = 'Export i18n files into a csv file';
+
+export const builder = function(y: Argv) {
+  return (
+    setUpCommonsOptions(y) // set up common options for export
+      .option('columns', {
+        description:
+          'Absolute path to a JSON array of objects, to control the columns. Example : [{ "locale": "FR", "label": "French translation" }]',
+        demandOption: true,
+      })
+      .option('delimiter', {
+        description: 'Specify a field delimiter such as | or \\t',
+        choices: [',', ';', '\t', ' ', '|'],
+        default: ';',
+      })
+      .option('rowDelimiter', {
+        description: 'Specify an alternate row delimiter (e.g. \\r\\n)',
+        type: 'string',
+        default: '\n',
+      })
+      .option('quote', {
+        description: 'String to quote fields that contain a delimiter',
+        type: 'string',
+        default: '"',
+      })
+      .option('escape', {
+        description:
+          'The character to use when escaping a value that is quoted and contains a quote character that is not the end of the field',
+        type: 'string',
+        default: '"',
+      })
+      .option('writeBOM', {
+        description:
+          'Set to true if you want the first character written to the stream to be a utf-8 BOM character.',
+        type: 'boolean',
+        default: false,
+      })
+      .option('quoteHeaders', {
+        description: 'If true, all headers will be quoted',
+        type: 'boolean',
+        default: true,
+      })
+      // coerce columns into Object
+      .middleware(parsePathToJSON('columns'), true)
+      // validations
+      .check(resolveChecksInOrder(CHECKS))
+  );
+};
+
+export const handler = async function(argv: CSVExportArguments) {
+  try {
+    let data: I18N_Merged_Data = await merge_i18n_files(argv);
+    const CSV_FILE = path.resolve(argv.outputDir, argv.filename + '.csv');
+    await export_as_csv(CSV_FILE, argv, data);
+    console.log(`${CSV_FILE} successfully written`);
+    return Promise.resolve(undefined);
+  } catch (/* istanbul ignore next */ err) {
+    return Promise.reject(err);
+  }
+};
+
+// write
+async function export_as_csv(
+  CSV_FILE: string,
+  argv: CSVExportArguments,
+  data: I18N_Merged_Data
+) {
+  console.log('Preparing CSV file ...');
+
+  // prepare data
+  const workbook = new Excel.Workbook();
+  let worksheet = workbook.addWorksheet();
+
+  // Set up columns
+  worksheet.columns = [
+    { header: 'Technical Key', key: 'technical_key' },
+  ].concat(
+    argv.columns.map(({ label, locale }) => ({
+      header: label,
+      key: `labels.${locale}`,
+    }))
+  );
+
+  // workaround as Exceljs doesn't support nested key
+  worksheet.addRows(
+    data.map(item =>
+      argv.columns.reduce(
+        (acc: { [x: string]: string }, { locale }) => {
+          acc[`labels.${locale}`] = item['labels'][locale] || '';
+          return acc;
+        },
+        { technical_key: item['technical_key'] }
+      )
+    )
+  );
+
+  // finally write this file
+  const options = {
+    // https://c2fo.io/fast-csv/docs/formatting/options
+    formatterOptions: {
+      delimiter: argv.delimiter,
+      rowDelimiter: argv.rowDelimiter,
+      quote: argv.quote,
+      escape: argv.escape,
+      writeBOM: argv.writeBOM,
+      quoteHeaders: argv.quoteHeaders,
+    },
+  };
+  return workbook.csv.writeFile(CSV_FILE, options);
+}
+
+// default export
+export default {
+  command: command,
+  description: description,
+  builder: builder,
+  handler: handler,
+};
diff --git a/src/cmds/import.ts b/src/cmds/import.ts
index be37517..e74dc98 100644
--- a/src/cmds/import.ts
+++ b/src/cmds/import.ts
@@ -1,5 +1,6 @@
 // import command
 import import_xlsx from './import_cmds/import_xlsx';
+import import_csv from './import_cmds/import_csv';
 
 // named exports
 export const command = 'import ';
@@ -10,6 +11,7 @@
     y // commandDir doesn't work very well in Typescript
       .command(import_xlsx)
+      .command(import_csv)
   );
 };
diff --git a/src/cmds/import_cmds/import_csv.ts b/src/cmds/import_cmds/import_csv.ts
new file mode 100644
index 0000000..50c8300
--- /dev/null
+++ b/src/cmds/import_cmds/import_csv.ts
@@ -0,0 +1,141 @@
+import Excel from 'exceljs';
+
+// common fct
+import { setUpCommonsOptions, generate_i18n_filepaths, extractedTranslations_to_i18n_files } from "./import_commons";
+import { parsePathToJSON } from "../../middlewares/middlewares";
+
+// lodash methods
+import flattenDeep from "lodash/flattenDeep";
+
+// checks import
+import {
+  resolveChecksInOrder,
+  IMPORT_CHECKS
+} from "../../checks/index";
+
+// For typing
+// eslint-disable-next-line
+import type { Argv } from "yargs";
+import { CSVImportArguments } from "../../types/importTypes";
+
+// checks for this command
+const CHECKS = [...IMPORT_CHECKS.CHECKS, ...IMPORT_CHECKS.CSV.CHECKS];
+
+// named exports
+export const command = "from_csv";
+export const description = "Turn a csv file to i18n file(s)";
+
+export const builder = function (y : Argv) {
+  return setUpCommonsOptions(y) // set up common options for import
+    .options("columns", {
+      describe: "Absolute path to a JSON object that describes the headers of the csv columns used to store translations",
+      demandOption: true
+    })
+    .option('delimiter', {
+      description: 'Specify a field delimiter such as | or \\t',
+      choices: [',', ';', '\t', ' ', '|'],
+      default: ';',
+    })
+    .option('quote', {
+      description: 'String used to quote fields that contain a delimiter',
+      type: 'string',
+      default: '"',
+    })
+    .option('escape', {
+      description:
+        'The character used when escaping a value that is quoted and contains a quote character that is not the end of the field',
+      type: 'string',
+      default: '"',
+    })
+    .option('encoding', {
+      description: "Input file encoding",
+      choices: ['utf8', 'utf16le', 'latin1'],
+      default: 'utf8'
+    })
+    // coerce columns into Object
+    .middleware(parsePathToJSON("columns"), true)
+    // validations
+    .check(resolveChecksInOrder(CHECKS))
+}
+
+export const handler = async function (argv : CSVImportArguments) {
+  try {
+    const translations = await csv_2_translation_objects(argv);
+    const files = generate_i18n_filepaths(argv);
+    await extractedTranslations_to_i18n_files(files, translations);
+    console.log("Successfully exported found locale(s) to i18n json file(s)");
+    return Promise.resolve(undefined);
+  } catch (error) {
+    return Promise.reject(error);
+  }
+}
+
+// Extract translations from csv file
+async function csv_2_translation_objects(argv : CSVImportArguments) {
+  const options = {
+    // https://c2fo.io/fast-csv/docs/parsing/options
+    parserOptions: {
+      delimiter: argv.delimiter,
+      quote: argv.quote,
+      escape: argv.escape,
+      encoding: argv.encoding
+    }
+  };
+  const workbook = new Excel.Workbook();
+  const worksheet = await workbook.csv.readFile(argv.input, options);
+  let rowCount = worksheet.rowCount;
+
+  // column properties to load
+  let columns = argv.columns;
+
+  // retrieve the headers of the table
+  // Warning : Exceljs puts an undefined value at index 0 for some reason
+  let headers = worksheet.getRow(1).values as (undefined | string)[];
+  // retrieve data of the table
+  let data = (worksheet.getRows(2, rowCount-1) || /* istanbul ignore next */ []).map(item => item.values);
+
+  // find out where the technical key is
+  const technical_key_index = headers.findIndex(h => (h || '').includes(columns.technical_key));
+
+  if (technical_key_index === -1) {
+    return Promise.reject(new Error("Couldn't find index for technical_key with provided label"));
+  }
+
+  // find out where the translations are positioned in the row values
+  const locales_index = Object
+    .entries(columns.locales)
+    .map( ([key, value]) => ({ [key]: headers.findIndex(h => (h || '').includes(value)) }))
+    .reduce( (prev, curr) => Object.assign(prev, curr), {})
+
+  // Warn users if some locale translations couldn't be found
+  let missing_indexes = Object
+    .entries(locales_index)
+    .filter( ([_, idx]) => idx === -1);
+
+  for(let [locale, ] of missing_indexes) {
+    /* istanbul ignore next Not worth creating a test case for that */
+    console.warn(`Couldn't find index for ${locale} locale with provided label`)
+  }
+
+  // build results
+  let results = data.map(
+    (row : any) => Object
+      .entries(locales_index)
+      // skip translation(s) where index couldn't be found
+      .filter( ([_, idx]) => idx !== -1)
+      .map( ([locale, localeIndex]) => ({
+        "technical_key": row[technical_key_index],
+        "label": row[localeIndex],
+        "locale": locale
+      }))
+  )
+  return Promise.resolve(flattenDeep(results));
+}
+
+// default export
+export default {
+  command : command,
+  description: description,
+  builder : builder,
+  handler: handler
+}
\ No newline at end of file
diff --git a/src/cmds/import_cmds/import_xlsx.ts b/src/cmds/import_cmds/import_xlsx.ts
index 1ec917f..25ccd48 100644
--- a/src/cmds/import_cmds/import_xlsx.ts
+++ b/src/cmds/import_cmds/import_xlsx.ts
@@ -1,4 +1,3 @@
-
 import Excel from 'exceljs';
 
 // common fct
@@ -17,7 +16,7 @@ import {
 // For typing
 // eslint-disable-next-line
 import type { Argv } from "yargs";
-import { XLSXExportArguments } from "../../types/importTypes";
+import { XLSXImportArguments } from "../../types/importTypes";
 
 // checks for this command
 const CHECKS = [...IMPORT_CHECKS.CHECKS, ...IMPORT_CHECKS.XLSX.CHECKS];
@@ -38,7 +37,7 @@ export const builder = function (y : Argv) {
     .check(resolveChecksInOrder(CHECKS))
 }
 
-export const handler = async function (argv : XLSXExportArguments) {
+export const handler = async function (argv : XLSXImportArguments) {
   try {
     const translations = await xlsx_2_translation_objects(argv);
     const files = generate_i18n_filepaths(argv);
@@ -51,7 +50,7 @@
 }
 
 // Extract translations from xlsx file
-async function xlsx_2_translation_objects(argv : XLSXExportArguments) {
+async function xlsx_2_translation_objects(argv : XLSXImportArguments) {
  let workbook = await new Excel.Workbook()
    .xlsx
    .readFile(argv.input);
diff --git a/src/types/exportTypes.ts b/src/types/exportTypes.ts
index 571ce07..77a6eb5 100644
--- a/src/types/exportTypes.ts
+++
b/src/types/exportTypes.ts @@ -18,6 +18,19 @@ export interface XLSXExportArguments extends CommonExportArguments { worksheetName: string; worksheetCustomizer?: string; } +// Yargs export arguments for TO_CSV command +export interface CSVExportArguments extends CommonExportArguments { + columns: { + locale: string; + label: string; + }[]; + delimiter: ',' | ';' | '\t' | ' ' | '|'; + rowDelimiter: string; + quote: '"' | string; + escape: '"' | string; + writeBOM: boolean; + quoteHeaders: boolean; +} // Result after extract of multiple i18n files export type I18N_Merged_Data = { diff --git a/src/types/importTypes.ts b/src/types/importTypes.ts index 6fcda22..7d0474e 100644 --- a/src/types/importTypes.ts +++ b/src/types/importTypes.ts @@ -10,7 +10,7 @@ export interface CommonImportArguments extends Argv { } // Yargs import arguments for FROM_XLSX command -export interface XLSXExportArguments extends CommonImportArguments { +export interface XLSXImportArguments extends CommonImportArguments { columns: { technical_key: string; locales: { @@ -19,6 +19,20 @@ export interface XLSXExportArguments extends CommonImportArguments { }; } +// Yargs import arguments for FROM_CSV command +export interface CSVImportArguments extends CommonImportArguments { + columns: { + technical_key: string; + locales: { + [locale: string]: string; + }; + }; + delimiter: ',' | ';' | '\t' | ' ' | '|'; + quote: '"' | string; + escape: '"' | string; + encoding: 'utf8' | 'utf16le' | 'latin1' +} + // Result after extract of input file export interface extractedTranslation { technical_key: string; diff --git a/test/export/export-csv.test.ts b/test/export/export-csv.test.ts new file mode 100644 index 0000000..9ce9798 --- /dev/null +++ b/test/export/export-csv.test.ts @@ -0,0 +1,499 @@ +import os from 'os'; +import path from 'path'; +import yargs from 'yargs'; +// export command +import { + command, + description as describeText, + builder, +} from '../../src/cmds/export'; +// CSV description +import { description as csv_description } from '../../src/cmds/export_cmds/export_csv'; + +// temp folder +const TEMP_FOLDER = os.tmpdir(); +// test folders constants +const ROOT_TEST_FOLDER = 'tests-for-export-csv'; +const [VALID_TEST_FOLDER, USELESS_TEST_FOLDER] = [ + 'correct', // folder where every file are correct + 'useless', // folder where file has an useless content ([]) +]; + +// initialise fsify +const fsify: { + [x: string]: any; + DIRECTORY: any; + FILE: any; + (_: { [x: string]: any }): Promise; +} = require('fsify')({ + cwd: TEMP_FOLDER, + persistent: false, + force: true, +}); + +// Translations keys for test +const TRANSLATIONS_KEYS = ['FR', 'NL', 'DE']; +const KEYS_LABEL: { [key: string]: string } = { + FR: 'French', + NL: 'Dutch', + DE: 'German', +}; +const locale_label = (locale: string) => `${KEYS_LABEL[locale]} translation`; + +// i18n example +const generate_i18n = (locale: string) => ({ + commons: { + myNestedKey: `Hello world ${locale}`, + myNestedArray: ['1', '2', '3'].map(item => `${item} ${locale}`) + }, + array: ['1', '2', '3'].map(item => `${item} ${locale}`), + simpleKey: `[${locale}] not setted key`, + "Key with spaces": [ {"test": "42 is the answer"} ], + "Missing key in DE": (locale !== TRANSLATIONS_KEYS[2]) ? 
"present" : undefined +}); + +// Export files +const generate_files = ( + locales: string[], + fnMapper: (locale: string) => string +) => + locales.reduce((acc: { [x: string]: string }, locale: string) => { + acc[locale] = fnMapper(locale); + return acc; + }, {}); + +// Export columns +const EXPORT_COLUMNS = (locales: string[]) => + locales.map(locale => ({ + locale, + label: locale_label(locale), + })); + +// flat operation +const flat = (arr: any[]) => [].concat(...arr); + +// type for fsify structure +type fsify_structure = { + type: any; + name: string; + contents: string | fsify_structure; +}[]; + +// to access easier the paths of test file paths +const test_files_list = [ + // correct files + 'columns.json', + 'files.json', + 'settings1.json', + 'settings2.json', + // wrong files + 'emptyObject.json', + 'emptyArray.json', + 'files-duplicatedValues.json', + 'columns-missingLabelProp.json', + 'columns-wrongPropValue.json', + 'columns-duplicatedValues.json', + 'columns-missingKey.json', +] as const; +const [ + TEST_FILE_EXPORT_COLUMNS, + TEST_FILE_FILES, + TEST_FILE_SETTINGS1, + TEST_FILE_SETTINGS2, + TEST_FILE_EMPTY_OBJECT, + TEST_FILE_EMPTY_ARRAY, + TEST_FILE_FILES_DUP, + TEST_FILE_EXPORT_COLUMNS_MISS_PROP, + TEST_FILE_EXPORT_COLUMNS_WRONG_PROP, + TEST_FILE_EXPORT_COLUMNS_DUP_VALS, + TEST_FILE_EXPORT_COLUMNS_MISS_KEY, +] = test_files_list; +type test_files_type = typeof test_files_list[number]; + +// file structure for fsify, in order to run the tests +const structure: fsify_structure = [ + { + type: fsify.DIRECTORY, + name: ROOT_TEST_FOLDER, + contents: [ + // In this folder, everything in correct + { + type: fsify.DIRECTORY, + name: VALID_TEST_FOLDER, + contents: flat([ + // 3 i18n files + TRANSLATIONS_KEYS.map(locale => ({ + type: fsify.FILE, + name: `${locale.toLowerCase()}.json`, + contents: JSON.stringify(generate_i18n(locale)), + })), + // the columns.json + { + type: fsify.FILE, + name: TEST_FILE_EXPORT_COLUMNS, + contents: JSON.stringify(EXPORT_COLUMNS(TRANSLATIONS_KEYS)), + }, + // the files.json + { + type: fsify.FILE, + name: TEST_FILE_FILES, + contents: JSON.stringify( + generate_files(TRANSLATIONS_KEYS, locale => + path.resolve( + TEMP_FOLDER, + ROOT_TEST_FOLDER, + VALID_TEST_FOLDER, + `${locale.toLowerCase()}.json` + ) + ) + ), + }, + // First format of settings.json (Path) + { + type: fsify.FILE, + name: TEST_FILE_SETTINGS1, + contents: JSON.stringify({ + files: path.resolve( + TEMP_FOLDER, + ROOT_TEST_FOLDER, + VALID_TEST_FOLDER, + TEST_FILE_FILES + ), + columns: path.resolve( + TEMP_FOLDER, + ROOT_TEST_FOLDER, + VALID_TEST_FOLDER, + TEST_FILE_EXPORT_COLUMNS + ), + filename: 'settings1-output', + outputDir: TEMP_FOLDER, + }), + }, + // Second format of settings.json (Object/Array instead of Paths) + { + type: fsify.FILE, + name: TEST_FILE_SETTINGS2, + contents: JSON.stringify({ + files: generate_files(TRANSLATIONS_KEYS, locale => + path.resolve( + TEMP_FOLDER, + ROOT_TEST_FOLDER, + VALID_TEST_FOLDER, + `${locale.toLowerCase()}.json` + ) + ), + columns: EXPORT_COLUMNS(TRANSLATIONS_KEYS), + filename: 'settings2-output', + outputDir: TEMP_FOLDER, + }), + }, + ]), + }, + // In this folder, files used for validations + { + type: fsify.DIRECTORY, + name: USELESS_TEST_FOLDER, + contents: [ + // An empty object + { + type: fsify.FILE, + name: TEST_FILE_EMPTY_OBJECT, + contents: JSON.stringify({}), + }, + // An empty array + { + type: fsify.FILE, + name: TEST_FILE_EMPTY_ARRAY, + contents: JSON.stringify([]), + }, + // files.json with duplicated values + { + type: fsify.FILE, + 
name: TEST_FILE_FILES_DUP, + contents: JSON.stringify( + generate_files(TRANSLATIONS_KEYS, _ => + path.resolve( + TEMP_FOLDER, + ROOT_TEST_FOLDER, + VALID_TEST_FOLDER, + `${TRANSLATIONS_KEYS[0].toLowerCase()}.json` + ) + ) + ), + }, + // columns.json with missing property (label) + { + type: fsify.FILE, + name: TEST_FILE_EXPORT_COLUMNS_MISS_PROP, + contents: JSON.stringify([{ locale: 'FR' }]), + }, + // columns.json with wrong property type + { + type: fsify.FILE, + name: TEST_FILE_EXPORT_COLUMNS_WRONG_PROP, + contents: JSON.stringify([{ locale: 'FR', label: 42 }]), + }, + // columns.json with duplicated value + { + type: fsify.FILE, + name: TEST_FILE_EXPORT_COLUMNS_DUP_VALS, + contents: JSON.stringify([ + { locale: 'FR', label: 'Hello World' }, + { locale: 'NL', label: 'Hello World' }, + ]), + }, + // columns.json with missing key for files.json + { + type: fsify.FILE, + name: TEST_FILE_EXPORT_COLUMNS_MISS_KEY, + contents: JSON.stringify([ + { locale: 'FR', label: 'French translation' }, + ]), + }, + ], + }, + ], + }, +]; + +// files path +const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( + (acc: any, curr: test_files_type, idx: number) => { + // improvement for later : handle generically nested stuff + let arr = [ + TEMP_FOLDER, + ROOT_TEST_FOLDER, + idx < 4 ? VALID_TEST_FOLDER : USELESS_TEST_FOLDER, + curr, + ]; + acc[curr] = path.resolve(...arr); + return acc; + }, + {} +); + +beforeAll(() => { + // write temporary files + return fsify(structure); +}); + +// Build the parser used for that command +const parser = yargs.command(command, describeText, builder).help(); + +// return the output of a given command to the parser +function fetchOutput(cmd: string): Promise { + return new Promise(resolve => { + parser.parse(cmd, (_err: Error | undefined, _argv: any, output: string) => { + resolve(output); + }); + }); +} + +// makes assertions on errors +async function expectError(cmd: string, ...messages: string[]) { + // error to be retrieve + let error: any = undefined; + // In tests, I had to make sure yargs doesn't override error for the following reason : + // Even when validation failed, it somehow can go to handler() + let isFirstError = true; + + // add fail() handler + // Because of problem explained above, I had to ignore if an error occurs afterwards + try { + await parser + .fail((_, e) => { + if (isFirstError) { + isFirstError = false; + error = e; + } + }) + .parseAsync(cmd); + } catch (_) {} + // check if error was set + expect(error).not.toEqual(undefined); + // check if it is an error Object + expect(error).toHaveProperty('message'); + // check if error message contains expected element + for (let expectedStr of messages) { + expect((error as Error).message).toMatch(expectedStr); + } +} + +// to concat faster command +type concat_cmd_type = (args: string[]) => string; +type prepare_mandatory_args_type = (...args: string[]) => string[]; +const concat_cmd: concat_cmd_type = (args: string[]) => + `export to_csv ${args.join(' ')}`; +const prepare_mandatory_args: prepare_mandatory_args_type = ( + ...args: string[] +) => ['--files', `"${args[0]}"`, '--columns', `"${args[1]}"`]; + +// test scenarios for validations +const VALIDATIONS_SCENARIOS : [ + string, + string[], + ...string[] +][] = [ + [ + // Test out the message : "Error: test.csv has an extension : Remove it please" + 'Filename with extension should be rejected', + [ + TEST_FILE_FILES, + TEST_FILE_EXPORT_COLUMNS, + '--filename', + `"test.csv"`, + ], + 'test.csv', + 'extension', + ], + [ + // Test 
out the message : "Option files is not a JSON Object" + 'Option files - unexpected file should be rejected', + [TEST_FILE_EMPTY_ARRAY, TEST_FILE_EXPORT_COLUMNS], + 'not a JSON Object' + ], + [ + // Test out the message : "Option files should have at least one entry" + 'Option files - empty object should be rejected', + [TEST_FILE_EMPTY_OBJECT, TEST_FILE_EXPORT_COLUMNS], + 'at least one entry', + ], + [ + // Test out the message : "At least a duplicated value in files JSON object was detected" + 'Option files - Duplicated values should be rejected', + [TEST_FILE_FILES_DUP, TEST_FILE_EXPORT_COLUMNS], + 'duplicated value', + ], + [ + // Test out the message : "columns is not a JSON Array" + 'Option columns - unexpected file should be rejected', + [TEST_FILE_FILES, TEST_FILE_EMPTY_OBJECT], + 'not a JSON Array', + ], + [ + // Test out the message : "Option columns should have at least one entry" + 'Option columns - empty array should be rejected', + [TEST_FILE_FILES, TEST_FILE_EMPTY_ARRAY], + 'at least one entry', + ], + [ + // Test out the message : `At least one item in columns array doesn't have "${prop}" property` + 'Option columns - missing property in array should be rejected', + [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_MISS_PROP], + "doesn't have", + 'property', + ], + [ + // Test out the message : `At least one item in columns array doesn't have "${prop}" property with a String value` + 'Option columns - unexpected property type should be rejected', + [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_WRONG_PROP], + "doesn't have", + 'property with a String value', + ], + [ + // Test out the message : `At least a duplicated value in columns array in prop "${prop}" was detected` + 'Option columns - duplicated value should be rejected', + [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_DUP_VALS], + 'duplicated value', + ], + [ + // Test out the message : 'At least one key differs between files and columns options' + 'Options files & columns - incompatibles files should be rejected', + [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_MISS_KEY], + 'between files and columns', + ] +]; + +describe('[export_csv command]', () => { + describe('Check command availability', () => { + it('Should list to_csv in export command', async () => { + const output = await fetchOutput('export --help'); + expect(output).toMatch('to_csv'); + }); + + it('Should display to_csv help output', async () => { + const output = await fetchOutput('export to_csv --help'); + expect(output).toMatch(csv_description); + }); + }); + + describe('Validations', () => { + // mock console.log + let consoleLog: any; + beforeAll(() => { + consoleLog = jest.spyOn(console, 'log').mockImplementation(); + }); + + // restore console.log + afterAll(() => { + if (consoleLog !== undefined) { + consoleLog.mockRestore(); + } + }); + + test.each(VALIDATIONS_SCENARIOS)('%s', async (_title: string, args: string[], ...messages: string[]) => { + let [files, columns, ...otherArgs] = args; + let test_cmd = concat_cmd([ + // mandatory args + ...prepare_mandatory_args( + TEST_FILES[files as test_files_type], + TEST_FILES[columns as test_files_type] + ), + // optional args + ...otherArgs, + ]); + //console.warn(test_cmd); + // Test out if error message is thrown + await expectError(test_cmd, ...messages); + }); + }); + + + describe('E2E successful scenarios', () => { + // mock console.log + let consoleLog: any; + beforeAll(() => { + consoleLog = jest.spyOn(console, 'log').mockImplementation(); + }); + + // clear mock after each call + afterEach(() => { + 
consoleLog.mockClear(); + }); + + // reenable console.log + afterAll(() => { + // restore console.log + if (consoleLog !== undefined) { + consoleLog.mockRestore(); + } + }); + + test.each([ + ['(Paths)', TEST_FILE_SETTINGS1], + ['(Object/Array instead of Paths)', TEST_FILE_SETTINGS2] + ])( + 'settings.json %s', + async (_title: string, settingsFile: test_files_type) => { + let test_cmd = concat_cmd([ + '--settings', + `"${TEST_FILES[settingsFile]}"`, + ]); + // example : 'settings1-output' + let expectedFile = path.resolve( + TEMP_FOLDER, + `${settingsFile.substring(0, settingsFile.length - 5)}-output.csv` + ); + // run command + //console.warn(test_cmd); + await parser.parseAsync(test_cmd); + + expect(consoleLog).toHaveBeenCalledWith('Preparing CSV file ...'); + expect(consoleLog).toHaveBeenCalledWith( + `${expectedFile} successfully written` + ); + } + ); + }); + +}); \ No newline at end of file diff --git a/test/export-xlsx.test.ts b/test/export/export-xlsx.test.ts similarity index 98% rename from test/export-xlsx.test.ts rename to test/export/export-xlsx.test.ts index 0a89a39..ed9ece7 100644 --- a/test/export-xlsx.test.ts +++ b/test/export/export-xlsx.test.ts @@ -6,14 +6,14 @@ import { command, description as describeText, builder, -} from '../src/cmds/export'; +} from '../../src/cmds/export'; // XLSX description -import { description as xlsx_description } from '../src/cmds/export_cmds/export_xlsx'; +import { description as xlsx_description } from '../../src/cmds/export_cmds/export_xlsx'; // temp folder const TEMP_FOLDER = os.tmpdir(); // test folders constants -const ROOT_TEST_FOLDER = 'tests-for-export'; +const ROOT_TEST_FOLDER = 'tests-for-export-xlsx'; const [VALID_TEST_FOLDER, USELESS_TEST_FOLDER] = [ 'correct', // folder where every file are correct 'useless', // folder where file has an useless content ([]) @@ -215,6 +215,7 @@ const structure: fsify_structure = [ ), worksheetCustomizer: path.resolve( __dirname, + "..", "fixtures/export-xlsx", "worksheetCustomizer-dynamic.js" ), diff --git a/test/fixtures/import-csv/export-csv.csv b/test/fixtures/import-csv/export-csv.csv new file mode 100644 index 0000000..5c9b37c --- /dev/null +++ b/test/fixtures/import-csv/export-csv.csv @@ -0,0 +1,11 @@ +Technical Key;French translation;Dutch translation;German translation +Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer +Missing key in DE;present;present; +array[0];1 FR;1 NL;1 DE +array[1];2 FR;2 NL;2 DE +array[2];3 FR;3 NL;3 DE +commons.myNestedArray[0];1 FR;1 NL;1 DE +commons.myNestedArray[1];2 FR;2 NL;2 DE +commons.myNestedArray[2];3 FR;3 NL;3 DE +commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE +simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key \ No newline at end of file diff --git a/test/import/import-csv.test.ts b/test/import/import-csv.test.ts new file mode 100644 index 0000000..2d648a3 --- /dev/null +++ b/test/import/import-csv.test.ts @@ -0,0 +1,390 @@ +import os from 'os'; +import path from 'path'; +import yargs from 'yargs'; +// import command +import { + command, + description as describeText, + builder, +} from '../../src/cmds/import'; +// CSV description +import { description as csv_description } from '../../src/cmds/import_cmds/import_csv'; + +// temp folder +const TEMP_FOLDER = os.tmpdir(); +// test folders constants +const ROOT_TEST_FOLDER = 'tests-for-import-csv'; +const [VALID_TEST_FOLDER, USELESS_TEST_FOLDER] = [ + 'correct', // folder where every file are correct + 'useless', // folder where file has an 
useless content +]; + +// initialise fsify +const fsify: { + [x: string]: any; + DIRECTORY: any; + FILE: any; + (_: { [x: string]: any }): Promise; +} = require('fsify')({ + cwd: TEMP_FOLDER, + persistent: false, + force: true, +}); + +// Build the parser used for that command +const parser = yargs.command(command, describeText, builder).help(); + +// to concat faster command +type concat_cmd_type = (args: string[]) => string; +type prepare_mandatory_args_type = (...args: [string, string, string[]]) => string[]; +const concat_cmd: concat_cmd_type = (args: string[]) => + `import from_csv ${args.join(' ')}`; +const prepare_mandatory_args: prepare_mandatory_args_type = ( + ...[input, columns, ...locales] +) => ['--input', `"${input}"`, '--columns', `"${columns}"`, '--locales', locales.join(' ')]; + +// return the output of a given command to the parser +function fetchOutput(cmd: string): Promise { + return new Promise(resolve => { + parser.parse(cmd, (_err: Error | undefined, _argv: any, output: string) => { + resolve(output); + }); + }); +} + +// makes assertions on errors +async function expectError(cmd: string, ...messages: string[]) { + // error to be retrieve + let error: any = undefined; + // In tests, I had to make sure yargs doesn't override error for the following reason : + // Even when validation failed, it somehow can go to handler() + let isFirstError = true; + + // add fail() handler + // Because of problem explained above, I had to ignore if an error occurs afterwards + try { + await parser + .fail((_, e) => { + if (isFirstError) { + isFirstError = false; + error = e; + } + }) + .parseAsync(cmd); + } catch (_) {} + // check if error was set + expect(error).not.toEqual(undefined); + // check if it is an error Object + expect(error).toHaveProperty('message'); + // check if error message contains expected element + for (let expectedStr of messages) { + expect((error as Error).message).toMatch(expectedStr); + } +} + +// to access easier the paths of test file paths +const test_files_list = [ + // inpput file + 'export-csv.csv', + // correct files + 'columns.json', + 'settings1.json', + 'settings2.json', + // wrong files + 'emptyObject.json', + 'emptyArray.json', + // wrong columns.json + 'columns-technicalKeyNotString.json', + 'columns-localesNotAObject.json', + 'columns-localesValuesNotString.json' +] as const; +const [ + TEST_FILE_INPUT, + TEST_FILE_COLUMNS, + TEST_FILE_SETTINGS1, + TEST_FILE_SETTINGS2, + TEST_FILE_EMPTY_OBJECT, + TEST_FILE_EMPTY_ARRAY, + TEST_FILE_COLUMNS_TKNS, + TEST_FILE_COLUMNS_LNAO, + TEST_FILE_COLUMNS_LVNS +] = test_files_list; +type test_files_type = typeof test_files_list[number]; + +// files path +const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( + (acc: any, curr: test_files_type, idx: number) => { + let arr = + (idx === 0) + ? [ + __dirname, + "..", + "fixtures", + "import-csv", + curr, + ] + : [ + TEMP_FOLDER, + ROOT_TEST_FOLDER, + (idx > 0 && idx < 4) ? 
VALID_TEST_FOLDER : USELESS_TEST_FOLDER, + curr, + ] + ; + acc[curr] = path.resolve(...arr); + return acc; + }, + {} +); + +// test scenarios for validations +const VALIDATIONS_SCENARIOS : [ + string, + [test_files_type, test_files_type, string[], ...string[]], + ...string[] +][] = [ + [ + // Test out the message : "locales options doesn't contain uniq values" + 'Option locales - Duplicated values should be rejected', + [TEST_FILE_INPUT, TEST_FILE_COLUMNS, ["FR", "FR"]], + // I have to disable the error message check as yargs is buggy atm + //"doesn't contain uniq values" + ], + [ + // Test out the message : 'columns is not a JSON Object' + 'Option columns - unexpected file should be rejected', + [TEST_FILE_INPUT, TEST_FILE_EMPTY_ARRAY, ["FR", "NL"]], + "columns is not a JSON Object" + ], + [ + // Test out the message : `${missingProp} couldn't be found in columns object` + 'Option columns - missing property should be rejected', + [TEST_FILE_INPUT, TEST_FILE_EMPTY_OBJECT, ["FR", "NL"]], + "couldn't be found in columns object" + ], + [ + // Test out the message : "technical_key in columns object isn't a String" + 'Option columns - unexpected technical_key value should be reject', + [TEST_FILE_INPUT, TEST_FILE_COLUMNS_TKNS, ["FR", "NL"]], + "technical_key in columns object isn't a String" + ], + [ + // Test out the message : "locales key in columns object is not a JSON Object", + 'Option columns - unexpected locales value should be rejected', + [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LNAO, ["FR", "NL"]], + "locales key in columns object is not a JSON Object" + ], + [ + // Test out the message : "At least one value for locales key in columns object isn't a string" + 'Option columns - unexpected value(s) for locales should be rejected', + [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LVNS, ["FR", "NL"]] + ] +]; + +// file structure for fsify, in order to run the tests +// type for fsify structure +type fsify_structure = { + type: any; + name: string; + contents: string | fsify_structure; +}[]; + +// file structure for fsify, in order to run the tests +const structure: fsify_structure = [ + { + type: fsify.DIRECTORY, + name: ROOT_TEST_FOLDER, + contents: [ + // In this folder, everything in correct + { + type: fsify.DIRECTORY, + name: VALID_TEST_FOLDER, + contents: [ + // columns file + { + type: fsify.FILE, + name: TEST_FILE_COLUMNS, + contents: JSON.stringify({ + technical_key: "Technical Key", + locales: { + "FR": "French translation", + "NL": "Dutch translation", + "DE": "German translation" + } + }) + }, + // First format of settings.json (Path) + { + type: fsify.FILE, + name: TEST_FILE_SETTINGS1, + contents: JSON.stringify({ + input: TEST_FILES[TEST_FILE_INPUT], + columns: TEST_FILES[TEST_FILE_COLUMNS], + locales: ["FR", "NL", "DE"], + outputDir: path.resolve(TEMP_FOLDER, ROOT_TEST_FOLDER), + suffix: "_settings1" + }) + }, + // Second format of settings.json (Object/Array instead of Paths) + { + type: fsify.FILE, + name: TEST_FILE_SETTINGS2, + contents: JSON.stringify({ + input: TEST_FILES[TEST_FILE_INPUT], + columns: { + technical_key: "Technical Key", + locales: { + "FR": "French translation", + "NL": "Dutch translation", + "DE": "German translation" + } + }, + locales: ["FR", "NL", "DE"], + outputDir: path.resolve(TEMP_FOLDER, ROOT_TEST_FOLDER), + suffix: "_settings2" + }) + }, + ] + }, + // In this folder, files used for validations + { + type: fsify.DIRECTORY, + name: USELESS_TEST_FOLDER, + contents: [ + // An empty object + { + type: fsify.FILE, + name: TEST_FILE_EMPTY_OBJECT, + contents: 
JSON.stringify({}), + }, + // An empty array + { + type: fsify.FILE, + name: TEST_FILE_EMPTY_ARRAY, + contents: JSON.stringify([]), + }, + // columns option - technical_key not a string + { + type: fsify.FILE, + name: TEST_FILE_COLUMNS_TKNS, + contents: JSON.stringify({ + technical_key: 42.0, + locales: {} + }) + }, + // columns option - locales not a object + { + type: fsify.FILE, + name: TEST_FILE_COLUMNS_LNAO, + contents: JSON.stringify({ + technical_key: "something", + locales: [] + }) + }, + // columns option - locales values not string + { + type: fsify.FILE, + name: TEST_FILE_COLUMNS_LVNS, + contents: JSON.stringify({ + technical_key: "something", + locales: { + "FR": 42.0, + "NL": null + } + }) + }, + ] + } + ] + } +]; + +beforeAll(() => { + // write temporary files + return fsify(structure); +}); + +describe('[import_csv command]', () => { + + describe('Check command availability', () => { + it('Should list from_csv in import command', async () => { + const output = await fetchOutput('import --help'); + expect(output).toMatch('from_csv'); + }); + + it('Should display from_csv help output', async () => { + const output = await fetchOutput('import from_csv --help'); + expect(output).toMatch(csv_description); + }); + }); + + describe('Validations', () => { + // mock console.log + let consoleLog: any; + beforeAll(() => { + consoleLog = jest.spyOn(console, 'log').mockImplementation(); + }); + + // restore console.log + afterAll(() => { + if (consoleLog !== undefined) { + consoleLog.mockRestore(); + } + }); + + test.each(VALIDATIONS_SCENARIOS)('%s', async (_title: string, args: [test_files_type, test_files_type, string[], ...string[]], ...messages: string[]) => { + let [input, columns, locales, ...otherArgs] = args; + let test_cmd = concat_cmd([ + // mandatory args + ...prepare_mandatory_args( + TEST_FILES[input], + TEST_FILES[columns], + locales + ), + // optional args + ...otherArgs, + ]); + //console.warn(test_cmd); + // Test out if error message is thrown + await expectError(test_cmd, ...messages); + }); + }); + + describe('E2E successful scenarios', () => { + // mock console.log + let consoleLog: any; + beforeAll(() => { + consoleLog = jest.spyOn(console, 'log').mockImplementation(); + }); + + // clear mock after each call + afterEach(() => { + consoleLog.mockClear(); + }); + + // reenable console.log + afterAll(() => { + // restore console.log + if (consoleLog !== undefined) { + consoleLog.mockRestore(); + } + }); + + test.each([ + ['(Paths)', TEST_FILE_SETTINGS1], + ['(Object/Array instead of Paths)', TEST_FILE_SETTINGS2], + ])( + 'settings.json %s', + async (_title: string, settingsFile: test_files_type) => { + let test_cmd = concat_cmd([ + '--settings', + `"${TEST_FILES[settingsFile]}"`, + ]); + // run command + //console.warn(test_cmd); + await parser.parseAsync(test_cmd); + + expect(consoleLog).toHaveBeenCalledWith('Successfully exported found locale(s) to i18n json file(s)'); + } + ); + }); +}); \ No newline at end of file diff --git a/test/import-xlsx.test.ts b/test/import/import-xlsx.test.ts similarity index 98% rename from test/import-xlsx.test.ts rename to test/import/import-xlsx.test.ts index 42d601b..6a139e8 100644 --- a/test/import-xlsx.test.ts +++ b/test/import/import-xlsx.test.ts @@ -6,14 +6,14 @@ import { command, description as describeText, builder, -} from '../src/cmds/import'; +} from '../../src/cmds/import'; // XLSX description -import { description as xlsx_description } from '../src/cmds/import_cmds/import_xlsx'; +import { description as xlsx_description } 
from '../../src/cmds/import_cmds/import_xlsx'; // temp folder const TEMP_FOLDER = os.tmpdir(); // test folders constants -const ROOT_TEST_FOLDER = 'tests-for-import'; +const ROOT_TEST_FOLDER = 'tests-for-import-xlsx'; const [VALID_TEST_FOLDER, USELESS_TEST_FOLDER] = [ 'correct', // folder where every file are correct 'useless', // folder where file has an useless content @@ -118,6 +118,7 @@ const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( (idx === 0) ? [ __dirname, + "..", "fixtures", "import-xlsx", curr, diff --git a/website/docs/commands/export/assets/export-csv.csv b/website/docs/commands/export/assets/export-csv.csv new file mode 100644 index 0000000..5c9b37c --- /dev/null +++ b/website/docs/commands/export/assets/export-csv.csv @@ -0,0 +1,11 @@ +Technical Key;French translation;Dutch translation;German translation +Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer +Missing key in DE;present;present; +array[0];1 FR;1 NL;1 DE +array[1];2 FR;2 NL;2 DE +array[2];3 FR;3 NL;3 DE +commons.myNestedArray[0];1 FR;1 NL;1 DE +commons.myNestedArray[1];2 FR;2 NL;2 DE +commons.myNestedArray[2];3 FR;3 NL;3 DE +commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE +simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key \ No newline at end of file diff --git a/website/docs/commands/export/export to_csv.mdx b/website/docs/commands/export/export to_csv.mdx new file mode 100644 index 0000000..a3cd2e4 --- /dev/null +++ b/website/docs/commands/export/export to_csv.mdx @@ -0,0 +1,217 @@ +--- +sidebar_position: 2 +sidebar_label: export to_csv +--- + +# export to_csv + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +> Export i18n files into a csv file + +## Command + +```bash +# Display help for export to_csv +npx @jy95/i18n-tools export to_csv --help +``` + +## Purpose + +Suppose you have several i18n locales such as : + + + + +```json title="fr.json" +{ + "commons":{ + "myNestedKey":"Hello world FR", + "myNestedArray":[ + "1 FR", + "2 FR", + "3 FR" + ] + }, + "array":[ + "1 FR", + "2 FR", + "3 FR" + ], + "simpleKey":"[FR] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" + } + ], + "Missing key in DE":"present" +} +``` + + + + +```json title="nl.json" +{ + "commons":{ + "myNestedKey":"Hello world NL", + "myNestedArray":[ + "1 NL", + "2 NL", + "3 NL" + ] + }, + "array":[ + "1 NL", + "2 NL", + "3 NL" + ], + "simpleKey":"[NL] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" + } + ], + "Missing key in DE":"present" +} +``` + + + + +```json title="de.json" +{ + "commons":{ + "myNestedKey":"Hello world DE", + "myNestedArray":[ + "1 DE", + "2 DE", + "3 DE" + ] + }, + "array":[ + "1 DE", + "2 DE", + "3 DE" + ], + "simpleKey":"[DE] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" + } + ] +} +``` + + + + +This command helps you to turn them into a single csv file such as [this one](./assets/export-csv.csv). 
+ +```csv title="export-csv.csv" +Technical Key;French translation;Dutch translation;German translation +Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer +Missing key in DE;present;present; +array[0];1 FR;1 NL;1 DE +array[1];2 FR;2 NL;2 DE +array[2];3 FR;3 NL;3 DE +commons.myNestedArray[0];1 FR;1 NL;1 DE +commons.myNestedArray[1];2 FR;2 NL;2 DE +commons.myNestedArray[2];3 FR;3 NL;3 DE +commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE +simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key +``` + +## Examples of settings + + + + + +```bash +npx @jy95/i18n-tools export to_csv --settings "/absolutePath/to/settings1.json" +``` + +```json title="settings1.json" +{ + "files":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\files.json", + "columns":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\columns.json", + "filename":"settings1-output", + "outputDir":"D:\\TEMP\\TEMP" +} +``` + +```json title="files.json" +{ + "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", + "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", + "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" +} +``` + +```json title="columns.json" +[ + { + "locale":"FR", + "label":"French translation" + }, + { + "locale":"NL", + "label":"Dutch translation" + }, + { + "locale":"DE", + "label":"German translation" + } +] +``` + + + + +```bash +npx @jy95/i18n-tools export to_csv --settings "/absolutePath/to/settings2.json" +``` + +```json title="settings2.json" +{ + "files":{ + "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", + "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", + "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" + }, + "columns":[ + { + "locale":"FR", + "label":"French translation" + }, + { + "locale":"NL", + "label":"Dutch translation" + }, + { + "locale":"DE", + "label":"German translation" + } + ], + "filename":"settings2-output", + "outputDir":"D:\\TEMP\\TEMP" +} +``` + + + \ No newline at end of file diff --git a/website/docs/commands/import/import from_csv.mdx b/website/docs/commands/import/import from_csv.mdx new file mode 100644 index 0000000..59e442a --- /dev/null +++ b/website/docs/commands/import/import from_csv.mdx @@ -0,0 +1,205 @@ +--- +sidebar_position: 2 +sidebar_label: import from_csv +--- + +# import from_csv + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +> Turn a csv file to i18n file(s) + +## Command + +```bash +# Display help for import from_csv +npx @jy95/i18n-tools import from_csv --help +``` + +## Purpose + +Suppose you have a [csv file](../export/assets/export-csv.csv) structured as : + +```csv title="export-csv.csv" +Technical Key;French translation;Dutch translation;German translation +Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer +Missing key in DE;present;present; +array[0];1 FR;1 NL;1 DE +array[1];2 FR;2 NL;2 DE +array[2];3 FR;3 NL;3 DE +commons.myNestedArray[0];1 FR;1 NL;1 DE +commons.myNestedArray[1];2 FR;2 NL;2 DE +commons.myNestedArray[2];3 FR;3 NL;3 DE +commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE +simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key +``` + +This command helps you to turn this into several i18n json files : + + + + +```json title="fr.json" +{ + "commons":{ + "myNestedKey":"Hello world FR", + "myNestedArray":[ + "1 FR", + "2 FR", + "3 FR" + ] + }, + "array":[ + "1 FR", + "2 FR", + "3 FR" + ], + "simpleKey":"[FR] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" 
+ } + ], + "Missing key in DE":"present" +} +``` + + + + +```json title="nl.json" +{ + "commons":{ + "myNestedKey":"Hello world NL", + "myNestedArray":[ + "1 NL", + "2 NL", + "3 NL" + ] + }, + "array":[ + "1 NL", + "2 NL", + "3 NL" + ], + "simpleKey":"[NL] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" + } + ], + "Missing key in DE":"present" +} +``` + + + + +```json title="de.json" +{ + "commons":{ + "myNestedKey":"Hello world DE", + "myNestedArray":[ + "1 DE", + "2 DE", + "3 DE" + ] + }, + "array":[ + "1 DE", + "2 DE", + "3 DE" + ], + "simpleKey":"[DE] not setted key", + "Key with spaces":[ + { + "test":"42 is the answer" + } + ] +} +``` + + + + +## Examples of settings + + + + + +```bash +npx @jy95/i18n-tools import from_csv --settings "/absolutePath/to/settings1.json" +``` + +```json title="settings1.json" +{ + "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-csv\\export-csv.csv", + "columns":"D:\\TEMP\\TEMP\\tests-for-import\\correct\\columns.json", + "locales":[ + "FR", + "NL", + "DE" + ], + "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", + "suffix":"_settings1" +} +``` + +```json title="columns.json" +{ + "technical_key":"Technical Key", + "locales":{ + "FR":"French translation", + "NL":"Dutch translation", + "DE":"German translation" + } +} +``` + + + + + +```bash +npx @jy95/i18n-tools import from_csv --settings "/absolutePath/to/settings2.json" +``` + +```json title="settings2.json" +{ + "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-csv\\export-csv.csv", + "columns":{ + "technical_key":"Technical Key", + "locales":{ + "FR":"French translation", + "NL":"Dutch translation", + "DE":"German translation" + } + }, + "locales":[ + "FR", + "NL", + "DE" + ], + "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", + "suffix":"_settings2" +} +``` + + + + \ No newline at end of file diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 1a95288..fc13fbc 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -69,6 +69,7 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); prism: { theme: lightCodeTheme, darkTheme: darkCodeTheme, + additionalLanguages: ['csv'] }, }), }); diff --git a/website/src/components/HomepageFeatures.tsx b/website/src/components/HomepageFeatures.tsx index 8bd4d08..8a5e7e2 100644 --- a/website/src/components/HomepageFeatures.tsx +++ b/website/src/components/HomepageFeatures.tsx @@ -21,7 +21,7 @@ const FeatureList: FeatureItem[] = [ image: '/img/export.svg', description: ( <> - Export i18n files into something else (xlsx, ...) + Export i18n files into something else (xlsx, csv, ...) ), }, @@ -30,7 +30,7 @@ const FeatureList: FeatureItem[] = [ image: '/img/import.svg', description: ( <> - Turn a file (xlsx, ...) to i18n file(s) + Turn a file (xlsx, csv, ...) to i18n file(s) ), },