diff --git a/.eslintrc.js b/.eslintrc.js deleted file mode 100644 index e1178e9..0000000 --- a/.eslintrc.js +++ /dev/null @@ -1,27 +0,0 @@ -module.exports = { - env: { - commonjs: true, - es2021: true, - node: true - }, - extends: 'standard', - overrides: [ - { - env: { - node: true - }, - files: [ - '.eslintrc.{js,cjs}' - ], - parserOptions: { - sourceType: 'script' - } - } - ], - parserOptions: { - ecmaVersion: 'latest' - }, - rules: { - semi: ['error', 'always'] - } -}; diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..30149ce --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,14 @@ +{ + "env": { + "es2021": true, + "node": true + }, + "extends": "standard", + "parserOptions": { + "ecmaVersion": "latest", + "sourceType": "module" + }, + "rules": { + "semi": ["error", "always"] + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2e1c02f..8dbeb79 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,9 +10,8 @@ jobs: fail-fast: false matrix: node-version: - - 14 - - 16 - 18 + - 20 os: - ubuntu-latest - windows-latest diff --git a/index.js b/index.js index bdd22e2..b1cc611 100644 --- a/index.js +++ b/index.js @@ -1,12 +1,15 @@ -const { parse, stream } = require('./lib/poparser'); +import * as poParser from './lib/poparser.js'; +import poCompiler from './lib/pocompiler.js'; +import moParser from './lib/moparser.js'; +import moCompiler from './lib/mocompiler.js'; -module.exports.po = { - parse, - createParseStream: stream, - compile: require('./lib/pocompiler') +export const po = { + parse: poParser.parse, + createParseStream: poParser.stream, + compile: poCompiler }; -module.exports.mo = { - parse: require('./lib/moparser'), - compile: require('./lib/mocompiler') +export const mo = { + parse: moParser, + compile: moCompiler }; diff --git a/lib/mocompiler.js b/lib/mocompiler.js index 72bf9d2..9715d63 100644 --- a/lib/mocompiler.js +++ b/lib/mocompiler.js @@ -1,7 +1,7 @@ -const { Buffer } = 
require('safe-buffer'); -const encoding = require('encoding'); -const sharedFuncs = require('./shared'); -const contentType = require('content-type'); +import { Buffer } from 'safe-buffer'; +import encoding from 'encoding'; +import { HEADERS, formatCharset, generateHeader, compareMsgid } from './shared.js'; +import contentType from 'content-type'; /** * Exposes general compiler function. Takes a translation @@ -10,7 +10,7 @@ const contentType = require('content-type'); * @param {Object} table Translation object * @return {Buffer} Compiled binary MO object */ -module.exports = function (table) { +export default function (table) { const compiler = new Compiler(table); return compiler.compile(); @@ -30,10 +30,10 @@ function Compiler (table = {}) { headers = Object.keys(headers).reduce((result, key) => { const lowerKey = key.toLowerCase(); - if (sharedFuncs.HEADERS.has(lowerKey)) { + if (HEADERS.has(lowerKey)) { // POT-Creation-Date is removed in MO (see https://savannah.gnu.org/bugs/?49654) if (lowerKey !== 'pot-creation-date') { - result[sharedFuncs.HEADERS.get(lowerKey)] = headers[key]; + result[HEADERS.get(lowerKey)] = headers[key]; } } else { result[key] = headers[key]; @@ -83,11 +83,11 @@ Compiler.prototype.MAGIC = 0x950412de; Compiler.prototype._handleCharset = function () { const ct = contentType.parse(this._table.headers['Content-Type'] || 'text/plain'); - const charset = sharedFuncs.formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); + const charset = formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); // clean up content-type charset independently using fallback if missing if (ct.parameters.charset) { - ct.parameters.charset = sharedFuncs.formatCharset(ct.parameters.charset); + ct.parameters.charset = formatCharset(ct.parameters.charset); } this._table.charset = charset; @@ -105,7 +105,7 @@ Compiler.prototype._generateList = function () { list.push({ msgid: Buffer.alloc(0), - msgstr: 
encoding.convert(sharedFuncs.generateHeader(this._table.headers), this._table.charset) + msgstr: encoding.convert(generateHeader(this._table.headers), this._table.charset) }); Object.keys(this._table.translations).forEach(msgctxt => { @@ -245,7 +245,7 @@ Compiler.prototype.compile = function () { const list = this._generateList(); const size = this._calculateSize(list); - list.sort(sharedFuncs.compareMsgid); + list.sort(compareMsgid); return this._build(list, size); }; diff --git a/lib/moparser.js b/lib/moparser.js index a85e81a..09c10f8 100644 --- a/lib/moparser.js +++ b/lib/moparser.js @@ -1,5 +1,5 @@ -const encoding = require('encoding'); -const sharedFuncs = require('./shared'); +import encoding from 'encoding'; +import { formatCharset, parseHeader } from './shared.js'; /** * Parses a binary MO object into translation table @@ -8,7 +8,7 @@ const sharedFuncs = require('./shared'); * @param {String} [defaultCharset] Default charset to use * @return {Object} Translation object */ -module.exports = function (buffer, defaultCharset) { +export default function (buffer, defaultCharset) { const parser = new Parser(buffer, defaultCharset); return parser.parse(); @@ -122,13 +122,13 @@ Parser.prototype._handleCharset = function (headers) { let match; if ((match = headersStr.match(/[; ]charset\s*=\s*([\w-]+)/i))) { - this._charset = this._table.charset = sharedFuncs.formatCharset(match[1], this._charset); + this._charset = this._table.charset = formatCharset(match[1], this._charset); } headers = encoding.convert(headers, 'utf-8', this._charset) .toString('utf8'); - this._table.headers = sharedFuncs.parseHeader(headers); + this._table.headers = parseHeader(headers); }; /** diff --git a/lib/pocompiler.js b/lib/pocompiler.js index 9e5c2ad..f4baa0a 100644 --- a/lib/pocompiler.js +++ b/lib/pocompiler.js @@ -1,7 +1,7 @@ -const { Buffer } = require('safe-buffer'); -const encoding = require('encoding'); -const sharedFuncs = require('./shared'); -const contentType = 
require('content-type'); +import { Buffer } from 'safe-buffer'; +import encoding from 'encoding'; +import { HEADERS, foldLine, compareMsgid, formatCharset, generateHeader } from './shared.js'; +import contentType from 'content-type'; /** * Exposes general compiler function. Takes a translation @@ -10,7 +10,7 @@ const contentType = require('content-type'); * @param {Object} table Translation object * @return {Buffer} Compiled PO object */ -module.exports = function (table, options) { +export default function (table, options) { const compiler = new Compiler(table, options); return compiler.compile(); @@ -33,8 +33,8 @@ function Compiler (table = {}, options = {}) { headers = Object.keys(headers).reduce((result, key) => { const lowerKey = key.toLowerCase(); - if (sharedFuncs.HEADERS.has(lowerKey)) { - result[sharedFuncs.HEADERS.get(lowerKey)] = headers[key]; + if (HEADERS.has(lowerKey)) { + result[HEADERS.get(lowerKey)] = headers[key]; } else { result[key] = headers[key]; } @@ -178,7 +178,7 @@ Compiler.prototype._addPOString = function (key = '', value = '', obsolete = fal } if (foldLength > 0) { - lines = sharedFuncs.foldLine(value, foldLength); + lines = foldLine(value, foldLength); } else { // split only on new lines if (escapeCharacters) { @@ -205,11 +205,11 @@ Compiler.prototype._addPOString = function (key = '', value = '', obsolete = fal Compiler.prototype._handleCharset = function () { const ct = contentType.parse(this._table.headers['Content-Type'] || 'text/plain'); - const charset = sharedFuncs.formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); + const charset = formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); // clean up content-type charset independently using fallback if missing if (ct.parameters.charset) { - ct.parameters.charset = sharedFuncs.formatCharset(ct.parameters.charset); + ct.parameters.charset = formatCharset(ct.parameters.charset); } this._table.charset = charset; @@ -249,7 +249,7 @@ 
Compiler.prototype._prepareSection = function (section) { if (typeof sort === 'function') { response = response.sort(sort); } else { - response = response.sort(sharedFuncs.compareMsgid); + response = response.sort(compareMsgid); } } @@ -278,7 +278,7 @@ Compiler.prototype.compile = function () { const { eol } = this._options; response.unshift(this._drawBlock(headerBlock, { - msgstr: sharedFuncs.generateHeader(this._table.headers) + msgstr: generateHeader(this._table.headers) })); if (this._table.charset === 'utf-8' || this._table.charset === 'ascii') { diff --git a/lib/poparser.js b/lib/poparser.js index 6bc4490..77b8b12 100644 --- a/lib/poparser.js +++ b/lib/poparser.js @@ -1,10 +1,7 @@ -const encoding = require('encoding'); -const sharedFuncs = require('./shared'); -const Transform = require('readable-stream').Transform; -const util = require('util'); - -module.exports.parse = parse; -module.exports.stream = stream; +import encoding from 'encoding'; +import { formatCharset, parseNPluralFromHeadersSafely, parseHeader } from './shared.js'; +import { Transform } from 'readable-stream'; +import util from 'util'; /** * Parses a PO object into translation table @@ -13,7 +10,7 @@ module.exports.stream = stream; * @param {string | Buffer} input PO object * @param {Options} [options] Optional options with defaultCharset and validation */ -function parse (input, options = {}) { +export function parse (input, options = {}) { const parser = new Parser(input, options); return parser.parse(); @@ -26,7 +23,7 @@ function parse (input, options = {}) { * @param {Options} [options] Optional options with defaultCharset and validation * @param {import('readable-stream').TransformOptions} [transformOptions] Optional stream options */ -function stream (options = {}, transformOptions = {}) { +export function stream (options = {}, transformOptions = {}) { return new PoParserTransform(options, transformOptions); }; @@ -85,7 +82,7 @@ Parser.prototype._handleCharset = function (buf = '') { } 
if ((match = headers.match(/[; ]charset\s*=\s*([\w-]+)(?:[\s;]|\\n)*"\s*$/mi))) { - this._charset = sharedFuncs.formatCharset(match[1], this._charset); + this._charset = formatCharset(match[1], this._charset); } if (this._charset === 'utf-8') { @@ -486,8 +483,8 @@ Parser.prototype._normalize = function (tokens) { } if (!table.headers && !msgctxt && !tokens[i].msgid) { - table.headers = sharedFuncs.parseHeader(tokens[i].msgstr[0]); - nplurals = sharedFuncs.parseNPluralFromHeadersSafely(table.headers, nplurals); + table.headers = parseHeader(tokens[i].msgstr[0]); + nplurals = parseNPluralFromHeadersSafely(table.headers, nplurals); } this._validateToken(tokens[i], table.translations, msgctxt, nplurals); diff --git a/lib/shared.js b/lib/shared.js index d3bff61..8cf706c 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -1,13 +1,6 @@ -module.exports.parseHeader = parseHeader; -module.exports.parseNPluralFromHeadersSafely = parseNPluralFromHeadersSafely; -module.exports.generateHeader = generateHeader; -module.exports.formatCharset = formatCharset; -module.exports.foldLine = foldLine; -module.exports.compareMsgid = compareMsgid; - // see https://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html const PLURAL_FORMS = 'Plural-Forms'; -const HEADERS = new Map([ +export const HEADERS = new Map([ ['project-id-version', 'Project-Id-Version'], ['report-msgid-bugs-to', 'Report-Msgid-Bugs-To'], ['pot-creation-date', 'POT-Creation-Date'], @@ -20,8 +13,6 @@ const HEADERS = new Map([ ['plural-forms', PLURAL_FORMS] ]); -module.exports.HEADERS = HEADERS; - const PLURAL_FORM_HEADER_NPLURALS_REGEX = /nplurals\s*=\s*(?<nplurals>\d+)/; /** @@ -30,7 +21,7 @@ const PLURAL_FORM_HEADER_NPLURALS_REGEX = /nplurals\s*=\s*(?<nplurals>\d+)/; * @param {String} str Header string * @return {Object} An object of key-value pairs */ -function parseHeader (str = '') { +export function parseHeader (str = '') { return str.split('\n') .reduce((headers, line) => { const parts = line.split(':'); @@ -54,7 +45,7 @@ 
function parseHeader (str = '') { * @param {Object} [headers = {}] An object with parsed headers * @returns {number} Parsed result */ -function parseNPluralFromHeadersSafely (headers = {}, fallback = 1) { +export function parseNPluralFromHeadersSafely (headers = {}, fallback = 1) { const pluralForms = headers[PLURAL_FORMS]; if (!pluralForms) { @@ -74,7 +65,7 @@ function parseNPluralFromHeadersSafely (headers = {}, fallback = 1) { * @param {Object} header Object of key value pairs * @return {String} Header string */ -function generateHeader (header = {}) { +export function generateHeader (header = {}) { const keys = Object.keys(header) .filter(key => !!key); @@ -94,7 +85,7 @@ function generateHeader (header = {}) { * @param {String} charset Charset name * @return {String} Normalized charset name */ -function formatCharset (charset = 'iso-8859-1', defaultCharset = 'iso-8859-1') { +export function formatCharset (charset = 'iso-8859-1', defaultCharset = 'iso-8859-1') { return charset.toString() .toLowerCase() .replace(/^utf[-_]?(\d+)$/, 'utf-$1') @@ -112,7 +103,7 @@ function formatCharset (charset = 'iso-8859-1', defaultCharset = 'iso-8859-1') { * @param {Number} [maxLen=76] Maximum allowed length for folded lines * @return {Array} An array of lines */ -function foldLine (str, maxLen = 76) { +export function foldLine (str, maxLen = 76) { const lines = []; const len = str.length; let curLine = ''; @@ -157,7 +148,7 @@ function foldLine (str, maxLen = 76) { * @param {Object} object with msgid next * @returns {number} comparator index */ -function compareMsgid ({ msgid: left }, { msgid: right }) { +export function compareMsgid ({ msgid: left }, { msgid: right }) { if (left < right) { return -1; } diff --git a/package.json b/package.json index 270140e..d0429c2 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,7 @@ "type": "git", "url": "http://github.com/smhg/gettext-parser.git" }, + "type": "module", "engines": { "node": ">=18" }, diff --git a/test/.eslintrc.js 
b/test/.eslintrc.js deleted file mode 100644 index 8a46c38..0000000 --- a/test/.eslintrc.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - 'env': { - 'mocha': true - } -} diff --git a/test/.eslintrc.json b/test/.eslintrc.json new file mode 100644 index 0000000..7eeefc3 --- /dev/null +++ b/test/.eslintrc.json @@ -0,0 +1,5 @@ +{ + "env": { + "mocha": true + } +} diff --git a/test/mo-compiler-test.js b/test/mo-compiler-test.js index c735d01..014e3e4 100644 --- a/test/mo-compiler-test.js +++ b/test/mo-compiler-test.js @@ -1,8 +1,14 @@ -const chai = require('chai'); -const { promisify } = require('util'); -const path = require('path'); -const { mo: { compile } } = require('..'); -const readFile = promisify(require('fs').readFile); +import chai from 'chai'; +import { promisify } from 'util'; +import path from 'path'; +import { mo } from '../index.js'; +import { readFile as fsReadFile } from 'fs'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const readFile = promisify(fsReadFile); const expect = chai.expect; chai.config.includeStack = true; @@ -10,27 +16,27 @@ chai.config.includeStack = true; describe('MO Compiler', () => { describe('UTF-8', () => { it('should compile', async () => { - const [json, mo] = await Promise.all([ + const [json, moData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8.mo')) ]); - const compiled = compile(JSON.parse(json)); + const compiled = mo.compile(JSON.parse(json)); - expect(compiled.toString('utf8')).to.deep.equal(mo.toString('utf8')); + expect(compiled.toString('utf8')).to.deep.equal(moData.toString('utf8')); }); }); describe('Latin-13', () => { it('should compile', async () => { - const [json, mo] = await Promise.all([ + const [json, moData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'), 
readFile(path.join(__dirname, 'fixtures/latin13.mo')) ]); - const compiled = compile(JSON.parse(json)); + const compiled = mo.compile(JSON.parse(json)); - expect(compiled.toString('utf8')).to.equal(mo.toString('utf8')); + expect(compiled.toString('utf8')).to.equal(moData.toString('utf8')); }); }); }); diff --git a/test/mo-parser-test.js b/test/mo-parser-test.js index 44cd010..aa2ad48 100644 --- a/test/mo-parser-test.js +++ b/test/mo-parser-test.js @@ -1,8 +1,14 @@ -const chai = require('chai'); -const { promisify } = require('util'); -const path = require('path'); -const { mo: { parse } } = require('..'); -const readFile = promisify(require('fs').readFile); +import chai from 'chai'; +import { promisify } from 'util'; +import path from 'path'; +import { mo } from '../index.js'; +import { readFile as fsReadFile } from 'fs'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const readFile = promisify(fsReadFile); const expect = chai.expect; chai.config.includeStack = true; @@ -10,12 +16,12 @@ chai.config.includeStack = true; describe('MO Parser', () => { describe('UTF-8', () => { it('should parse', async () => { - const [mo, json] = await Promise.all([ + const [moData, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8.mo')), readFile(path.join(__dirname, 'fixtures/utf8-mo.json'), 'utf8') ]); - const parsed = parse(mo); + const parsed = mo.parse(moData); expect(parsed).to.deep.equal(JSON.parse(json)); }); @@ -23,12 +29,12 @@ describe('MO Parser', () => { describe('Latin-13', () => { it('should parse', async () => { - const [mo, json] = await Promise.all([ + const [moData, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/latin13.mo')), readFile(path.join(__dirname, 'fixtures/latin13-mo.json'), 'utf8') ]); - const parsed = parse(mo); + const parsed = mo.parse(moData); expect(parsed).to.deep.equal(JSON.parse(json)); }); diff --git 
a/test/po-compiler-test.js b/test/po-compiler-test.js index 0667ec8..3b62d7e 100644 --- a/test/po-compiler-test.js +++ b/test/po-compiler-test.js @@ -1,9 +1,15 @@ -const { EOL } = require('os'); -const { promisify } = require('util'); -const path = require('path'); -const readFile = promisify(require('fs').readFile); -const chai = require('chai'); -const { po: { compile } } = require('..'); +import { EOL } from 'os'; +import { promisify } from 'util'; +import path from 'path'; +import { readFile as fsReadFile } from 'fs'; +import chai from 'chai'; +import { po } from '../index.js'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const readFile = promisify(fsReadFile); const expect = chai.expect; chai.config.includeStack = true; @@ -11,44 +17,44 @@ chai.config.includeStack = true; describe('PO Compiler', () => { describe('Headers', () => { it('should keep tile casing', async () => { - const [json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/headers-case.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/headers-case.po'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { eol: EOL }) + const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); }); describe('UTF-8', () => { it('should compile', async () => { - const [json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8.po'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { eol: EOL }) + const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); }); describe('Latin-13', () => { it('should compile', async () => { - const 
[json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ // gettext-parser can only handle utf8 input (output will be the specified charset) readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/latin13.po'), 'latin1') ]); - const compiled = compile(JSON.parse(json), { eol: EOL }) + const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('latin1'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); }); @@ -59,7 +65,7 @@ describe('PO Compiler', () => { readFile(path.join(__dirname, 'fixtures/plural.pot'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { eol: EOL }) + const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); expect(compiled).to.equal(pot); @@ -68,27 +74,27 @@ describe('PO Compiler', () => { describe('Message folding', () => { it('should compile without folding', async () => { - const [json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-no-folding.po'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { foldLength: 0, eol: EOL }) + const compiled = po.compile(JSON.parse(json), { foldLength: 0, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); it('should compile with different folding', async () => { - const [json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-folding-100.po'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { foldLength: 100, eol: EOL }) + const compiled = po.compile(JSON.parse(json), { foldLength: 100, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); }); @@ -99,7 +105,7 @@ describe('PO Compiler', () => 
{ readFile(path.join(__dirname, 'fixtures/sort-test.pot'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { sort: true, eol: EOL }) + const compiled = po.compile(JSON.parse(json), { sort: true, eol: EOL }) .toString('utf8'); expect(compiled).to.equal(pot); @@ -132,9 +138,9 @@ describe('PO Compiler', () => { readFile(path.join(__dirname, 'fixtures/sort-with-msgctxt-test.pot'), 'utf8') ]); - const compiled1 = compile(JSON.parse(json1), { sort: compareMsgidAndMsgctxt, eol: EOL }) + const compiled1 = po.compile(JSON.parse(json1), { sort: compareMsgidAndMsgctxt, eol: EOL }) .toString('utf8'); - const compiled2 = compile(JSON.parse(json2), { sort: compareMsgidAndMsgctxt, eol: EOL }) + const compiled2 = po.compile(JSON.parse(json2), { sort: compareMsgidAndMsgctxt, eol: EOL }) .toString('utf8'); expect(compiled1).to.equal(compiled2); @@ -145,15 +151,15 @@ describe('PO Compiler', () => { describe('Skip escaping characters', () => { it('should compile without escaping characters', async () => { - const [json, po] = await Promise.all([ + const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-skip-escape-characters.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-skip-escape-characters.po'), 'utf8') ]); - const compiled = compile(JSON.parse(json), { escapeCharacters: false, foldLength: 0, eol: EOL }) + const compiled = po.compile(JSON.parse(json), { escapeCharacters: false, foldLength: 0, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(po); + expect(compiled).to.equal(poData); }); }); }); diff --git a/test/po-obsolete-test.js b/test/po-obsolete-test.js index 2698cdc..61b6bf2 100644 --- a/test/po-obsolete-test.js +++ b/test/po-obsolete-test.js @@ -1,9 +1,13 @@ -const { EOL } = require('os'); -const chai = require('chai'); -const { promisify } = require('util'); -const path = require('path'); -const fs = require('fs'); -const gettextParser = require('..'); +import { EOL } from 'os'; +import chai from 'chai'; 
+import { promisify } from 'util'; +import path from 'path'; +import fs from 'fs'; +import * as gettextParser from '../index.js'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); const readFile = promisify(fs.readFile); diff --git a/test/po-parser-test.js b/test/po-parser-test.js index 0bdd08c..4bdd7c8 100644 --- a/test/po-parser-test.js +++ b/test/po-parser-test.js @@ -1,8 +1,12 @@ -const chai = require('chai'); -const { promisify } = require('util'); -const path = require('path'); -const fs = require('fs'); -const gettextParser = require('..'); +import chai from 'chai'; +import { promisify } from 'util'; +import path from 'path'; +import fs from 'fs'; +import * as gettextParser from '../index.js'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); const readFile = promisify(fs.readFile); diff --git a/test/shared.js b/test/shared.js index b4e3cc7..3712006 100644 --- a/test/shared.js +++ b/test/shared.js @@ -1,10 +1,16 @@ 'use strict'; -const chai = require('chai'); -const { promisify } = require('util'); -const path = require('path'); -const { formatCharset, parseHeader, generateHeader, foldLine, parseNPluralFromHeadersSafely } = require('../lib/shared'); -const readFile = promisify(require('fs').readFile); +import chai from 'chai'; +import { promisify } from 'util'; +import path from 'path'; +import { formatCharset, parseHeader, generateHeader, foldLine, parseNPluralFromHeadersSafely } from '../lib/shared.js'; +import { readFile as fsReadFile } from 'fs'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const readFile = promisify(fsReadFile); const expect = chai.expect; chai.config.includeStack = true;