From 2ed0b087dffd517ae9b6204ef0969b33be7a4546 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Wed, 31 May 2023 21:26:29 +0100 Subject: [PATCH 01/14] Schema folder converted to compressed tarball file --- .../db-export/compress-files.service.js | 42 ----- app/services/db-export/db-export.service.js | 10 +- .../export-compressed-table.service.js | 11 +- .../db-export/schema-export.service.js | 25 ++- .../db-export/send-to-s3-bucket.service.js | 70 +++----- package-lock.json | 162 +++++++++++++++++- package.json | 3 +- .../db-export/compress-files.service.test.js | 49 ------ 8 files changed, 213 insertions(+), 159 deletions(-) delete mode 100644 app/services/db-export/compress-files.service.js delete mode 100644 test/services/db-export/compress-files.service.test.js diff --git a/app/services/db-export/compress-files.service.js b/app/services/db-export/compress-files.service.js deleted file mode 100644 index 80d4e5675c..0000000000 --- a/app/services/db-export/compress-files.service.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -/** - * Compresses a file at a specified path using gzip - * @module CompressFilesService - */ - -const fs = require('node:fs') -const { pipeline } = require('node:stream') -const { promisify } = require('node:util') -const zlib = require('node:zlib') - -/** - * Compresses a file using gzip and writes the compressed data to a new file with a '.gz' extension - * - * @param {String} filePath A string containing the file path that will be compressed - * - * @returns {Boolean} True if the file is compressed successfully and false if not - */ -async function go (filePath) { - if (!fs.existsSync(filePath)) { - return false - } - - await _compressFile(filePath) - - return `${filePath}.gz` -} - -async function _compressFile (filePath) { - const readStream = fs.createReadStream(filePath) - const writeStream = fs.createWriteStream(`${filePath}.gz`) - const compress = zlib.createGzip() - - const pipe = promisify(pipeline) - - await pipe(readStream, compress, writeStream) -} - -module.exports = { - go -} diff --git a/app/services/db-export/db-export.service.js b/app/services/db-export/db-export.service.js index 29a94ee80f..1410ca0b3c 100644 --- a/app/services/db-export/db-export.service.js +++ b/app/services/db-export/db-export.service.js @@ -11,11 +11,13 @@ const SchemaExportService = require('../db-export/schema-export.service.js') * Calls SchemaExportService giving it a schemaName */ async function go () { - const schemaNames = ['water', 'returns', 'crm', 'crm_v2', 'idm', 'permit'] + // const schemaNames = ['water', 'returns', 'crm', 'crm_v2', 'idm', 'permit'] - for (const schemaName of schemaNames) { - await SchemaExportService.go(schemaName) - } + // for (const schemaName of schemaNames) { + // await SchemaExportService.go(schemaName) + // } + + await SchemaExportService.go('water') } module.exports = { diff --git a/app/services/db-export/export-compressed-table.service.js b/app/services/db-export/export-compressed-table.service.js index 0c1dd61575..dd047dd434 100644 --- a/app/services/db-export/export-compressed-table.service.js +++ b/app/services/db-export/export-compressed-table.service.js @@ -1,15 +1,12 @@ 'use strict' /** - * Exports a table from the db, converts it to CSV and compresses it. 
The CSV file - * is then deleted whilst the compressed file remains, ready to be sent to our S3 bucket + * Exports a table from the db, converts it to CSV format and saves it to a file * * @module ExportCompressedTableService */ const ConvertToCSVService = require('./convert-to-csv.service.js') -const CompressFilesService = require('./compress-files.service.js') -const DeleteFileService = require('./delete-file.service.js') const ExportDataFilesService = require('./export-data-files.service.js') const FetchTableService = require('./fetch-table.service.js') @@ -26,11 +23,7 @@ async function go (tableName, schemaFolderPath, schemaName) { const tableConvertedToCsv = ConvertToCSVService.go(data.headers, data.rows) - const filePath = await ExportDataFilesService.go(tableConvertedToCsv, data.tableName, schemaFolderPath) - - await CompressFilesService.go(filePath) - - await DeleteFileService.go(filePath) + await ExportDataFilesService.go(tableConvertedToCsv, data.tableName, schemaFolderPath) } module.exports = { diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 1208a4c5e4..22fee6463a 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -7,6 +7,7 @@ const path = require('path') const os = require('os') +const tar = require('tar') const DeleteFolderService = require('./delete-folder.service.js') const ExportCompressedTableService = require('./export-compressed-table.service.js') @@ -15,7 +16,7 @@ const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js /** * Exports the specific schema by fetching table names, exporting each table, - * and uploading the schema folder to an S3 bucket + * uploading the schema folder to an S3 bucket and finally deleting the folder * * @param {String} schemaName The name of the database to export */ @@ -23,11 +24,14 @@ async function go (schemaName) { const tableNames = await FetchTableNames.go(schemaName) const schemaFolderPath = _folderToUpload(schemaName) + for (const tableName of tableNames) { await ExportCompressedTableService.go(tableName, schemaFolderPath, schemaName) } - await SendToS3BucketService.go(schemaFolderPath) + const tarSchemaPath = await _createTarFile(schemaName, schemaFolderPath) + + await SendToS3BucketService.go(tarSchemaPath) await DeleteFolderService.go(schemaFolderPath) } @@ -45,6 +49,23 @@ function _folderToUpload (schemaName) { return path.join(temporaryFilePath, schemaName) } +/** + * Create a compressed tarball (.tgz) from a given schema folder + * @param {String} schemaFolderPath + * + * @returns {String} The path to the created tarball file + */ +async function _createTarFile (schemaFolderPath) { + await tar.create( + { + gzip: true, + file: `${schemaFolderPath}.tgz` + }, + [schemaFolderPath] + ) + return `${schemaFolderPath}.tgz` +} + module.exports = { go } diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index 51d09947fa..712141bc99 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -1,79 +1,55 @@ 'use strict' /** - * Uploads a folders worth of files to our S3 bucket + * Sends a file to our AWS S3 bucket * @module SendToS3BucketService */ -const fsPromises = require('fs').promises +const fs = require('fs') const path = require('path') const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3') - const S3Config = 
require('../../../config/s3.config.js') /** - * Sends a schema folder with table files in to our AWS S3 Bucket using the folderPath that it receives + * Sends a file to our AWS S3 Bucket using the filePath that it receives * - * @param {String} folderPath A string containing the path of the folder to send to the S3 bucket + * @param {String} filePath A string containing the path of the file to send to the S3 bucket * - * @returns {Boolean} True if the folder is uploaded successfully and false if not + * @returns {Boolean} True if the file is uploaded successfully and false if not */ -async function go (folderPath) { +async function go (filePath) { const bucketName = S3Config.s3.bucket - const folderName = path.basename(folderPath) - - const files = await _getFilesFromFolder(folderPath) - - for (const file of files) { - try { - await _uploadToBucket(bucketName, folderName, file) - } catch (error) { - return false - } + const fileName = path.basename(filePath) + const fileContent = fs.readFileSync(filePath) + const params = { + Bucket: bucketName, + Key: `export/${fileName}`, + Body: fileContent } - return true -} - -/** - * Retrieves all the files within a folder - * - * @param {String} folderPath A string containing the path of the folder - * - * @returns {[]} An array of file paths within the folder - */ -async function _getFilesFromFolder (folderPath) { - const files = await fsPromises.readdir(folderPath) - return files.map((file) => { - return path.join(folderPath, file) - }) + return _uploadToBucket(params, fileName) } /** * Uploads a file to an Amazon S3 bucket using the given parameters * - * @param {Object} bucketName The name of the bucket we want to upload to - * @param {String} folderName The name of the folder to upload - * @param {String} filePath The path of the individual file to upload + * @param {Object} params The parameters to use when uploading the file + * @param {String} fileName The name of the file to upload * * @returns {Boolean} True if the file is uploaded successfully and false if not */ -async function _uploadToBucket (bucketName, folderName, filePath) { - const fileName = path.basename(filePath) - const fileContent = await fsPromises.readFile(filePath) - - const params = { - Bucket: bucketName, - Key: `export/${folderName}/${fileName}`, - Body: fileContent - } - +async function _uploadToBucket (params) { const s3Client = new S3Client() const command = new PutObjectCommand(params) - await s3Client.send(command) -} + try { + await s3Client.send(command) + return true + } catch (error) { + return false + } +} module.exports = { go } diff --git a/package-lock.json b/package-lock.json index 4d9a463811..ccaf10ade0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -27,7 +27,8 @@ "nunjucks": "^3.2.3", "objection": "^3.0.1", "pg": "^8.8.0", - "sass": "^1.56.2" + "sass": "^1.56.2", + "tar": "^6.1.15" }, "devDependencies": { "@hapi/code": "^9.0.2", @@ -3361,6 +3362,14 @@ "node": ">= 6" } }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "engines": { + "node": ">=10" + } + }, "node_modules/clone": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", @@ -4593,6 +4602,28 @@ "node": ">= 14.17" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -5806,6 +5837,48 @@ "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", "dev": true }, + "node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/mo-walk": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mo-walk/-/mo-walk-1.2.0.tgz", @@ -7317,6 +7390,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/tar": { + "version": "6.1.15", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz", + "integrity": "sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/tarn": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", @@ -7659,8 +7748,7 @@ "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yocto-queue": { "version": "0.1.0", @@ -10448,6 +10536,11 @@ } } }, + "chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" + }, "clone": { "version": 
"1.0.4", "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", @@ -11353,6 +11446,24 @@ "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.3.tgz", "integrity": "sha512-KqU0nnPMgIJcCOFTNJFEA8epcseEaoox4XZffTgy8jlI6pL/5EFyR54NRG7CnCJN0biY7q52DO3MH6/sJ/TKlQ==" }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "requires": { + "minipass": "^3.0.0" + }, + "dependencies": { + "minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "requires": { + "yallist": "^4.0.0" + } + } + } + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -12214,6 +12325,35 @@ "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", "dev": true }, + "minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==" + }, + "minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "requires": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "dependencies": { + "minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "requires": { + "yallist": "^4.0.0" + } + } + } + }, + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" + }, "mo-walk": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mo-walk/-/mo-walk-1.2.0.tgz", @@ -13295,6 +13435,19 @@ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, + "tar": { + "version": "6.1.15", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz", + "integrity": "sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==", + "requires": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + } + }, "tarn": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", @@ -13557,8 +13710,7 @@ "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yocto-queue": { "version": "0.1.0", diff --git a/package.json b/package.json index bfef19b233..77b68c607e 100644 --- a/package.json +++ b/package.json 
@@ -41,7 +41,8 @@ "nunjucks": "^3.2.3", "objection": "^3.0.1", "pg": "^8.8.0", - "sass": "^1.56.2" + "sass": "^1.56.2", + "tar": "^6.1.15" }, "devDependencies": { "@hapi/code": "^9.0.2", diff --git a/test/services/db-export/compress-files.service.test.js b/test/services/db-export/compress-files.service.test.js deleted file mode 100644 index 9a1ba2de24..0000000000 --- a/test/services/db-export/compress-files.service.test.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -// Test framework dependencies -const Lab = require('@hapi/lab') -const Code = require('@hapi/code') - -const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() -const { expect } = Code - -// Test helpers -const fs = require('fs') - -// Thing under test -const CompressFilesService = require('../../../app/services/db-export/compress-files.service.js') - -describe('Compress files service', () => { - let filePath - - describe('when successful', () => { - beforeEach(() => { - filePath = 'test/fixtures/compress-files.service.csv' - }) - - afterEach(() => { - // Delete the new file - fs.unlinkSync(`${filePath}.gz`) - }) - - it('compresses the csv file to a .gz file', async () => { - const result = await CompressFilesService.go(filePath) - - expect(result).to.equal(`${filePath}.gz`) - expect(fs.existsSync(`${filePath}.gz`)).to.equal(true) - }) - }) - - describe('when unsuccessful because the CSV file does not exist', () => { - beforeEach(() => { - filePath = '' - }) - - it('returns an error', async () => { - const result = await CompressFilesService.go(filePath) - - expect(result).to.equal(false) - expect(fs.existsSync((`${filePath}.gz`))).to.equal(false) - }) - }) -}) From 1f68eeb563f978d9ecae807db1924293e54347f2 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Wed, 31 May 2023 21:39:55 +0100 Subject: [PATCH 02/14] Fixing broken tests --- app/services/db-export/db-export.service.js | 10 ++++------ .../export-compressed-table.service.test.js | 8 -------- .../send-to-s3-bucket.service.test.js | 18 +++++++++--------- 3 files changed, 13 insertions(+), 23 deletions(-) diff --git a/app/services/db-export/db-export.service.js b/app/services/db-export/db-export.service.js index 1410ca0b3c..29a94ee80f 100644 --- a/app/services/db-export/db-export.service.js +++ b/app/services/db-export/db-export.service.js @@ -11,13 +11,11 @@ const SchemaExportService = require('../db-export/schema-export.service.js') * Calls SchemaExportService giving it a schemaName */ async function go () { - // const schemaNames = ['water', 'returns', 'crm', 'crm_v2', 'idm', 'permit'] + const schemaNames = ['water', 'returns', 'crm', 'crm_v2', 'idm', 'permit'] - // for (const schemaName of schemaNames) { - // await SchemaExportService.go(schemaName) - // } - - await SchemaExportService.go('water') + for (const schemaName of schemaNames) { + await SchemaExportService.go(schemaName) + } } module.exports = { diff --git a/test/services/db-export/export-compressed-table.service.test.js b/test/services/db-export/export-compressed-table.service.test.js index c7e9cddf80..381c02dd77 100644 --- a/test/services/db-export/export-compressed-table.service.test.js +++ b/test/services/db-export/export-compressed-table.service.test.js @@ -10,8 +10,6 @@ const { expect } = Code // Things we need to stub const ConvertToCSVService = require('../../../app/services/db-export/convert-to-csv.service.js') -const CompressFilesService = require('../../../app/services/db-export/compress-files.service.js') -const DeleteFileService = 
require('../../../app/services/db-export/delete-file.service.js') const ExportDataFilesService = require('../../../app/services/db-export/export-data-files.service.js') const FetchTableService = require('../../../app/services/db-export/fetch-table.service.js') @@ -20,8 +18,6 @@ const ExportCompressedTableService = require('../../../app/services/db-export/ex describe('Table Export service', () => { let convertToCSVServiceStub - let compressFilesServiceStub - let deleteFileServiceStub let exportDataFilesServiceStub let fetchTableServiceStub @@ -29,8 +25,6 @@ describe('Table Export service', () => { fetchTableServiceStub = Sinon.stub(FetchTableService, 'go').resolves({ headers: [], rows: [] }) convertToCSVServiceStub = Sinon.stub(ConvertToCSVService, 'go').resolves('csvData') exportDataFilesServiceStub = Sinon.stub(ExportDataFilesService, 'go').resolves('filePath') - compressFilesServiceStub = Sinon.stub(CompressFilesService, 'go').resolves('compressedFilePath') - deleteFileServiceStub = Sinon.stub(DeleteFileService, 'go').resolves() }) afterEach(() => { @@ -41,8 +35,6 @@ describe('Table Export service', () => { await ExportCompressedTableService.go() expect(convertToCSVServiceStub.called).to.be.true() - expect(compressFilesServiceStub.called).to.be.true() - expect(deleteFileServiceStub.called).to.be.true() expect(exportDataFilesServiceStub.called).to.be.true() expect(fetchTableServiceStub.called).to.be.true() }) diff --git a/test/services/db-export/send-to-s3-bucket.service.test.js b/test/services/db-export/send-to-s3-bucket.service.test.js index 0203c2b301..d8efaeba33 100644 --- a/test/services/db-export/send-to-s3-bucket.service.test.js +++ b/test/services/db-export/send-to-s3-bucket.service.test.js @@ -27,10 +27,10 @@ describe('Send to S3 bucket service', () => { Sinon.restore() }) - const folderPath = 'test/fixtures' + const filePath = 'test/fixtures/compress-files.service.csv' it('uploads a file to the S3 bucket', async () => { - await SendToS3BucketService.go(folderPath) + await SendToS3BucketService.go(filePath) // Test that the S3 Client was called once expect(s3Stub.calledOnce).to.be.true() @@ -41,21 +41,21 @@ describe('Send to S3 bucket service', () => { }) it('returns true', async () => { - const result = await SendToS3BucketService.go(folderPath) + const result = await SendToS3BucketService.go(filePath) expect(result).to.be.true() }) }) describe('when unsuccessful', () => { - describe('because an invalid folder name is given', () => { - const folderName = 'FakeFolder' + describe('because an invalid file name is given', () => { + const fileName = 'FakeFolder' it('throws an error', async () => { - const result = await expect(SendToS3BucketService.go(folderName)).to.reject() + const result = await expect(SendToS3BucketService.go(fileName)).to.reject() expect(result).to.be.an.error() - expect(result.message).to.equal(`ENOENT: no such file or directory, scandir '${folderName}'`) + expect(result.message).to.startWith('ENOENT') }) }) @@ -65,10 +65,10 @@ describe('Send to S3 bucket service', () => { s3Stub = Sinon.stub(S3Client.prototype, 'send').rejects() }) - const folderPath = 'test/fixtures' + const filePath = 'test/fixtures/compress-files.service.csv' it('returns false', async () => { - const result = await SendToS3BucketService.go(folderPath) + const result = await SendToS3BucketService.go(filePath) expect(result).to.be.false() }) From 0c3b0e3ff4cca08f5a5111b80ac1ed74b46dc4d4 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Wed, 31 May 2023 21:46:17 +0100 Subject: [PATCH 03/14] 
Fix code bugs --- app/services/db-export/schema-export.service.js | 2 +- app/services/db-export/send-to-s3-bucket.service.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 22fee6463a..2b6d3f4f4f 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -29,7 +29,7 @@ async function go (schemaName) { await ExportCompressedTableService.go(tableName, schemaFolderPath, schemaName) } - const tarSchemaPath = await _createTarFile(schemaName, schemaFolderPath) + const tarSchemaPath = await _createTarFile(schemaFolderPath) await SendToS3BucketService.go(tarSchemaPath) diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index 712141bc99..dfb78b6f77 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -27,7 +27,7 @@ async function go (filePath) { Body: fileContent } - return _uploadToBucket(params, fileName) + return _uploadToBucket(params) } /** From 927cdd6033314b8eb3546e62a70f71ceb640cfd1 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 10:52:47 +0100 Subject: [PATCH 04/14] Make tarball its own service --- .../db-export/compressed-tarball.service.js | 29 ++++++++ .../db-export/fetch-table-names.service.js | 2 +- .../db-export/schema-export.service.js | 25 +------ .../db-export/schema-export.service.test.js | 73 +++++++++++++++++++ 4 files changed, 107 insertions(+), 22 deletions(-) create mode 100644 app/services/db-export/compressed-tarball.service.js create mode 100644 test/services/db-export/schema-export.service.test.js diff --git a/app/services/db-export/compressed-tarball.service.js b/app/services/db-export/compressed-tarball.service.js new file mode 100644 index 0000000000..bed05b29b6 --- /dev/null +++ b/app/services/db-export/compressed-tarball.service.js @@ -0,0 +1,29 @@ +'use strict' + +/** + * Creates a compressed tarball (.tgz) from a given schema folder + * @module CompressedTarballService + */ + +const tar = require('tar') + +/** + * Create a compressed tarball (.tgz) from a given schema folder + * @param {String} schemaFolderPath + * + * @returns {String} The path to the created tarball file + */ +async function go (schemaFolderPath) { + await tar.create( + { + gzip: true, + file: `${schemaFolderPath}.tgz` + }, + [schemaFolderPath] + ) + return `${schemaFolderPath}.tgz` +} + +module.exports = { + go +} diff --git a/app/services/db-export/fetch-table-names.service.js b/app/services/db-export/fetch-table-names.service.js index 7e3db83890..a8846abfef 100644 --- a/app/services/db-export/fetch-table-names.service.js +++ b/app/services/db-export/fetch-table-names.service.js @@ -2,7 +2,7 @@ /** * Fetches all the table names from a given schema - * @module FetchTableNames + * @module FetchTableNamesService */ const { db } = require('../../../db/db.js') diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 2b6d3f4f4f..32e564e7ec 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -7,11 +7,11 @@ const path = require('path') const os = require('os') -const tar = require('tar') +const CompressedTarballService = require('../db-export/compressed-tarball.service.js') const DeleteFolderService = require('./delete-folder.service.js') 
const ExportCompressedTableService = require('./export-compressed-table.service.js') -const FetchTableNames = require('../db-export/fetch-table-names.service.js') +const FetchTableNamesService = require('../db-export/fetch-table-names.service.js') const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js') /** @@ -21,7 +21,7 @@ const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js * @param {String} schemaName The name of the database to export */ async function go (schemaName) { - const tableNames = await FetchTableNames.go(schemaName) + const tableNames = await FetchTableNamesService.go(schemaName) const schemaFolderPath = _folderToUpload(schemaName) @@ -29,7 +29,7 @@ async function go (schemaName) { await ExportCompressedTableService.go(tableName, schemaFolderPath, schemaName) } - const tarSchemaPath = await _createTarFile(schemaFolderPath) + const tarSchemaPath = await CompressedTarballService.go(schemaFolderPath) await SendToS3BucketService.go(tarSchemaPath) @@ -49,23 +49,6 @@ function _folderToUpload (schemaName) { return path.join(temporaryFilePath, schemaName) } -/** - * Create a compressed tarball (.tgz) from a given schema folder - * @param {String} schemaFolderPath - * - * @returns {String} The path to the created tarball file - */ -async function _createTarFile (schemaFolderPath) { - await tar.create( - { - gzip: true, - file: `${schemaFolderPath}.tgz` - }, - [schemaFolderPath] - ) - return `${schemaFolderPath}.tgz` -} - module.exports = { go } diff --git a/test/services/db-export/schema-export.service.test.js b/test/services/db-export/schema-export.service.test.js new file mode 100644 index 0000000000..369298efc1 --- /dev/null +++ b/test/services/db-export/schema-export.service.test.js @@ -0,0 +1,73 @@ +'use strict' + +// Test framework dependencies +const Lab = require('@hapi/lab') +const Code = require('@hapi/code') +const Sinon = require('sinon') + +const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() +const { expect } = Code + +// Things we need to stub +const CompressedTarBallService = require('../../../app/services/db-export/compressed-tarball.service.js') +const DeleteFolderService = require('../../../app/services/db-export/delete-folder.service.js') +const ExportCompressedTableService = require('../../../app/services/db-export/export-compressed-table.service.js') +const FetchTableNamesService = require('../../../app/services/db-export/fetch-table-names.service.js') +const SendToS3BucketService = require('../../../app/services/db-export/send-to-s3-bucket.service.js') + +// Thing under test +const SchemaExportService = require('../../../app/services/db-export/schema-export.service.js') + +describe('Schema export service', () => { + let FetchTableNamesServiceStub + let CompressedTarballServiceStub + let SendToS3BucketServiceStub + let DeleteFolderServiceStub + let ExportCompressedTableServiceStub + + beforeEach(() => { + FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go').resolves([]) + CompressedTarballServiceStub = Sinon.stub(CompressedTarBallService, 'go').resolves('/tmp/water') + SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go').resolves() + DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() + ExportCompressedTableServiceStub = Sinon.stub(ExportCompressedTableService, 'go').resolves() + }) + + afterEach(() => { + Sinon.restore() + }) + + it('calls the different services that export a schema', async () => { + await 
SchemaExportService.go('water') + + expect(FetchTableNamesServiceStub.called).to.be.true() + expect(CompressedTarballServiceStub.called).to.be.true() + expect(SendToS3BucketServiceStub.called).to.be.true() + expect(DeleteFolderServiceStub.called).to.be.true() + }) + + it('calls the ExportCompressedTableService with the different table names as arguments', async () => { + const tableNames = [] + + await SchemaExportService.go('water') + + const allArgs = ExportCompressedTableServiceStub.getCalls().flatMap((call) => { + return call.args + }) + + expect(allArgs).to.equal(tableNames) + }) + + it('creates a folder name for the schema table files to be saved in', async () => { + const schemaName = 'water' + const expectedFolderPath = ['/tmp/water'] + + await SchemaExportService.go(schemaName) + + const args = SendToS3BucketServiceStub.getCalls().flatMap((call) => { + return call.args + }) + + expect(args).to.equal(expectedFolderPath) + }) +}) From 38387ed40d093b14015fe76373f9837de8d50b40 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 11:29:02 +0100 Subject: [PATCH 05/14] Add testing for tarball service --- .../compressed-tarball.service.test.js | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 test/services/db-export/compressed-tarball.service.test.js diff --git a/test/services/db-export/compressed-tarball.service.test.js b/test/services/db-export/compressed-tarball.service.test.js new file mode 100644 index 0000000000..f2e9b7f0a6 --- /dev/null +++ b/test/services/db-export/compressed-tarball.service.test.js @@ -0,0 +1,37 @@ +'use strict' + +// Test framework dependencies +const Lab = require('@hapi/lab') +const Code = require('@hapi/code') +const Sinon = require('sinon') + +const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() +const { expect } = Code + +// Things we need to stub +const tar = require('tar') + +// Thing under test +const CompressedTarBallService = require('../../../app/services/db-export/compressed-tarball.service') + +describe('Compressed tarball service', () => { + let tarCreateStub + + beforeEach(() => { + tarCreateStub = Sinon.stub(tar, 'create').resolves() + }) + + afterEach(() => { + Sinon.restore() + }) + + it('creates a compressed tarball from the given schema folder', async () => { + const schemaFolderPath = '/tmp/water' + const expectedTarballPath = '/tmp/water.tgz' + + const result = await CompressedTarBallService.go(schemaFolderPath) + + expect(tarCreateStub.calledOnce).to.be.true() + expect(result).to.equal(expectedTarballPath) + }) +}) From 6fa4e0948a5a232e91c879f6223604ed95680d64 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 16:55:21 +0100 Subject: [PATCH 06/14] Code Refactor --- ...ce.js => compress-schema-folder.service.js} | 6 ++++-- ...able.service.js => export-table.service.js} | 2 +- .../db-export/schema-export.service.js | 8 ++++---- .../db-export/send-to-s3-bucket.service.js | 6 +++--- ... 
=> compress-schema-folder.service.test.js} | 6 +++--- ...ce.test.js => export-table.service.test.js} | 4 ++-- .../db-export/schema-export.service.test.js | 18 +++++++++--------- 7 files changed, 26 insertions(+), 24 deletions(-) rename app/services/db-export/{compressed-tarball.service.js => compress-schema-folder.service.js} (83%) rename app/services/db-export/{export-compressed-table.service.js => export-table.service.js} (96%) rename test/services/db-export/{compressed-tarball.service.test.js => compress-schema-folder.service.test.js} (75%) rename test/services/db-export/{export-compressed-table.service.test.js => export-table.service.test.js} (89%) diff --git a/app/services/db-export/compressed-tarball.service.js b/app/services/db-export/compress-schema-folder.service.js similarity index 83% rename from app/services/db-export/compressed-tarball.service.js rename to app/services/db-export/compress-schema-folder.service.js index bed05b29b6..593e7fe89e 100644 --- a/app/services/db-export/compressed-tarball.service.js +++ b/app/services/db-export/compress-schema-folder.service.js @@ -2,7 +2,7 @@ /** * Creates a compressed tarball (.tgz) from a given schema folder - * @module CompressedTarballService + * @module CompressSchemaFolderService */ const tar = require('tar') @@ -14,10 +14,12 @@ const tar = require('tar') * @returns {String} The path to the created tarball file */ async function go (schemaFolderPath) { + const file = `${schemaFolderPath}.tgz` + await tar.create( { gzip: true, - file: `${schemaFolderPath}.tgz` + file }, [schemaFolderPath] ) diff --git a/app/services/db-export/export-compressed-table.service.js b/app/services/db-export/export-table.service.js similarity index 96% rename from app/services/db-export/export-compressed-table.service.js rename to app/services/db-export/export-table.service.js index dd047dd434..d2a4bafd2a 100644 --- a/app/services/db-export/export-compressed-table.service.js +++ b/app/services/db-export/export-table.service.js @@ -3,7 +3,7 @@ /** * Exports a table from the db, converts it to CSV format and saves it to a file * - * @module ExportCompressedTableService + * @module ExportTableService */ const ConvertToCSVService = require('./convert-to-csv.service.js') diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 32e564e7ec..f23861035b 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -8,9 +8,9 @@ const path = require('path') const os = require('os') -const CompressedTarballService = require('../db-export/compressed-tarball.service.js') +const CompressSchemaFolderService = require('../db-export/compress-schema-folder.service.js') const DeleteFolderService = require('./delete-folder.service.js') -const ExportCompressedTableService = require('./export-compressed-table.service.js') +const ExportTableService = require('./export-table.service.js') const FetchTableNamesService = require('../db-export/fetch-table-names.service.js') const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js') @@ -26,10 +26,10 @@ async function go (schemaName) { const schemaFolderPath = _folderToUpload(schemaName) for (const tableName of tableNames) { - await ExportCompressedTableService.go(tableName, schemaFolderPath, schemaName) + await ExportTableService.go(tableName, schemaFolderPath, schemaName) } - const tarSchemaPath = await CompressedTarballService.go(schemaFolderPath) + const tarSchemaPath = await 
CompressSchemaFolderService.go(schemaFolderPath) await SendToS3BucketService.go(tarSchemaPath) diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index dfb78b6f77..cb5e487ce8 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -5,7 +5,7 @@ * @module SendToS3BucketService */ -const fs = require('fs') +const fsPromises = require('fs').promises const path = require('path') const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3') const S3Config = require('../../../config/s3.config.js') @@ -20,14 +20,14 @@ const S3Config = require('../../../config/s3.config.js') async function go (filePath) { const bucketName = S3Config.s3.bucket const fileName = path.basename(filePath) - const fileContent = fs.readFileSync(filePath) + const fileContent = await fsPromises.readFile(filePath) const params = { Bucket: bucketName, Key: `export/${fileName}`, Body: fileContent } - return _uploadToBucket(params) + return await _uploadToBucket(params) } /** diff --git a/test/services/db-export/compressed-tarball.service.test.js b/test/services/db-export/compress-schema-folder.service.test.js similarity index 75% rename from test/services/db-export/compressed-tarball.service.test.js rename to test/services/db-export/compress-schema-folder.service.test.js index f2e9b7f0a6..060cbb8824 100644 --- a/test/services/db-export/compressed-tarball.service.test.js +++ b/test/services/db-export/compress-schema-folder.service.test.js @@ -12,9 +12,9 @@ const { expect } = Code const tar = require('tar') // Thing under test -const CompressedTarBallService = require('../../../app/services/db-export/compressed-tarball.service') +const CompressSchemaFolderService = require('../../../app/services/db-export/compress-schema-folder.service.js') -describe('Compressed tarball service', () => { +describe('Compressed schema folder service', () => { let tarCreateStub beforeEach(() => { @@ -29,7 +29,7 @@ describe('Compressed tarball service', () => { const schemaFolderPath = '/tmp/water' const expectedTarballPath = '/tmp/water.tgz' - const result = await CompressedTarBallService.go(schemaFolderPath) + const result = await CompressSchemaFolderService.go(schemaFolderPath) expect(tarCreateStub.calledOnce).to.be.true() expect(result).to.equal(expectedTarballPath) diff --git a/test/services/db-export/export-compressed-table.service.test.js b/test/services/db-export/export-table.service.test.js similarity index 89% rename from test/services/db-export/export-compressed-table.service.test.js rename to test/services/db-export/export-table.service.test.js index 381c02dd77..200864c06a 100644 --- a/test/services/db-export/export-compressed-table.service.test.js +++ b/test/services/db-export/export-table.service.test.js @@ -14,7 +14,7 @@ const ExportDataFilesService = require('../../../app/services/db-export/export-d const FetchTableService = require('../../../app/services/db-export/fetch-table.service.js') // Thing under test -const ExportCompressedTableService = require('../../../app/services/db-export/export-compressed-table.service.js') +const ExportTableService = require('../../../app/services/db-export/export-table.service.js') describe('Table Export service', () => { let convertToCSVServiceStub @@ -32,7 +32,7 @@ describe('Table Export service', () => { }) it('runs the db export services', async () => { - await ExportCompressedTableService.go() + await ExportTableService.go() 
expect(convertToCSVServiceStub.called).to.be.true() expect(exportDataFilesServiceStub.called).to.be.true() diff --git a/test/services/db-export/schema-export.service.test.js b/test/services/db-export/schema-export.service.test.js index 369298efc1..b2c3588b7e 100644 --- a/test/services/db-export/schema-export.service.test.js +++ b/test/services/db-export/schema-export.service.test.js @@ -9,9 +9,9 @@ const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() const { expect } = Code // Things we need to stub -const CompressedTarBallService = require('../../../app/services/db-export/compressed-tarball.service.js') +const CompressSchemaFolderService = require('../../../app/services/db-export/compress-schema-folder.service.js') const DeleteFolderService = require('../../../app/services/db-export/delete-folder.service.js') -const ExportCompressedTableService = require('../../../app/services/db-export/export-compressed-table.service.js') +const ExportTableService = require('../../../app/services/db-export/export-table.service.js') const FetchTableNamesService = require('../../../app/services/db-export/fetch-table-names.service.js') const SendToS3BucketService = require('../../../app/services/db-export/send-to-s3-bucket.service.js') @@ -20,17 +20,17 @@ const SchemaExportService = require('../../../app/services/db-export/schema-expo describe('Schema export service', () => { let FetchTableNamesServiceStub - let CompressedTarballServiceStub + let CompressSchemaFolderServiceStub let SendToS3BucketServiceStub let DeleteFolderServiceStub - let ExportCompressedTableServiceStub + let ExportTableServiceStub beforeEach(() => { FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go').resolves([]) - CompressedTarballServiceStub = Sinon.stub(CompressedTarBallService, 'go').resolves('/tmp/water') + CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go').resolves('/tmp/water') SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go').resolves() DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() - ExportCompressedTableServiceStub = Sinon.stub(ExportCompressedTableService, 'go').resolves() + ExportTableServiceStub = Sinon.stub(ExportTableService, 'go').resolves() }) afterEach(() => { @@ -41,17 +41,17 @@ describe('Schema export service', () => { await SchemaExportService.go('water') expect(FetchTableNamesServiceStub.called).to.be.true() - expect(CompressedTarballServiceStub.called).to.be.true() + expect(CompressSchemaFolderServiceStub.called).to.be.true() expect(SendToS3BucketServiceStub.called).to.be.true() expect(DeleteFolderServiceStub.called).to.be.true() }) - it('calls the ExportCompressedTableService with the different table names as arguments', async () => { + it('calls the ExportTableService with the different table names as arguments', async () => { const tableNames = [] await SchemaExportService.go('water') - const allArgs = ExportCompressedTableServiceStub.getCalls().flatMap((call) => { + const allArgs = ExportTableServiceStub.getCalls().flatMap((call) => { return call.args }) From e56871c66745eff9fe066d15818c552a8b873d33 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 20:31:41 +0100 Subject: [PATCH 07/14] Code refactor --- app/services/db-export/compress-schema-folder.service.js | 2 +- test/services/db-export/compress-schema-folder.service.test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/services/db-export/compress-schema-folder.service.js 
b/app/services/db-export/compress-schema-folder.service.js index 593e7fe89e..71e6bc6350 100644 --- a/app/services/db-export/compress-schema-folder.service.js +++ b/app/services/db-export/compress-schema-folder.service.js @@ -23,7 +23,7 @@ async function go (schemaFolderPath) { }, [schemaFolderPath] ) - return `${schemaFolderPath}.tgz` + return file } module.exports = { diff --git a/test/services/db-export/compress-schema-folder.service.test.js b/test/services/db-export/compress-schema-folder.service.test.js index 060cbb8824..9eab2b2c4c 100644 --- a/test/services/db-export/compress-schema-folder.service.test.js +++ b/test/services/db-export/compress-schema-folder.service.test.js @@ -14,7 +14,7 @@ const tar = require('tar') // Thing under test const CompressSchemaFolderService = require('../../../app/services/db-export/compress-schema-folder.service.js') -describe('Compressed schema folder service', () => { +describe('Compress schema folder service', () => { let tarCreateStub beforeEach(() => { From 70cc5ae1d18132cd4f7503a07334b692fc7aad5e Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 20:35:16 +0100 Subject: [PATCH 08/14] Adding line break --- app/services/db-export/send-to-s3-bucket.service.js | 1 + 1 file changed, 1 insertion(+) diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index cb5e487ce8..d73dc84da7 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -8,6 +8,7 @@ const fsPromises = require('fs').promises const path = require('path') const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3') + const S3Config = require('../../../config/s3.config.js') /** From 8a371425a736c9f845e4cd940b4d925b8d3d0526 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 20:56:26 +0100 Subject: [PATCH 09/14] Adding error handling to the top service --- .../compress-schema-folder.service.js | 1 + .../db-export/schema-export.service.js | 24 +++++++++++-------- .../db-export/send-to-s3-bucket.service.js | 11 +++------ 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/app/services/db-export/compress-schema-folder.service.js b/app/services/db-export/compress-schema-folder.service.js index 71e6bc6350..275470ee60 100644 --- a/app/services/db-export/compress-schema-folder.service.js +++ b/app/services/db-export/compress-schema-folder.service.js @@ -23,6 +23,7 @@ async function go (schemaFolderPath) { }, [schemaFolderPath] ) + return file } diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index f23861035b..f5706dfc94 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -9,6 +9,7 @@ const path = require('path') const os = require('os') const CompressSchemaFolderService = require('../db-export/compress-schema-folder.service.js') +const DeleteFileService = require('./delete-file.service.js') const DeleteFolderService = require('./delete-folder.service.js') const ExportTableService = require('./export-table.service.js') const FetchTableNamesService = require('../db-export/fetch-table-names.service.js') @@ -21,19 +22,22 @@ const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js * @param {String} schemaName The name of the database to export */ async function go (schemaName) { - const tableNames = await FetchTableNamesService.go(schemaName) - - const schemaFolderPath 
= _folderToUpload(schemaName) - - for (const tableName of tableNames) { - await ExportTableService.go(tableName, schemaFolderPath, schemaName) + try { + const tableNames = await FetchTableNamesService.go(schemaName) + const schemaFolderPath = _folderToUpload(schemaName) + + for (const tableName of tableNames) { + await ExportTableService.go(tableName, schemaFolderPath, schemaName) + } + + const tarSchemaPath = await CompressSchemaFolderService.go(schemaFolderPath) + await SendToS3BucketService.go(tarSchemaPath) + } catch (error) { + global.GlobalNotifier.omfg(`Error: Failed to export schema ${schemaName}`, error.message) } - const tarSchemaPath = await CompressSchemaFolderService.go(schemaFolderPath) - - await SendToS3BucketService.go(tarSchemaPath) - await DeleteFolderService.go(schemaFolderPath) + await DeleteFileService.go(tarSchemaPath) } /** diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index d73dc84da7..b4e37e6245 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -28,7 +28,7 @@ async function go (filePath) { Body: fileContent } - return await _uploadToBucket(params) + await _uploadToBucket(params) } /** @@ -43,14 +43,9 @@ async function _uploadToBucket (params) { const s3Client = new S3Client() const command = new PutObjectCommand(params) - try { - await s3Client.send(command) - - return true - } catch (error) { - return false - } + await s3Client.send(command) } + module.exports = { go } From 1817518af5b67cc8807a0b4c09d6433081a3b91f Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 21:21:44 +0100 Subject: [PATCH 10/14] Fixing error handling and tests --- app/services/db-export/delete-file.service.js | 4 +- .../db-export/schema-export.service.js | 4 +- .../db-export/send-to-s3-bucket.service.js | 7 ++- .../db-export/delete-file.service.test.js | 9 ++-- .../send-to-s3-bucket.service.test.js | 44 +++++-------------- 5 files changed, 26 insertions(+), 42 deletions(-) diff --git a/app/services/db-export/delete-file.service.js b/app/services/db-export/delete-file.service.js index dd3a2c81d2..6ee0c2882f 100644 --- a/app/services/db-export/delete-file.service.js +++ b/app/services/db-export/delete-file.service.js @@ -12,7 +12,9 @@ const fs = require('fs') * @param {String} filePath The file path that we want to delete */ async function go (filePath) { - fs.unlinkSync(filePath) + if (fs.existsSync(filePath)) { + fs.unlinkSync(filePath) + } } module.exports = { diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index f5706dfc94..785b4932f4 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -22,9 +22,11 @@ const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js * @param {String} schemaName The name of the database to export */ async function go (schemaName) { + const schemaFolderPath = _folderToUpload(schemaName) + let tarSchemaPath + try { const tableNames = await FetchTableNamesService.go(schemaName) - const schemaFolderPath = _folderToUpload(schemaName) for (const tableName of tableNames) { await ExportTableService.go(tableName, schemaFolderPath, schemaName) diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index b4e37e6245..6f00db8a52 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ 
b/app/services/db-export/send-to-s3-bucket.service.js @@ -5,7 +5,8 @@ * @module SendToS3BucketService */ -const fsPromises = require('fs').promises +const fs = require('fs') +const fsPromises = fs.promises const path = require('path') const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3') @@ -19,6 +20,10 @@ const S3Config = require('../../../config/s3.config.js') * @returns {Boolean} True if the file is uploaded successfully and false if not */ async function go (filePath) { + if (!fs.existsSync(filePath)) { + throw new Error() + } + const bucketName = S3Config.s3.bucket const fileName = path.basename(filePath) const fileContent = await fsPromises.readFile(filePath) diff --git a/test/services/db-export/delete-file.service.test.js b/test/services/db-export/delete-file.service.test.js index b3002ef065..9fd3c32c67 100644 --- a/test/services/db-export/delete-file.service.test.js +++ b/test/services/db-export/delete-file.service.test.js @@ -41,14 +41,11 @@ describe('Delete File service', () => { }) }) - describe('When an error occurs', () => { - it('throws an error', async () => { + describe('When a file does not exist', () => { + it('returns without throwing an error', async () => { const fakeFile = 'FAKE_FILE' - const result = await expect(DeleteFileService.go(fakeFile)).to.reject() - - expect(result).to.be.an.error() - expect(result.message).to.equal(`ENOENT: no such file or directory, unlink '${fakeFile}'`) + await expect(DeleteFileService.go(fakeFile)).not.to.reject() }) }) }) diff --git a/test/services/db-export/send-to-s3-bucket.service.test.js b/test/services/db-export/send-to-s3-bucket.service.test.js index d8efaeba33..c5f20d515e 100644 --- a/test/services/db-export/send-to-s3-bucket.service.test.js +++ b/test/services/db-export/send-to-s3-bucket.service.test.js @@ -17,16 +17,16 @@ const SendToS3BucketService = require('../../../app/services/db-export/send-to-s describe('Send to S3 bucket service', () => { let s3Stub - describe('when successful', () => { - beforeEach(() => { - // Stub the S3 Client's send method, which is used to run the 'put object' command - s3Stub = Sinon.stub(S3Client.prototype, 'send') - }) + beforeEach(() => { + // Stub the S3 Client's send method, which is used to run the 'put object' command + s3Stub = Sinon.stub(S3Client.prototype, 'send') + }) - afterEach(() => { - Sinon.restore() - }) + afterEach(() => { + Sinon.restore() + }) + describe('when successful', () => { const filePath = 'test/fixtures/compress-files.service.csv' it('uploads a file to the S3 bucket', async () => { @@ -39,38 +39,16 @@ describe('Send to S3 bucket service', () => { const calledCommand = s3Stub.getCall(0).firstArg expect(calledCommand).to.be.an.instanceof(PutObjectCommand) }) - - it('returns true', async () => { - const result = await SendToS3BucketService.go(filePath) - - expect(result).to.be.true() - }) }) describe('when unsuccessful', () => { describe('because an invalid file name is given', () => { const fileName = 'FakeFolder' - it('throws an error', async () => { - const result = await expect(SendToS3BucketService.go(fileName)).to.reject() - - expect(result).to.be.an.error() - expect(result.message).to.startWith('ENOENT') - }) - }) - - describe('because there is an issue with the upload', () => { - beforeEach(() => { - // Stub the S3 Client's send method, which is used to run the 'put object' command - s3Stub = Sinon.stub(S3Client.prototype, 'send').rejects() - }) - - const filePath = 'test/fixtures/compress-files.service.csv' - - it('returns false', async () => { - 
const result = await SendToS3BucketService.go(filePath) + it('does not upload a file to the S3 bucket', async () => { + await expect(SendToS3BucketService.go(fileName)).to.reject() - expect(result).to.be.false() + expect(s3Stub.called).to.be.false() }) }) }) From ac61d2d445ee389195496a21468c478f1ac0cd0a Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 22:24:01 +0100 Subject: [PATCH 11/14] Fixed error handling for clean up services and added tests for schema export service --- .../db-export/delete-folder.service.js | 2 +- .../db-export/delete-folder.service.test.js | 9 +- .../fetch-table-names.service.test.js | 6 +- .../db-export/schema-export.service.test.js | 103 +++++++++++++----- 4 files changed, 80 insertions(+), 40 deletions(-) diff --git a/app/services/db-export/delete-folder.service.js b/app/services/db-export/delete-folder.service.js index 725153a220..9dcd29e9d5 100644 --- a/app/services/db-export/delete-folder.service.js +++ b/app/services/db-export/delete-folder.service.js @@ -12,7 +12,7 @@ const fsPromises = require('fs').promises * @param {String} folderPath The folder path that we want to delete */ async function go (folderPath) { - await fsPromises.rm(folderPath, { recursive: true }) + await fsPromises.rm(folderPath, { recursive: true, force: true }) } module.exports = { diff --git a/test/services/db-export/delete-folder.service.test.js b/test/services/db-export/delete-folder.service.test.js index 12610335f4..7540fbbba2 100644 --- a/test/services/db-export/delete-folder.service.test.js +++ b/test/services/db-export/delete-folder.service.test.js @@ -50,14 +50,11 @@ describe('Delete Folder service', () => { }) }) - describe('When an error occurs', () => { - it('throws an error', async () => { + describe('When a folder does not exist', () => { + it('returns without throwing an error', async () => { const fakeFolder = 'FAKE_FILE' - const result = await expect(DeleteFolderService.go(fakeFolder)).to.reject() - - expect(result).to.be.an.error() - expect(result.message).to.startsWith('ENOENT') + await expect(DeleteFolderService.go(fakeFolder)).not.to.reject() }) }) }) diff --git a/test/services/db-export/fetch-table-names.service.test.js b/test/services/db-export/fetch-table-names.service.test.js index 3bc120e17a..64f4f153f7 100644 --- a/test/services/db-export/fetch-table-names.service.test.js +++ b/test/services/db-export/fetch-table-names.service.test.js @@ -8,12 +8,12 @@ const { describe, it } = exports.lab = Lab.script() const { expect } = Code // Thing under test -const FetchTableNames = require('../../../app/services/db-export/fetch-table-names.service') +const FetchTableNamesService = require('../../../app/services/db-export/fetch-table-names.service') describe('Fetch table names', () => { describe('when given a schema name', () => { it('returns a list of the schemas table names', async () => { - const result = await FetchTableNames.go('water') + const result = await FetchTableNamesService.go('water') expect(result).to.include('billing_charge_categories') expect(result).to.include('charge_purposes') @@ -23,7 +23,7 @@ describe('Fetch table names', () => { describe('when not given a schema name', () => { it('throws an error', async () => { - const result = await expect(FetchTableNames.go()).to.reject() + const result = await expect(FetchTableNamesService.go()).to.reject() expect(result).to.be.an.error() expect(result.message).to.equal('Error: Unable to fetch table names') diff --git a/test/services/db-export/schema-export.service.test.js 
b/test/services/db-export/schema-export.service.test.js index b2c3588b7e..fc8cd49697 100644 --- a/test/services/db-export/schema-export.service.test.js +++ b/test/services/db-export/schema-export.service.test.js @@ -10,6 +10,7 @@ const { expect } = Code // Things we need to stub const CompressSchemaFolderService = require('../../../app/services/db-export/compress-schema-folder.service.js') +const DeleteFileService = require('../../../app/services/db-export/delete-file.service.js') const DeleteFolderService = require('../../../app/services/db-export/delete-folder.service.js') const ExportTableService = require('../../../app/services/db-export/export-table.service.js') const FetchTableNamesService = require('../../../app/services/db-export/fetch-table-names.service.js') @@ -24,50 +25,92 @@ describe('Schema export service', () => { let SendToS3BucketServiceStub let DeleteFolderServiceStub let ExportTableServiceStub + let DeleteFileServiceStub + + describe('when successful', () => { + beforeEach(() => { + FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go').resolves([]) + CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go').resolves('/tmp/water') + SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go').resolves() + DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() + ExportTableServiceStub = Sinon.stub(ExportTableService, 'go').resolves() + DeleteFileServiceStub = Sinon.stub(DeleteFileService, 'go').resolves() + }) - beforeEach(() => { - FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go').resolves([]) - CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go').resolves('/tmp/water') - SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go').resolves() - DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() - ExportTableServiceStub = Sinon.stub(ExportTableService, 'go').resolves() - }) + afterEach(() => { + Sinon.restore() + }) - afterEach(() => { - Sinon.restore() - }) + it('calls the different services that export a schema', async () => { + await SchemaExportService.go('water') - it('calls the different services that export a schema', async () => { - await SchemaExportService.go('water') + expect(FetchTableNamesServiceStub.called).to.be.true() + expect(CompressSchemaFolderServiceStub.called).to.be.true() + expect(SendToS3BucketServiceStub.called).to.be.true() + expect(DeleteFolderServiceStub.called).to.be.true() + expect(DeleteFileServiceStub.called).to.be.true() + }) - expect(FetchTableNamesServiceStub.called).to.be.true() - expect(CompressSchemaFolderServiceStub.called).to.be.true() - expect(SendToS3BucketServiceStub.called).to.be.true() - expect(DeleteFolderServiceStub.called).to.be.true() - }) + it('calls the ExportTableService with the different table names as arguments', async () => { + const tableNames = [] - it('calls the ExportTableService with the different table names as arguments', async () => { - const tableNames = [] + await SchemaExportService.go('water') - await SchemaExportService.go('water') + const allArgs = ExportTableServiceStub.getCalls().flatMap((call) => { + return call.args + }) - const allArgs = ExportTableServiceStub.getCalls().flatMap((call) => { - return call.args + expect(allArgs).to.equal(tableNames) }) - expect(allArgs).to.equal(tableNames) + it('creates a folder name for the schema table files to be saved in', async () => { + const schemaName = 'water' + const expectedFolderPath = ['/tmp/water'] + + await 
SchemaExportService.go(schemaName) + + const args = SendToS3BucketServiceStub.getCalls().flatMap((call) => { + return call.args + }) + + expect(args).to.equal(expectedFolderPath) + }) }) - it('creates a folder name for the schema table files to be saved in', async () => { - const schemaName = 'water' - const expectedFolderPath = ['/tmp/water'] + describe('when an error is thrown', () => { + let notifierStub + + beforeEach(() => { + notifierStub = { omg: Sinon.stub(), omfg: Sinon.stub() } + global.GlobalNotifier = notifierStub - await SchemaExportService.go(schemaName) + FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go') + SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go') + CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go') + DeleteFileServiceStub = Sinon.stub(DeleteFileService, 'go').resolves() + DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() + }) - const args = SendToS3BucketServiceStub.getCalls().flatMap((call) => { - return call.args + afterEach(() => { + Sinon.restore() + delete global.GlobalNotifier }) - expect(args).to.equal(expectedFolderPath) + it('catches the error', async () => { + FetchTableNamesServiceStub.rejects(new Error()) + + await SchemaExportService.go('water') + + expect(notifierStub.omfg.calledWith(('Error: Failed to export schema water'))).to.be.true() + expect(SendToS3BucketServiceStub.called).to.be.false() + expect(CompressSchemaFolderServiceStub.called).to.be.false() + }) + + it('cleans up the files', async () => { + await SchemaExportService.go('water') + + expect(DeleteFileServiceStub.called).to.be.true() + expect(DeleteFolderServiceStub.called).to.be.true() + }) }) }) From a51d9f7c2b530d56f3c75812fae2ac19e2157886 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Thu, 1 Jun 2023 22:35:42 +0100 Subject: [PATCH 12/14] Updated comment --- app/services/db-export/schema-export.service.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 785b4932f4..02b58530a4 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -16,8 +16,9 @@ const FetchTableNamesService = require('../db-export/fetch-table-names.service.j const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js') /** - * Exports the specific schema by fetching table names, exporting each table, - * uploading the schema folder to an S3 bucket and finally deleting the folder + * Exports the specific schema by fetching table names, exporting each table to a schema folder + * converting the folder into a compressed tarball file and uploading this to the S3 bucket + * Finally deleting the schema folder and the schema.tgz file * * @param {String} schemaName The name of the database to export */ From 28a3bfaa3fb05eae6041050c2234eb06a8c7a119 Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Fri, 2 Jun 2023 12:25:19 +0100 Subject: [PATCH 13/14] Refactor code, add time logging --- app/controllers/data/data.controller.js | 2 +- .../compress-schema-folder.service.js | 1 + app/services/db-export/db-export.service.js | 14 +++++ app/services/db-export/delete-file.service.js | 22 -------- .../db-export/delete-files.service.js | 21 ++++++++ .../db-export/delete-folder.service.js | 20 -------- .../db-export/export-table.service.js | 12 ++--- .../db-export/schema-export.service.js | 31 ++++++----- 
.../db-export/send-to-s3-bucket.service.js | 12 +---- .../db-export/delete-file.service.test.js | 51 ------------------- ...e.test.js => delete-files.service.test.js} | 27 ++++++++-- .../db-export/schema-export.service.test.js | 18 +++---- 12 files changed, 87 insertions(+), 144 deletions(-) delete mode 100644 app/services/db-export/delete-file.service.js create mode 100644 app/services/db-export/delete-files.service.js delete mode 100644 app/services/db-export/delete-folder.service.js delete mode 100644 test/services/db-export/delete-file.service.test.js rename test/services/db-export/{delete-folder.service.test.js => delete-files.service.test.js} (59%) diff --git a/app/controllers/data/data.controller.js b/app/controllers/data/data.controller.js index 3b177aa5e3..5df15aea1b 100644 --- a/app/controllers/data/data.controller.js +++ b/app/controllers/data/data.controller.js @@ -25,7 +25,7 @@ async function tearDown (_request, h) { */ async function dbExport (_request, h) { try { - await DbExportService.go() + DbExportService.go() return h.response().code(204) } catch (error) { diff --git a/app/services/db-export/compress-schema-folder.service.js b/app/services/db-export/compress-schema-folder.service.js index 275470ee60..6715123025 100644 --- a/app/services/db-export/compress-schema-folder.service.js +++ b/app/services/db-export/compress-schema-folder.service.js @@ -9,6 +9,7 @@ const tar = require('tar') /** * Create a compressed tarball (.tgz) from a given schema folder + * * @param {String} schemaFolderPath * * @returns {String} The path to the created tarball file diff --git a/app/services/db-export/db-export.service.js b/app/services/db-export/db-export.service.js index 29a94ee80f..60b915bd9c 100644 --- a/app/services/db-export/db-export.service.js +++ b/app/services/db-export/db-export.service.js @@ -11,11 +11,25 @@ const SchemaExportService = require('../db-export/schema-export.service.js') * Calls SchemaExportService giving it a schemaName */ async function go () { + // Mark the start time for later logging + const startTime = process.hrtime.bigint() + const schemaNames = ['water', 'returns', 'crm', 'crm_v2', 'idm', 'permit'] for (const schemaName of schemaNames) { await SchemaExportService.go(schemaName) } + + // Log how long the process took + _calculateAndLogTime(startTime) +} + +function _calculateAndLogTime (startTime) { + const endTime = process.hrtime.bigint() + const timeTakenNs = endTime - startTime + const timeTakenMs = timeTakenNs / 1000000n + + global.GlobalNotifier.omg(`Time taken to export the db: ${timeTakenMs}ms`) } module.exports = { diff --git a/app/services/db-export/delete-file.service.js b/app/services/db-export/delete-file.service.js deleted file mode 100644 index 6ee0c2882f..0000000000 --- a/app/services/db-export/delete-file.service.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -/** - * @module DeleteFileService - */ - -const fs = require('fs') - -/** - * Deleting a file - * - * @param {String} filePath The file path that we want to delete - */ -async function go (filePath) { - if (fs.existsSync(filePath)) { - fs.unlinkSync(filePath) - } -} - -module.exports = { - go -} diff --git a/app/services/db-export/delete-files.service.js b/app/services/db-export/delete-files.service.js new file mode 100644 index 0000000000..fc86ba03c5 --- /dev/null +++ b/app/services/db-export/delete-files.service.js @@ -0,0 +1,21 @@ +'use strict' + +/** + * Deletes a folder and its content or an individual file + * @module DeleteFilesService + */ + +const fsPromises = 
require('fs').promises + +/** + * Deletes a folder and its content or an individual file + * + * @param {String} path The folder or file path that we want to delete + */ +async function go (path) { + await fsPromises.rm(path, { recursive: true, force: true }) +} + +module.exports = { + go +} diff --git a/app/services/db-export/delete-folder.service.js b/app/services/db-export/delete-folder.service.js deleted file mode 100644 index 9dcd29e9d5..0000000000 --- a/app/services/db-export/delete-folder.service.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -/** - * @module DeleteFolderService - */ - -const fsPromises = require('fs').promises - -/** - * Deleting a folder and its content - * - * @param {String} folderPath The folder path that we want to delete - */ -async function go (folderPath) { - await fsPromises.rm(folderPath, { recursive: true, force: true }) -} - -module.exports = { - go -} diff --git a/app/services/db-export/export-table.service.js b/app/services/db-export/export-table.service.js index d2a4bafd2a..ac3b0ba7bd 100644 --- a/app/services/db-export/export-table.service.js +++ b/app/services/db-export/export-table.service.js @@ -2,7 +2,6 @@ /** * Exports a table from the db, converts it to CSV format and saves it to a file - * * @module ExportTableService */ @@ -11,8 +10,10 @@ const ExportDataFilesService = require('./export-data-files.service.js') const FetchTableService = require('./fetch-table.service.js') /** - * Exports the specific database table by fetching its data, converting it to CSV format, - * and exporting the data files to the provided schema folder path + * Exports a database table + * + * Exports the specific database table by fetching its data, converting it to CSV format, and exporting the data files + * to the provided schema folder path * * @param {String} tableName The name of the database table to export * @param {String} schemaFolderPath The folder path where the schema files are stored @@ -20,10 +21,9 @@ const FetchTableService = require('./fetch-table.service.js') */ async function go (tableName, schemaFolderPath, schemaName) { const data = await FetchTableService.go(tableName, schemaName) + const tableConvertedToCSV = ConvertToCSVService.go(data.headers, data.rows) - const tableConvertedToCsv = ConvertToCSVService.go(data.headers, data.rows) - - await ExportDataFilesService.go(tableConvertedToCsv, data.tableName, schemaFolderPath) + await ExportDataFilesService.go(tableConvertedToCSV, data.tableName, schemaFolderPath) } module.exports = { diff --git a/app/services/db-export/schema-export.service.js b/app/services/db-export/schema-export.service.js index 02b58530a4..a76826782d 100644 --- a/app/services/db-export/schema-export.service.js +++ b/app/services/db-export/schema-export.service.js @@ -5,26 +5,25 @@ * @module SchemaExportService */ -const path = require('path') const os = require('os') +const path = require('path') -const CompressSchemaFolderService = require('../db-export/compress-schema-folder.service.js') -const DeleteFileService = require('./delete-file.service.js') -const DeleteFolderService = require('./delete-folder.service.js') +const CompressSchemaFolderService = require('./compress-schema-folder.service.js') +const DeleteFilesService = require('./delete-files.service.js') const ExportTableService = require('./export-table.service.js') -const FetchTableNamesService = require('../db-export/fetch-table-names.service.js') -const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js') +const FetchTableNamesService = 
require('./fetch-table-names.service.js') +const SendToS3BucketService = require('./send-to-s3-bucket.service.js') /** - * Exports the specific schema by fetching table names, exporting each table to a schema folder - * converting the folder into a compressed tarball file and uploading this to the S3 bucket - * Finally deleting the schema folder and the schema.tgz file + * Exports the specific schema by fetching table names, exporting each table to a schema folder converting the folder + * into a compressed tarball file and uploading this to the S3 bucket. Finally deleting the schema folder and the + * schema.tgz file * - * @param {String} schemaName The name of the database to export + * @param {String} schemaName The name of the database schema to export */ async function go (schemaName) { const schemaFolderPath = _folderToUpload(schemaName) - let tarSchemaPath + let compressedSchemaPath try { const tableNames = await FetchTableNamesService.go(schemaName) @@ -33,14 +32,14 @@ async function go (schemaName) { await ExportTableService.go(tableName, schemaFolderPath, schemaName) } - const tarSchemaPath = await CompressSchemaFolderService.go(schemaFolderPath) - await SendToS3BucketService.go(tarSchemaPath) + compressedSchemaPath = await CompressSchemaFolderService.go(schemaFolderPath) + await SendToS3BucketService.go(compressedSchemaPath) } catch (error) { global.GlobalNotifier.omfg(`Error: Failed to export schema ${schemaName}`, error.message) + } finally { + await DeleteFilesService.go(schemaFolderPath) + await DeleteFilesService.go(compressedSchemaPath) } - - await DeleteFolderService.go(schemaFolderPath) - await DeleteFileService.go(tarSchemaPath) } /** diff --git a/app/services/db-export/send-to-s3-bucket.service.js b/app/services/db-export/send-to-s3-bucket.service.js index 6f00db8a52..e6b7331134 100644 --- a/app/services/db-export/send-to-s3-bucket.service.js +++ b/app/services/db-export/send-to-s3-bucket.service.js @@ -5,8 +5,7 @@ * @module SendToS3BucketService */ -const fs = require('fs') -const fsPromises = fs.promises +const fsPromises = require('fs').promises const path = require('path') const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3') @@ -16,14 +15,8 @@ const S3Config = require('../../../config/s3.config.js') * Sends a file to our AWS S3 Bucket using the filePath that it receives * * @param {String} filePath A string containing the path of the file to send to the S3 bucket - * - * @returns {Boolean} True if the file is uploaded successfully and false if not */ async function go (filePath) { - if (!fs.existsSync(filePath)) { - throw new Error() - } - const bucketName = S3Config.s3.bucket const fileName = path.basename(filePath) const fileContent = await fsPromises.readFile(filePath) @@ -40,9 +33,6 @@ async function go (filePath) { * Uploads a file to an Amazon S3 bucket using the given parameters * * @param {Object} params The parameters to use when uploading the file - * @param {String} fileName The name of the file to upload - * - * @returns {Boolean} True if the file is uploaded successfully and false if not */ async function _uploadToBucket (params) { const s3Client = new S3Client() diff --git a/test/services/db-export/delete-file.service.test.js b/test/services/db-export/delete-file.service.test.js deleted file mode 100644 index 9fd3c32c67..0000000000 --- a/test/services/db-export/delete-file.service.test.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict' - -// Test framework dependencies -const Lab = require('@hapi/lab') -const Code = require('@hapi/code') 
- -const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() -const { expect } = Code - -// Test helpers -const fs = require('fs') -const path = require('path') -const mockFs = require('mock-fs') - -// Thing under test -const DeleteFileService = require('../../../app/services/db-export/delete-file.service.js') - -describe('Delete File service', () => { - let filenameWithPath - - beforeEach(() => { - filenameWithPath = path.join('testFolder', 'testFile') - - mockFs({ - testFolder: { - testFile: 'test content' - } - }) - }) - - afterEach(() => { - mockFs.restore() - }) - - describe('When a valid file is specified', () => { - it('deletes the file', async () => { - await DeleteFileService.go(filenameWithPath) - - const fileExists = fs.existsSync(filenameWithPath) - expect(fileExists).to.be.false() - }) - }) - - describe('When a file does not exist', () => { - it('returns without throwing an error', async () => { - const fakeFile = 'FAKE_FILE' - - await expect(DeleteFileService.go(fakeFile)).not.to.reject() - }) - }) -}) diff --git a/test/services/db-export/delete-folder.service.test.js b/test/services/db-export/delete-files.service.test.js similarity index 59% rename from test/services/db-export/delete-folder.service.test.js rename to test/services/db-export/delete-files.service.test.js index 7540fbbba2..321702bda0 100644 --- a/test/services/db-export/delete-folder.service.test.js +++ b/test/services/db-export/delete-files.service.test.js @@ -13,9 +13,9 @@ const path = require('path') const mockFs = require('mock-fs') // Thing under test -const DeleteFolderService = require('../../../app/services/db-export/delete-folder.service.js') +const DeleteFilesService = require('../../../app/services/db-export/delete-files.service.js') -describe('Delete Folder service', () => { +describe('Delete Files service', () => { let filenameWithPath let folderNameWithPath @@ -36,14 +36,14 @@ describe('Delete Folder service', () => { describe('When a valid folder is specified', () => { it('deletes the folder', async () => { - await DeleteFolderService.go(folderNameWithPath) + await DeleteFilesService.go(folderNameWithPath) const folderExists = fs.existsSync(folderNameWithPath) expect(folderExists).to.be.false() }) it('deletes anything inside the folder', async () => { - await DeleteFolderService.go(folderNameWithPath) + await DeleteFilesService.go(folderNameWithPath) const fileExists = fs.existsSync(filenameWithPath) expect(fileExists).to.be.false() @@ -54,7 +54,24 @@ describe('Delete Folder service', () => { it('returns without throwing an error', async () => { const fakeFolder = 'FAKE_FILE' - await expect(DeleteFolderService.go(fakeFolder)).not.to.reject() + await expect(DeleteFilesService.go(fakeFolder)).not.to.reject() + }) + }) + + describe('When a valid file is specified', () => { + it('deletes the file', async () => { + await DeleteFilesService.go(filenameWithPath) + + const fileExists = fs.existsSync(filenameWithPath) + expect(fileExists).to.be.false() + }) + }) + + describe('When a file does not exist', () => { + it('returns without throwing an error', async () => { + const fakeFile = 'FAKE_FILE' + + await expect(DeleteFilesService.go(fakeFile)).not.to.reject() }) }) }) diff --git a/test/services/db-export/schema-export.service.test.js b/test/services/db-export/schema-export.service.test.js index fc8cd49697..0a0afc2848 100644 --- a/test/services/db-export/schema-export.service.test.js +++ b/test/services/db-export/schema-export.service.test.js @@ -10,8 +10,7 @@ const { expect } = Code // 
Things we need to stub const CompressSchemaFolderService = require('../../../app/services/db-export/compress-schema-folder.service.js') -const DeleteFileService = require('../../../app/services/db-export/delete-file.service.js') -const DeleteFolderService = require('../../../app/services/db-export/delete-folder.service.js') +const DeleteFilesService = require('../../../app/services/db-export/delete-files.service.js') const ExportTableService = require('../../../app/services/db-export/export-table.service.js') const FetchTableNamesService = require('../../../app/services/db-export/fetch-table-names.service.js') const SendToS3BucketService = require('../../../app/services/db-export/send-to-s3-bucket.service.js') @@ -23,18 +22,16 @@ describe('Schema export service', () => { let FetchTableNamesServiceStub let CompressSchemaFolderServiceStub let SendToS3BucketServiceStub - let DeleteFolderServiceStub + let DeleteFilesServiceStub let ExportTableServiceStub - let DeleteFileServiceStub describe('when successful', () => { beforeEach(() => { FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go').resolves([]) CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go').resolves('/tmp/water') SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go').resolves() - DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() + DeleteFilesServiceStub = Sinon.stub(DeleteFilesService, 'go').resolves() ExportTableServiceStub = Sinon.stub(ExportTableService, 'go').resolves() - DeleteFileServiceStub = Sinon.stub(DeleteFileService, 'go').resolves() }) afterEach(() => { @@ -47,8 +44,7 @@ describe('Schema export service', () => { expect(FetchTableNamesServiceStub.called).to.be.true() expect(CompressSchemaFolderServiceStub.called).to.be.true() expect(SendToS3BucketServiceStub.called).to.be.true() - expect(DeleteFolderServiceStub.called).to.be.true() - expect(DeleteFileServiceStub.called).to.be.true() + expect(DeleteFilesServiceStub.called).to.be.true() }) it('calls the ExportTableService with the different table names as arguments', async () => { @@ -87,8 +83,7 @@ describe('Schema export service', () => { FetchTableNamesServiceStub = Sinon.stub(FetchTableNamesService, 'go') SendToS3BucketServiceStub = Sinon.stub(SendToS3BucketService, 'go') CompressSchemaFolderServiceStub = Sinon.stub(CompressSchemaFolderService, 'go') - DeleteFileServiceStub = Sinon.stub(DeleteFileService, 'go').resolves() - DeleteFolderServiceStub = Sinon.stub(DeleteFolderService, 'go').resolves() + DeleteFilesServiceStub = Sinon.stub(DeleteFilesService, 'go').resolves() }) afterEach(() => { @@ -109,8 +104,7 @@ describe('Schema export service', () => { it('cleans up the files', async () => { await SchemaExportService.go('water') - expect(DeleteFileServiceStub.called).to.be.true() - expect(DeleteFolderServiceStub.called).to.be.true() + expect(DeleteFilesServiceStub.called).to.be.true() }) }) }) From 6e7783cd06d2099bc5c36b5a736ad614cefef38b Mon Sep 17 00:00:00 2001 From: Rebecca Ransome Date: Fri, 2 Jun 2023 12:49:57 +0100 Subject: [PATCH 14/14] Fixing broken tests --- app/controllers/data/data.controller.js | 8 ++------ test/controllers/data/data.controller.test.js | 14 -------------- test/services/db-export/db-export.service.test.js | 12 ++++++++++++ .../process-billing-batch.service.test.js | 1 + 4 files changed, 15 insertions(+), 20 deletions(-) diff --git a/app/controllers/data/data.controller.js b/app/controllers/data/data.controller.js index 5df15aea1b..1a96a29414 100644 --- 
a/app/controllers/data/data.controller.js +++ b/app/controllers/data/data.controller.js @@ -24,13 +24,9 @@ async function tearDown (_request, h) { * Triggers export of all relevant tables to CSV and then uploads them to S3 */ async function dbExport (_request, h) { - try { - DbExportService.go() + DbExportService.go() - return h.response().code(204) - } catch (error) { - return Boom.badImplementation(error.message) - } + return h.response().code(204) } module.exports = { diff --git a/test/controllers/data/data.controller.test.js b/test/controllers/data/data.controller.test.js index 3bca3452ca..7058e5d00e 100644 --- a/test/controllers/data/data.controller.test.js +++ b/test/controllers/data/data.controller.test.js @@ -84,19 +84,5 @@ describe('Data controller', () => { expect(response.statusCode).to.equal(204) }) }) - - describe('when the service fails', () => { - describe('because the DbExportService errors', () => { - beforeEach(async () => { - Sinon.stub(DbExportService, 'go').rejects() - }) - - it('displays the error message', async () => { - const response = await server.inject(options) - - expect(response.statusCode).to.equal(500) - }) - }) - }) }) }) diff --git a/test/services/db-export/db-export.service.test.js b/test/services/db-export/db-export.service.test.js index 42213f0776..5c12393e2b 100644 --- a/test/services/db-export/db-export.service.test.js +++ b/test/services/db-export/db-export.service.test.js @@ -16,13 +16,17 @@ const DbExportService = require('../../../app/services/db-export/db-export.servi describe('Db Export Service', () => { let SchemaExportServiceStub + let notifierStub beforeEach(async () => { SchemaExportServiceStub = Sinon.stub(SchemaExportService, 'go').resolves() + notifierStub = { omg: Sinon.stub(), omfg: Sinon.stub() } + global.GlobalNotifier = notifierStub }) afterEach(() => { Sinon.restore() + delete global.GlobalNotifier }) it('calls the SchemaExportService with the different schema names', async () => { @@ -36,4 +40,12 @@ describe('Db Export Service', () => { expect(allArgs).to.equal(schemaNames) }) + + it('logs the time taken to export the db', async () => { + await DbExportService.go() + + const logMessage = notifierStub.omg.firstCall.args[0] + + expect(logMessage).to.startWith('Time taken to export the db: ') + }) }) diff --git a/test/services/supplementary-billing/process-billing-batch.service.test.js b/test/services/supplementary-billing/process-billing-batch.service.test.js index 902bc7fb8b..98273048d2 100644 --- a/test/services/supplementary-billing/process-billing-batch.service.test.js +++ b/test/services/supplementary-billing/process-billing-batch.service.test.js @@ -55,6 +55,7 @@ describe('Process billing batch service', () => { afterEach(() => { Sinon.restore() + delete global.GlobalNotifier }) describe('when the service is called', () => {
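The sketch below is illustrative only and is not part of the patch series. It is a minimal, standalone Node.js example of the behaviour the new DeleteFilesService and its tests rely on: fsPromises.rm() with recursive: true removes a folder and its contents as well as a single file, and force: true makes the call resolve silently when the path does not exist, which is why the "throws an error" tests were replaced with "returns without throwing". The demo() wrapper and the 'FAKE_FILE' path are made up for illustration; everything else follows the calls shown in the diffs above.

'use strict'

const fsPromises = require('fs').promises

async function demo () {
  // With force: true a missing path is ignored, so this resolves without throwing
  await fsPromises.rm('FAKE_FILE', { recursive: true, force: true })
  console.log('No error for a missing path when force is set')

  // Without force: true the same call rejects with ENOENT
  try {
    await fsPromises.rm('FAKE_FILE', { recursive: true })
  } catch (error) {
    console.log(error.code) // 'ENOENT'
  }
}

demo()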