Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Uploading compressed tarball file to S3 #250

Merged
merged 17 commits into from
Jun 2, 2023
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 0 additions & 42 deletions app/services/db-export/compress-files.service.js

This file was deleted.

29 changes: 29 additions & 0 deletions app/services/db-export/compressed-tarball.service.js
Beckyrose200 marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
'use strict'

/**
* Creates a compressed tarball (.tgz) from a given schema folder
* @module CompressedTarballService
*/

const tar = require('tar')

/**
* Create a compressed tarball (.tgz) from a given schema folder
 * @param {String} schemaFolderPath The path of the schema folder to compress
*
* @returns {String} The path to the created tarball file
*/
async function go (schemaFolderPath) {
  // Build the target path once so the tar option and the return value cannot drift apart
  const tarballPath = `${schemaFolderPath}.tgz`

  await tar.create(
    {
      gzip: true,
      file: tarballPath
    },
    [schemaFolderPath]
  )

  return tarballPath
}

module.exports = {
go
}
11 changes: 2 additions & 9 deletions app/services/db-export/export-compressed-table.service.js
Beckyrose200 marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -1,15 +1,12 @@
'use strict'

/**
 * Exports a table from the db, converts it to CSV format and saves it to a file
*
* @module ExportCompressedTableService
*/

const ConvertToCSVService = require('./convert-to-csv.service.js')
const CompressFilesService = require('./compress-files.service.js')
const DeleteFileService = require('./delete-file.service.js')
const ExportDataFilesService = require('./export-data-files.service.js')
const FetchTableService = require('./fetch-table.service.js')

Expand All @@ -26,11 +23,7 @@ async function go (tableName, schemaFolderPath, schemaName) {

const tableConvertedToCsv = ConvertToCSVService.go(data.headers, data.rows)

const filePath = await ExportDataFilesService.go(tableConvertedToCsv, data.tableName, schemaFolderPath)

await CompressFilesService.go(filePath)

await DeleteFileService.go(filePath)
await ExportDataFilesService.go(tableConvertedToCsv, data.tableName, schemaFolderPath)
}

module.exports = {
Expand Down
2 changes: 1 addition & 1 deletion app/services/db-export/fetch-table-names.service.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

/**
* Fetches all the table names from a given schema
 * @module FetchTableNamesService
*/

const { db } = require('../../../db/db.js')
Expand Down
12 changes: 8 additions & 4 deletions app/services/db-export/schema-export.service.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,26 +8,30 @@
const path = require('path')
const os = require('os')

const CompressedTarballService = require('../db-export/compressed-tarball.service.js')
const DeleteFolderService = require('./delete-folder.service.js')
const ExportCompressedTableService = require('./export-compressed-table.service.js')
const FetchTableNames = require('../db-export/fetch-table-names.service.js')
const FetchTableNamesService = require('../db-export/fetch-table-names.service.js')
const SendToS3BucketService = require('../db-export/send-to-s3-bucket.service.js')

/**
* Exports the specific schema by fetching table names, exporting each table,
 * uploading the schema folder to an S3 bucket and finally deleting the folder
*
* @param {String} schemaName The name of the database to export
*/
async function go (schemaName) {
  const tableNames = await FetchTableNamesService.go(schemaName)

  const schemaFolderPath = _folderToUpload(schemaName)

  // Export each table into its own file inside the schema folder
  for (const tableName of tableNames) {
    await ExportCompressedTableService.go(tableName, schemaFolderPath, schemaName)
  }

  // Compress the whole schema folder into a single .tgz before uploading
  const tarSchemaPath = await CompressedTarballService.go(schemaFolderPath)

  await SendToS3BucketService.go(tarSchemaPath)

  // Clean up the uncompressed folder once the tarball has been sent
  await DeleteFolderService.go(schemaFolderPath)
}
Expand Down
70 changes: 23 additions & 47 deletions app/services/db-export/send-to-s3-bucket.service.js
Beckyrose200 marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -1,79 +1,55 @@
'use strict'

/**
 * Sends a file to our AWS S3 bucket
* @module SendToS3BucketService
*/

const fsPromises = require('fs').promises
const fs = require('fs')
const path = require('path')
const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3')
Beckyrose200 marked this conversation as resolved.
Show resolved Hide resolved

const S3Config = require('../../../config/s3.config.js')

/**
 * Sends a file to our AWS S3 Bucket using the filePath that it receives
 *
 * @param {String} filePath A string containing the path of the file to send to the S3 bucket
 *
 * @returns {Boolean} True if the file is uploaded successfully and false if not
 */
async function go (filePath) {
  const bucketName = S3Config.s3.bucket
  const fileName = path.basename(filePath)
  // Read asynchronously so we do not block the event loop while loading the tarball
  const fileContent = await fs.promises.readFile(filePath)

  const params = {
    Bucket: bucketName,
    Key: `export/${fileName}`,
    Body: fileContent
  }

  return _uploadToBucket(params)
}

/**
 * Uploads a file to an Amazon S3 bucket using the given parameters
 *
 * @param {Object} params The parameters to use when uploading the file
 *
 * @returns {Boolean} True if the file is uploaded successfully and false if not
 */
async function _uploadToBucket (params) {
  const s3Client = new S3Client()
  const command = new PutObjectCommand(params)

  try {
    await s3Client.send(command)

    return true
  } catch (error) {
    // Deliberate best-effort: callers only need a success flag, not the error detail
    return false
  }
}
module.exports = {
go
}
Loading