From e593aba2946c98820b0c2edf9c5dab6cb30c7402 Mon Sep 17 00:00:00 2001
From: Richard Moore
Date: Mon, 6 Jan 2020 18:47:20 -0500
Subject: [PATCH] Added documentation uploading script.

---
 admin/cmds/upload-docs.js | 186 ++++++++++++++++++++++++++++++++++++++
 package.json              |   2 +
 2 files changed, 188 insertions(+)
 create mode 100644 admin/cmds/upload-docs.js

diff --git a/admin/cmds/upload-docs.js b/admin/cmds/upload-docs.js
new file mode 100644
index 0000000000..42a045378d
--- /dev/null
+++ b/admin/cmds/upload-docs.js
@@ -0,0 +1,186 @@
+"use strict";
+
+const crypto = require('crypto');
+const fs = require('fs');
+const path = require('path');
+
+const AWS = require('aws-sdk');
+
+const config = require("../config");
+
+
+const Bucket = "docs-beta.ethers.io";
+
+
+// Fetch one page (up to 1000 keys) of the bucket listing into result,
+// mapping each key to its ETag with the quotes stripped (the MD5 of the
+// body for non-multipart uploads). Hands the continuation token for the
+// next page to callback, or null when the listing is complete.
+function _getKeys(s3, result, nextToken, callback) {
+    const params = {
+        Bucket: Bucket,
+        MaxKeys: 1000,
+        ContinuationToken: nextToken,
+    };
+    s3.listObjectsV2(params, function(error, data) {
+        if (error) {
+            console.log(error);
+            callback(error);
+            return;
+        }
+        data.Contents.forEach(function(item) {
+            result[item.Key] = item.ETag.replace(/"/g, '');
+        });
+        callback(null, data.IsTruncated ? data.NextContinuationToken: null);
+    });
+}
+
+// Resolves to a map of { key: etag } covering every object in the
+// bucket, walking the paginated listing one page at a time.
+function getKeys(s3) {
+    const result = {};
+    return new Promise(function(resolve, reject) {
+        function handleBlock(error, nextToken) {
+            if (error) {
+                reject(error);
+            } else if (nextToken) {
+                nextBlock(nextToken);
+            } else {
+                resolve(result);
+            }
+        }
+        function nextBlock(nextToken) {
+            _getKeys(s3, result, nextToken, handleBlock);
+        }
+        nextBlock(undefined);
+    });
+}
+
+// Map a filename extension to the Content-Type to upload with; logs and
+// returns undefined for unrecognized extensions.
+function getMime(filename) {
+    const comps = filename.split('.');
+    const ext = comps[comps.length - 1];
+    switch (ext.toLowerCase()) {
+        case 'css': return 'text/css';
+        case 'doctree': return 'application/x-doctree';
+        case 'eot': return 'application/vnd.ms-fontobject';
+        case 'gif': return 'image/gif';
+        case 'html': return 'text/html';
+        case 'js': return 'application/javascript';
+        case 'jpg': return 'image/jpeg';
+        case 'jpeg': return 'image/jpeg';
+        case 'md': return 'text/markdown';
+        case 'pickle': return 'application/x-pickle';
+        case 'png': return 'image/png';
+        case 'svg': return 'image/svg+xml';
+        case 'ttf': return 'application/x-font-ttf';
+        case 'txt': return 'text/plain';
+        case 'woff': return 'application/font-woff';
+    }
+    console.log('NO MIME', filename);
+    return undefined;
+}
+
+// Upload content to the bucket as name (public-read) and resolve to
+// { name, hash }, where hash is the resulting ETag with quotes stripped.
+function putObject(s3, name, content) {
+    return new Promise(function(resolve, reject) {
+        s3.putObject({
+            ACL: 'public-read',
+            Body: content,
+            Bucket: Bucket,
+            ContentType: getMime(name),
+            Key: name
+        }, function(error, data) {
+            if (error) {
+                reject(error);
+            } else {
+                console.log('Uploaded:', name);
+                resolve({
+                    name: name,
+                    hash: data.ETag.replace(/"/g, '')
+                });
+            }
+        });
+    });
+}
+
+// MD5 of a file's contents as a hex string; directly comparable to the
+// ETag S3 reports for a non-multipart upload.
+function hash(filename) {
+    const hasher = crypto.createHash('md5');
+    hasher.update(fs.readFileSync(filename));
+    return hasher.digest('hex');
+}
+
+// Recursively record { path: md5 } for every file under root.
+function _getFiles(result, root) {
+    fs.readdirSync(root).forEach(function(filename) {
+
+        // We don't need to upload junk
+        if (filename === '.DS_Store') { return; }
+
+        const fullFilename = path.join(root, filename);
+        const stat = fs.statSync(fullFilename);
+        if (stat.isDirectory()) {
+            _getFiles(result, fullFilename);
+        } else {
+            result[fullFilename] = hash(fullFilename);
+        }
+    });
+}
+
+function getFiles(dirs) {
+    const result = { }; //"index.html": hash("index.html") };
+    dirs.forEach(function(dir) {
+        _getFiles(result, dir);
+    });
+    return result;
+}
+
+(async function() {
+    const awsAccessId = await config.get("aws-upload-docs-accesskey");
+    const awsSecretKey = await config.get("aws-upload-docs-secretkey");
+
+    const s3 = new AWS.S3({
+        apiVersion: '2006-03-01',
+        accessKeyId: awsAccessId,
+        secretAccessKey: awsSecretKey
+    });
+
+    const added = [], removed = [], changed = [], upload = [];
+
+    const local = await getFiles([ "docs" ]);
+    const remote = await getKeys(s3);
+
+    // Local files that are new, or whose MD5 no longer matches the
+    // remote ETag, need uploading.
+    Object.keys(local).forEach((filename) => {
+        if (!remote[filename]) {
+            added.push(filename);
+            upload.push(filename);
+        } else if (remote[filename] != local[filename]) {
+            changed.push(filename);
+            upload.push(filename);
+        }
+    });
+
+    // Remote keys with no local counterpart are stale; they are only
+    // reported here, never deleted.
+    Object.keys(remote).forEach((filename) => {
+        if (!local[filename]) {
+            removed.push(filename);
+        }
+    });
+
+    console.log('Added: ', added.length);
+    console.log('Removed: ', removed.length);
+    console.log('Changed: ', changed.length);
+
+    for (let i = 0; i < upload.length; i++) {
+        const filename = upload[i];
+        console.log("Uploading:", filename);
+        await putObject(s3, filename, fs.readFileSync(filename));
+    }
+})();
diff --git a/package.json b/package.json
index 8ade1b6f83..207fba27bb 100644
--- a/package.json
+++ b/package.json
@@ -40,6 +40,7 @@
     "test": "if [ \"$TEST\" == \"\" ]; then npm run test-node; else npm run \"test-$TEST\"; fi",
     "lock-versions": "node ./admin/cmds/lock-versions",
     "build-docs": "flatworm docs.wrm docs",
+    "upload-docs": "node ./admin/cmds/upload-docs.js",
     "_admin_prepare": "npm run clean && npm run bootstrap && npm run build && node ./admin/cmds/update-exports.js",
     "update-versions": "npm run _admin_prepare && node ./admin/cmds/update-versions",
     "publish-all": "node ./admin/cmds/publish",
@@ -50,6 +51,7 @@
     "@types/mocha": "^5.2.0",
     "@types/node": "^12.7.4",
     "aes-js": "3.0.0",
+    "aws-sdk": "2.137.0",
     "browserify": "16.2.3",
     "diff": "4.0.1",
     "karma": "4.4.1",