Skip to content

Commit

Permalink
Merge pull request #1 from 20minutes/rebuild
Browse files Browse the repository at this point in the history
Rebuild using AWS SDK v3
  • Loading branch information
j0k3r authored Jun 10, 2024
2 parents 52e6e53 + 1178b6f commit ea97ea8
Show file tree
Hide file tree
Showing 13 changed files with 3,756 additions and 257 deletions.
12 changes: 12 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
root = true

[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
indent_size = 2

[*.yml]
indent_size = 4
6 changes: 6 additions & 0 deletions .eslintrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"extends": "@20minutes",
"rules": {
"no-console": "off"
}
}
27 changes: 27 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
name: CI

on:
push:
branches:
- master
pull_request:

jobs:
lint:
name: Quality checks

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'yarn'

- run: yarn install

- run: yarn lint
31 changes: 31 additions & 0 deletions .github/workflows/npm-publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
name: Node.js Package

on:
release:
types: [created]

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20
cache: 'yarn'
- run: yarn install
- run: yarn test

publish-npm:
needs: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20
registry-url: https://registry.npmjs.org/
- run: yarn install
- run: yarn publish --access public
env:
NODE_AUTH_TOKEN: ${{secrets.npm_token}}
1 change: 0 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,3 @@ node_modules

.DS_Store
*.zip
package-lock.json
6 changes: 0 additions & 6 deletions .idea/misc.xml

This file was deleted.

1 change: 1 addition & 0 deletions .nvmrc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
20
112 changes: 23 additions & 89 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,96 +1,30 @@
# S3 Unzip plus #
# S3 Unzip Plus #

A simple library to unzip an archive file in a S3 bucket to its root folder.
Forked version to:
- remove old AWS SDK v2
- add AWS SDK v3
- convert to async/await

### Install ###
For the official readme, check the [official project](https://github.com/akapuya/s3-unzip-plus).

- To include as library: `npm install s3-unzip-plus`
- To use as a command-line tool: `npm install -g s3-unzip-plus`
### Install

### Restrictions ###

- The module only supports the input zip file MIME type of 'application/zip'.

### Command Line Usage ###

`s3-unzip-plus [-dv] <bucket name> <filename>`

#### Required ####

- `<bucket name>` : Bucket name in S3 where the zip file exists
- `<filename>` : Filename (including the .zip extension) of the archive that will be decompressed

#### Options ####

- `<targetBucket>` : the output bucket
- `<targetKey>` : target folder
- `-d, --delete-on-success` : Delete the zip file once the decompression has finished
- `-v, --verbose` : Show the console log messages during runtime

#### Example ####

`s3-unzip-plus -d -v test-bucket-in-s3 Companies.zip`
```
yarn add @20minutes/s3-unzip-plus
```

### Library Usage ###

Include like most libraries:

`var s3Unziplus = require("s3-unzip-plus");`

Run the decompression for the file in the specified bucket:

#### Options ####

~~~~
var s = new s3Unziplus({
bucket: "test-bucket-in-s3",
file: "Companies.zip",
targetBucket: "test-output-bucket",
targetKey: "test-folder",
copyMetadata: true,
deleteOnSuccess: true,
verbose: false
}, function(err, success){
if (err) console.error(err);
else console.log(success);
});
~~~~

### AWS Lambda Usage ###

Create an AWS Lambda function, name it, and zip all files from this package to upload the code (including node_modules). The function defaults to deleting the zip file after it's been decompressed and verbose messages logged to CloudWatch.

#### Configuration ####

##### Basic #####
- Runtime: **Node.js 6.10**
- Handler: **index.handler**
- Role: **(create a custom role with Full S3 Access)**

##### Advanced #####

For a 20MB zip file:
- Memory: **at least 512MB**
- Timeout: **at least 30 sec**

#### Options ####

Test JSON:
~~~~
{
"event":{
"Records": [
{
"s3": {
"bucket": {
"name": "test-bucket-in-s3"
},
"object": {
"key": "Companies.zip"
}
}
}
]
}
}
~~~~
```js
import s3UnzipPlus from '@20minutes/s3-unzip-plus'

await s3UnzipPlus({
bucket: 'test-bucket-in-s3',
file: 'Companies.zip',
targetBucket: 'test-output-bucket',
targetKey: 'test-folder',
copyMetadata: true,
deleteOnSuccess: true,
verbose: false
});
```
39 changes: 24 additions & 15 deletions bin/s3-unzip-plus
Original file line number Diff line number Diff line change
Expand Up @@ -18,22 +18,31 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
**/
"use strict";
*/

var command = require("commander");
var pkg = require("../package.json");
var s3UnziPlus = require("../index.js");
import { readFile } from 'fs/promises'
import { program } from 'commander'
// eslint-disable-next-line import/extensions
import s3UnzipPlus from '../index.js'

command
.description(pkg.description)
.version(pkg.version);
async function run() {
const pkg = JSON.parse(await readFile(new URL('../package.json', import.meta.url)))

command
.usage("[options] <S3 bucket name> <filename>")
.option("-d --delete-on-success", "Delete the zip file on S3 once the decompression has finished")
.option("-m --copy-metadata', 'Copy S3 metadata from zip file to unzipped files")
.option("-v --verbose", "Show all console logs")
.parse(process.argv);
program.description(pkg.description).version(pkg.version)

var s = new s3UnziPlus(command);
program
.usage('[options] <S3 bucket name> <filename>')
.argument('<bucket-name>', 'S3 bucket name')
.argument('<filename>', 'filename')
.option(
'-d --delete-on-success',
'Delete the zip file on S3 once the decompression has finished'
)
.option('-m --copy-metadata', 'Copy S3 metadata from zip file to unzipped files')
.option('-v --verbose', 'Show all console logs')
.parse()

await s3UnzipPlus(program)
}

run()
68 changes: 28 additions & 40 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,54 +19,42 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
'use strict'

var Utils = require('./util')
// eslint-disable-next-line import/extensions
import { decompress } from './util.js'

function s3UnzipPlus(command, cb) {
if (cb === undefined) { cb = function () { } }
var vBucket, vFile, vTargetBucket, vTargetFolder
if (command.args && command.args.length >= 4) {
vBucket = command.args[0]
vFile = command.args[1]
vTargetBucket = command.args[2]
vTargetFolder = command.args[3]
export default async (command) => {
let bucket
let file
let targetBucket
let targetFolder
if (command.args && command.args.length >= 2) {
;[bucket, file, targetBucket, targetFolder] = command.args
}
if (command.bucket) {
vBucket = command.bucket
}
if (command.file) {
vFile = command.file
}
if (command.targetBucket) {
vTargetBucket = command.targetBucket
} else {
vTargetBucket = command.bucket

if (!targetBucket) {
targetBucket = bucket
}
if (command.targetFolder) {
vTargetFolder = command.targetFolder
} else {
vTargetFolder = ''
if (!targetFolder) {
targetFolder = ''
}
Utils.decompress({
bucket: vBucket,
file: vFile,
targetBucket: vTargetBucket,
targetFolder: vTargetFolder,
deleteOnSuccess: command.deleteOnSuccess,
copyMetadata: command.copyMetadata,
verbose: command.verbose
}, cb)
}

module.exports = s3UnzipPlus
await decompress({
bucket,
file,
targetBucket,
targetFolder,
deleteOnSuccess: command.opts().deleteOnSuccess ?? false,
copyMetadata: command.opts().copyMetadata ?? false,
verbose: command.opts().verbose ?? false,
})
}

module.exports.handler = function (event, context, callback) {
if (callback === undefined) { callback = function () { } }
Utils.decompress({
export const handler = async (event) => {
await decompress({
bucket: event.Records[0].s3.bucket.name,
file: event.Records[0].s3.object.key,
deleteOnSuccess: true,
verbose: true
}, callback)
verbose: true,
})
}
Loading

0 comments on commit ea97ea8

Please sign in to comment.