This repository has been archived by the owner on May 22, 2024. It is now read-only.

Improve core logic #48

Merged
10 commits merged on Mar 16, 2020
Changes from all commits
2 changes: 1 addition & 1 deletion .github/workflows/workflow.yml
@@ -7,7 +7,7 @@ jobs:
strategy:
matrix:
os: [macOS-latest]
node: [8.5.0, 13]
node: [8.3.0, 13]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
6 changes: 0 additions & 6 deletions index.js

This file was deleted.

240 changes: 75 additions & 165 deletions package-lock.json

Large diffs are not rendered by default.

21 changes: 12 additions & 9 deletions package.json
@@ -24,15 +24,19 @@
},
"dependencies": {
"archiver": "^3.0.0",
"debug": "^4.1.1",
"common-path-prefix": "^2.0.0",
"cp-file": "^7.0.0",
"elf-tools": "^1.1.1",
"end-of-stream": "^1.4.1",
"glob": "^7.1.3",
"npm-packlist": "^1.1.12",
"p-all": "^2.0.0",
"make-dir": "^3.0.0",
"p-map": "^3.0.0",
"path-exists": "^4.0.0",
"pkg-dir": "^4.2.0",
"precinct": "^6.1.1",
"read-pkg-up": "^7.0.0",
"require-package-name": "^2.0.1",
"resolve": "^1.10.0",
"util.promisify": "^1.0.0",
"yargs": "^14.2.0"
},
"devDependencies": {
@@ -62,11 +66,10 @@
"pkg": "^4.3.7",
"prettier": "^1.16.4",
"rimraf": "^3.0.0",
"tmp-promise": "^2.0.2",
"util.promisify": "^1.0.0"
"tmp-promise": "^2.0.2"
},
"engines": {
"node": ">=8.5.0"
"node": ">=8.3.0"
},
"homepage": "https://github.com/netlify/zip-it-and-ship-it#README",
"keywords": [
@@ -79,7 +82,7 @@
"static"
],
"license": "MIT",
"main": "index.js",
"main": "src/main.js",
"pkg": {
"targets": [
"node10"
@@ -93,7 +96,7 @@
"prepublishOnly": "run-s pkg && git push && git push --tags && gh-release -a build/zip-it-and-ship-it_$(git describe --abbrev=0 --tags)_Linux-64bit.tar.gz,build/zip-it-and-ship-it_$(git describe --abbrev=0 --tags)_macOS-64bit.tar.gz",
"test": "run-s test:* test:dev:*",
"test-ci": "run-s test:* test:ci:*",
"test:lint": "eslint --ignore-path .gitignore --fix --cache --format=codeframe --max-warnings=0 \"src/**/*.js\" \"*.js\"",
"test:lint": "eslint --ignore-path .gitignore --fix --cache --format=codeframe --max-warnings=0 \"src/**/*.js\"",
"test:prettier": "prettier --ignore-path .gitignore --write --loglevel warn \"src/**/*.js\" \"*.{js,md,yml,json}\"",
"test:dev:ava": "ava",
"test:ci:ava": "nyc -r lcovonly -r text -r json ava",
34 changes: 34 additions & 0 deletions src/archive.js
@@ -0,0 +1,34 @@
const { createWriteStream } = require('fs')

const archiver = require('archiver')
const endOfStream = require('end-of-stream')
const promisify = require('util.promisify')

const pEndOfStream = promisify(endOfStream)

// Start zipping files
const startZip = function(destPath) {
const output = createWriteStream(destPath)
const archive = archiver('zip', { level: 9 })
archive.pipe(output)
return { archive, output }
}

// Add new file to zip
const addZipFile = function(archive, file, name, stat) {
// Ensure sha256 stability regardless of mtime
archive.file(file, { name, mode: stat.mode, date: new Date(0), stats: stat })
}

// Add new file content to zip
const addZipContent = function(archive, content, name) {
archive.append(content, { name, date: new Date(0) })
}

// End zipping files
const endZip = async function(archive, output) {
archive.finalize()
await pEndOfStream(output)
}

module.exports = { startZip, addZipFile, addZipContent, endZip }
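
For reference, a minimal sketch of how these helpers could be wired together, using hypothetical paths and file names (not part of this PR):

// Hypothetical usage sketch
const { statSync } = require('fs')

const { startZip, addZipFile, addZipContent, endZip } = require('./archive')

const zipExample = async function() {
  const { archive, output } = startZip('/tmp/hello.zip')
  // `addZipFile()` pins each entry's date to epoch 0, keeping the archive's sha256 stable
  addZipFile(archive, '/project/functions/hello.js', 'hello.js', statSync('/project/functions/hello.js'))
  addZipContent(archive, JSON.stringify({ handler: 'hello.handler' }), 'manifest.json')
  await endZip(archive, output)
}
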
2 changes: 1 addition & 1 deletion src/bin.test.js
@@ -22,7 +22,7 @@ test('CLI | --help', async t => {
t.true(stdout.includes('Options:'))
})

test.skip('CLI | Normal execution', async t => {
test('CLI | Normal execution', async t => {
const tmpDir = await tmpName({ prefix: 'zip-it-test' })
const { stdout } = await execa.command(`${BINARY_PATH} ${join(FIXTURES_DIR, 'simple')} ${tmpDir}`)
const zipped = JSON.parse(stdout)
213 changes: 213 additions & 0 deletions src/dependencies.js
@@ -0,0 +1,213 @@
const { dirname } = require('path')

const precinct = require('precinct')
const resolve = require('resolve')
const requirePackageName = require('require-package-name')
const promisify = require('util.promisify')
const glob = require('glob')

const pResolve = promisify(resolve)
const pGlob = promisify(glob)

// Retrieve all the files recursively required by a Node.js file
const getDependencies = async function(handler, packageRoot) {
const packageJson = getPackageJson(packageRoot)

const state = { localFiles: [], modulePaths: [] }

try {
return await getFileDependencies(handler, packageJson, state)
} catch (error) {
error.message = `In file "${handler}": ${error.message}`
throw error
}
}

const getPackageJson = function(packageRoot) {
if (packageRoot === undefined) {
return {}
}

return require(`${packageRoot}/package.json`)
}

const getFileDependencies = async function(path, packageJson, state) {
if (state.localFiles.includes(path)) {
return []
}

state.localFiles.push(path)

const basedir = dirname(path)
// This parses JavaScript in `path` to retrieve all the `require()` statements
// TODO: `precinct.paperwork()` uses `fs.readFileSync()` under the hood,
// but should use `fs.readFile()` instead
const dependencies = precinct.paperwork(path, { includeCore: false })

const depsPaths = await Promise.all(
dependencies.map(dependency => getImportDependencies(dependency, basedir, packageJson, state))
)
return [].concat(...depsPaths)
}

// `require()` statements can be either `require('moduleName')` or
// `require(path)`
const getImportDependencies = function(dependency, basedir, packageJson, state) {
if (LOCAL_IMPORT_REGEXP.test(dependency)) {
return getLocalImportDependencies(dependency, basedir, packageJson, state)
}

return getModuleDependencies(dependency, basedir, state, packageJson)
}

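// A dependency starting with `.` or `/` is a file path rather than a module name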
const LOCAL_IMPORT_REGEXP = /^(\.|\/)/

// When a file requires another one, we apply the top-level logic recursively
const getLocalImportDependencies = async function(dependency, basedir, packageJson, state) {
const dependencyPath = await pResolve(dependency, { basedir })
const depsPath = await getFileDependencies(dependencyPath, packageJson, state)
return [dependencyPath, ...depsPath]
}

// When a file requires a module, we find its path inside `node_modules` and
// use all its published files. We also recurse on the module's dependencies.
const getModuleDependencies = async function(dependency, basedir, state, packageJson) {
const moduleName = requirePackageName(dependency.replace(BACKSLASH_REGEXP, '/'))

try {
return await getModuleNameDependencies(moduleName, basedir, state)
} catch (error) {
return handleModuleNotFound({ error, moduleName, packageJson })
}
}

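// Windows-style backslashes are normalized to `/` before extracting the module name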
const BACKSLASH_REGEXP = /\\/g

const getModuleNameDependencies = async function(moduleName, basedir, state) {
if (EXCLUDED_MODULES.includes(moduleName)) {
return []
}

// Find the Node.js module directory path
const packagePath = await pResolve(`${moduleName}/package.json`, { basedir })
const modulePath = dirname(packagePath)

if (state.modulePaths.includes(modulePath)) {
return []
}

state.modulePaths.push(modulePath)

const pkg = require(packagePath)

const [publishedFiles, depsPaths] = await Promise.all([
getPublishedFiles(modulePath),
getNestedModules(modulePath, state, pkg)
])
return [...publishedFiles, ...depsPaths]
}

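// The AWS Lambda runtime already provides `aws-sdk`, so it does not need to be bundled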
const EXCLUDED_MODULES = ['aws-sdk']

// We use all the files published by the Node.js module except some that are not needed
const getPublishedFiles = async function(modulePath) {
const ignore = getIgnoredFiles(modulePath)
const publishedFiles = await pGlob(`${modulePath}/**`, {
ignore,
nodir: true,
absolute: true,
dot: true
})
return publishedFiles
}

const getIgnoredFiles = function(modulePath) {
return IGNORED_FILES.map(ignoreFile => `${modulePath}/${ignoreFile}`)
}

// To make the zip archive smaller, we exclude the following files.
const IGNORED_FILES = [
'node_modules/**',
'.npmignore',
'package-lock.json',
'yarn.lock',
'*.log',
'*.lock',
'*~',
'*.map',
'*.ts',
'*.patch'
]

// Apply the Node.js module logic recursively on its own dependencies, using
// the `package.json` `dependencies`, `peerDependencies` and
// `optionalDependencies` keys
const getNestedModules = async function(modulePath, state, pkg) {
const dependencies = getNestedDependencies(pkg)

const depsPaths = await Promise.all(
dependencies.map(dependency => getModuleDependencies(dependency, modulePath, state, pkg))
)
return [].concat(...depsPaths)
}

const getNestedDependencies = function({ dependencies = {}, peerDependencies = {}, optionalDependencies = {} }) {
return [
...Object.keys(dependencies),
...Object.keys(peerDependencies).filter(shouldIncludePeerDependency),
...Object.keys(optionalDependencies)
]
}

// Workaround for https://github.com/netlify/zip-it-and-ship-it/issues/73
// TODO: remove this after adding proper modules exclusion as outlined in
// https://github.com/netlify/zip-it-and-ship-it/issues/68
const shouldIncludePeerDependency = function(name) {
return !EXCLUDED_PEER_DEPENDENCIES.includes(name)
}

const EXCLUDED_PEER_DEPENDENCIES = ['prisma2']

// Modules can be required conditionally (inside an `if` or `try`/`catch` block).
// When a `require()` statement is found but the module is not found, the
// surrounding block might always evaluate to either:
// - `false`: in which case we should not bundle the dependency
// - `true`: in which case we should report the dependency as missing
// Those conditional modules might be:
// - present in the `package.json` `dependencies`
// - present in the `package.json` `optionalDependencies`
// - present in the `package.json` `peerDependencies`
// - not present in the `package.json`, if the module author wants its users
// to explicitly install it as an optional dependency.
// The current implementation:
// - when parsing `require()` statements inside function files, always considers
// conditional modules to be included, i.e. reports them if not found.
// This is because our current parsing logic does not know whether a
// `require()` is conditional or not.
// - when parsing module dependencies, ignores `require()` statements that are
// not present in the `package.json` `*dependencies`, i.e. users must manually
// install them if the module is used.
// `optionalDependencies`:
// - are not reported when missing
// - are included in module dependencies
const handleModuleNotFound = function({ error, moduleName, packageJson }) {
if (error.code === 'MODULE_NOT_FOUND' && isOptionalModule(moduleName, packageJson)) {
return []
}

throw error
}

const isOptionalModule = function(
moduleName,
{ optionalDependencies = {}, peerDependenciesMeta = {}, peerDependencies = {} }
) {
return (
optionalDependencies[moduleName] !== undefined ||
(peerDependenciesMeta[moduleName] &&
peerDependenciesMeta[moduleName].optional &&
peerDependencies[moduleName] !== undefined)
)
}

module.exports = { getDependencies }
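
Likewise, a rough sketch of calling `getDependencies()`, with a hypothetical handler path and package root (not part of this PR):

// Hypothetical usage sketch
const { getDependencies } = require('./dependencies')

const listHandlerFiles = async function() {
  // Lists the local files and `node_modules` files transitively required by the handler
  const files = await getDependencies('/project/functions/hello.js', '/project')
  console.log(files)
}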