Skip to content

Commit

Permalink
Merge branch 'master' into huijbers/vscode-settings
Browse files Browse the repository at this point in the history
  • Loading branch information
mergify[bot] authored Jun 1, 2020
2 parents 737cb8d + 1755cf2 commit 351df5a
Show file tree
Hide file tree
Showing 13 changed files with 388 additions and 37 deletions.
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ BREAKING CHANGE: Description of what broke and how to achieve this behavior now
### Step 5: Pull Request

* Push to a GitHub fork or to a branch (naming convention: `<user>/<feature-bug-name>`)
* Submit a Pull Requests on GitHub and assign the PR for a review to the "awslabs/aws-cdk" team.
* Submit a Pull Request on GitHub. A reviewer will later be assigned by the maintainers.
* Please follow the PR checklist written below. We trust our contributors to self-check, and this helps that process!
* Discuss review comments and iterate until you get at least one “Approve”. When iterating, push new commits to the
same branch. Usually all these are going to be squashed when you merge to master. The commit messages should be hints
Expand Down
99 changes: 79 additions & 20 deletions packages/@aws-cdk/aws-codepipeline/lib/pipeline.ts
Original file line number Diff line number Diff line change
Expand Up @@ -749,34 +749,52 @@ export class Pipeline extends PipelineBase {
/**
 * Validates the pipeline's artifact wiring.
 *
 * Checks three invariants and collects a human-readable message for each
 * violation (does not throw):
 *   1. every artifact is produced by at most one action;
 *   2. every consumed (input) artifact has a name and a producer somewhere
 *      in the pipeline;
 *   3. every artifact is produced at a pipeline location (stage order, then
 *      runOrder within the stage) strictly before its first consumer.
 *
 * @returns a list of validation error messages; empty when the pipeline is valid.
 */
private validateArtifacts(): string[] {
  const ret = new Array<string>();

  // Artifact name -> location of the action that produces it.
  const producers: Record<string, PipelineLocation> = {};
  // Artifact name -> earliest location among all actions that consume it.
  const firstConsumers: Record<string, PipelineLocation> = {};

  for (const [stageIndex, stage] of enumerate(this._stages)) {
    // For every output artifact, get the producer
    for (const action of stage.actionDescriptors) {
      const actionLoc = new PipelineLocation(stageIndex, stage, action);

      for (const outputArtifact of action.outputs) {
        // output Artifacts always have a name set
        const name = outputArtifact.artifactName!;
        if (producers[name]) {
          // NOTE(review): "producting" is a typo, but it is pinned by
          // test.artifacts.ts -- fix the message and the test together.
          ret.push(`Both Actions '${producers[name].actionName}' and '${action.actionName}' are producting Artifact '${name}'. Every artifact can only be produced once.`);
          continue;
        }

        producers[name] = actionLoc;
      }

      // For every input artifact, get the first consumer
      for (const inputArtifact of action.inputs) {
        const name = inputArtifact.artifactName;
        if (!name) {
          ret.push(`Action '${action.actionName}' is using an unnamed input Artifact, which is not being produced in this pipeline`);
          continue;
        }

        firstConsumers[name] = firstConsumers[name] ? firstConsumers[name].first(actionLoc) : actionLoc;
      }
    }
  }

  // Now validate that every input artifact is produced before it's
  // being consumed.
  for (const [artifactName, consumerLoc] of Object.entries(firstConsumers)) {
    const producerLoc = producers[artifactName];
    if (!producerLoc) {
      ret.push(`Action '${consumerLoc.actionName}' is using input Artifact '${artifactName}', which is not being produced in this pipeline`);
      continue;
    }

    // The consumer must come strictly after the producer.
    if (consumerLoc.beforeOrEqual(producerLoc)) {
      ret.push(`${consumerLoc} is consuming input Artifact '${artifactName}' before it is being produced at ${producerLoc}`);
    }
  }

  return ret;
}

Expand Down Expand Up @@ -874,3 +892,44 @@ interface CrossRegionInfo {

readonly region?: string;
}

/**
 * Pairs every element of `xs` with its index, like Python's enumerate().
 *
 * @param xs the array to enumerate
 * @returns an array of `[index, element]` tuples in original order
 */
function enumerate<A>(xs: A[]): Array<[number, A]> {
  return xs.map((x, i): [number, A] => [i, x]);
}

/**
 * A position in the pipeline: a (stage, action) pair, ordered first by stage
 * index and then by the action's runOrder within the stage.
 */
class PipelineLocation {
  constructor(private readonly stageIndex: number, private readonly stage: IStage, private readonly action: FullActionDescriptor) {
  }

  public get stageName() {
    return this.stage.stageName;
  }

  public get actionName() {
    return this.action.actionName;
  }

  /**
   * Returns whether this location is before or at the same order as `rhs`.
   *
   * Earlier stage wins; within the same stage, lower-or-equal runOrder wins.
   */
  public beforeOrEqual(rhs: PipelineLocation) {
    // BUG FIX: was `rhs.stageIndex < rhs.stageIndex`, which compares rhs with
    // itself and is therefore always false, breaking all cross-stage ordering.
    if (this.stageIndex !== rhs.stageIndex) { return this.stageIndex < rhs.stageIndex; }
    return this.action.runOrder <= rhs.action.runOrder;
  }

  /**
   * Returns the earliest location between this and the other one
   */
  public first(rhs: PipelineLocation) {
    return this.beforeOrEqual(rhs) ? this : rhs;
  }

  public toString() {
    // runOrders are 1-based, so make the stageIndex also 1-based otherwise it's going to be confusing.
    return `Stage ${this.stageIndex + 1} Action ${this.action.runOrder} ('${this.stageName}'/'${this.actionName}')`;
  }
}
59 changes: 56 additions & 3 deletions packages/@aws-cdk/aws-codepipeline/test/test.artifacts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ export = {
test.equal(errors.length, 1);
const error = errors[0];
test.same(error.source, pipeline);
test.equal(error.message, "Action 'Build' has an unnamed input Artifact that's not used as an output");
test.equal(error.message, "Action 'Build' is using an unnamed input Artifact, which is not being produced in this pipeline");

test.done();
},
Expand Down Expand Up @@ -82,7 +82,7 @@ export = {
test.equal(errors.length, 1);
const error = errors[0];
test.same(error.source, pipeline);
test.equal(error.message, "Artifact 'named' was used as input before being used as output");
test.equal(error.message, "Action 'Build' is using input Artifact 'named', which is not being produced in this pipeline");

test.done();
},
Expand Down Expand Up @@ -119,7 +119,7 @@ export = {
test.equal(errors.length, 1);
const error = errors[0];
test.same(error.source, pipeline);
test.equal(error.message, "Artifact 'Artifact_Source_Source' has been used as an output more than once");
test.equal(error.message, "Both Actions 'Source' and 'Build' are producting Artifact 'Artifact_Source_Source'. Every artifact can only be produced once.");

test.done();
},
Expand Down Expand Up @@ -173,6 +173,59 @@ export = {
test.done();
},

// Verifies that consuming an artifact at an earlier runOrder than it is
// produced (within the same stage) is detected by pipeline validation.
'violation of runOrder constraints is detected and reported'(test: Test) {
const stack = new cdk.Stack();

const sourceOutput1 = new codepipeline.Artifact('sourceOutput1');
const buildOutput1 = new codepipeline.Artifact('buildOutput1');
const sourceOutput2 = new codepipeline.Artifact('sourceOutput2');

const pipeline = new codepipeline.Pipeline(stack, 'Pipeline', {
stages: [
{
stageName: 'Source',
actions: [
new FakeSourceAction({
actionName: 'source1',
output: sourceOutput1,
}),
new FakeSourceAction({
actionName: 'source2',
output: sourceOutput2,
}),
],
},
{
stageName: 'Build',
actions: [
new FakeBuildAction({
actionName: 'build1',
input: sourceOutput1,
output: buildOutput1,
// build1 produces buildOutput1 at runOrder 3...
runOrder: 3,
}),
new FakeBuildAction({
actionName: 'build2',
input: sourceOutput2,
// ...but build2 consumes buildOutput1 at runOrder 2, i.e. before it exists.
extraInputs: [buildOutput1],
output: new codepipeline.Artifact('buildOutput2'),
runOrder: 2,
}),
],
},
],
});

const errors = validate(stack);

// Exactly one ordering violation is expected, attributed to the pipeline.
test.equal(errors.length, 1);
const error = errors[0];
test.same(error.source, pipeline);
test.equal(error.message, "Stage 2 Action 2 ('Build'/'build2') is consuming input Artifact 'buildOutput1' before it is being produced at Stage 2 Action 3 ('Build'/'build1')");

test.done();
},

'without a name, sanitize the auto stage-action derived name'(test: Test) {
const stack = new cdk.Stack();

Expand Down
11 changes: 11 additions & 0 deletions packages/@aws-cdk/aws-lambda-nodejs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,17 @@ new lambda.NodejsFunction(this, 'MyFunction', {

All other properties of `lambda.Function` are supported, see also the [AWS Lambda construct library](https://github.com/aws/aws-cdk/tree/master/packages/%40aws-cdk/aws-lambda).

Use the `containerEnvironment` prop to pass environment variables to the Docker container
running Parcel:

```ts
new lambda.NodejsFunction(this, 'my-handler', {
containerEnvironment: {
NODE_ENV: 'production',
},
});
```

### Configuring Parcel
The `NodejsFunction` construct exposes some [Parcel](https://parceljs.org/) options via properties: `minify`, `sourceMaps`,
`buildDir` and `cacheDir`.
Expand Down
12 changes: 12 additions & 0 deletions packages/@aws-cdk/aws-lambda-nodejs/lib/builder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,13 @@ export interface BuilderOptions {
* mounted in the Docker container.
*/
readonly projectRoot: string;

/**
* The environment variables to pass to the container running Parcel.
*
* @default - no environment variables are passed to the container
*/
readonly environment?: { [key: string]: string; };
}

/**
Expand Down Expand Up @@ -111,6 +118,7 @@ export class Builder {
'-v', `${this.options.projectRoot}:${containerProjectRoot}`,
'-v', `${path.resolve(this.options.outDir)}:${containerOutDir}`,
...(this.options.cacheDir ? ['-v', `${path.resolve(this.options.cacheDir)}:${containerCacheDir}`] : []),
...flatten(Object.entries(this.options.environment || {}).map(([k, v]) => ['--env', `${k}=${v}`])),
'-w', path.dirname(containerEntryPath).replace(/\\/g, '/'), // Always use POSIX paths in the container
'parcel-bundler',
];
Expand Down Expand Up @@ -164,3 +172,7 @@ export class Builder {
fs.writeFileSync(this.pkgPath, this.originalPkg);
}
}

/**
 * Flattens one level of nesting: [['a'], ['b', 'c']] -> ['a', 'b', 'c'].
 *
 * @param x the array of string arrays to flatten
 * @returns a new array with all elements in order
 */
function flatten(x: string[][]) {
  const ret: string[] = [];
  for (const chunk of x) {
    ret.push(...chunk);
  }
  return ret;
}
8 changes: 8 additions & 0 deletions packages/@aws-cdk/aws-lambda-nodejs/lib/function.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,13 @@ export interface NodejsFunctionProps extends lambda.FunctionOptions {
* @default - the closest path containing a .git folder
*/
readonly projectRoot?: string;

/**
* The environment variables to pass to the container running Parcel.
*
* @default - no environment variables are passed to the container
*/
readonly containerEnvironment?: { [key: string]: string; };
}

/**
Expand Down Expand Up @@ -119,6 +126,7 @@ export class NodejsFunction extends lambda.Function {
nodeVersion: extractVersion(runtime),
nodeDockerTag: props.nodeDockerTag || `${process.versions.node}-alpine`,
projectRoot: path.resolve(projectRoot),
environment: props.containerEnvironment,
});
builder.build();

Expand Down
24 changes: 24 additions & 0 deletions packages/@aws-cdk/aws-lambda-nodejs/test/builder.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,30 @@ test('with Windows paths', () => {
]));
});

// Entries in Builder.options.environment should be forwarded to `docker run`
// as repeated `--env KEY=VALUE` flags (spawnSync is mocked in this suite).
test('with env vars', () => {
const builder = new Builder({
entry: '/project/folder/entry.ts',
global: 'handler',
outDir: '/out-dir',
cacheDir: '/cache-dir',
nodeDockerTag: 'lts-alpine',
nodeVersion: '12',
projectRoot: '/project',
environment: {
KEY1: 'VALUE1',
KEY2: 'VALUE2',
},
});
builder.build();

// docker run
expect(spawnSync).toHaveBeenCalledWith('docker', expect.arrayContaining([
'run',
'--env', 'KEY1=VALUE1',
'--env', 'KEY2=VALUE2',
]));
});

test('throws in case of error', () => {
const builder = new Builder({
entry: '/project/folder/error',
Expand Down
15 changes: 15 additions & 0 deletions packages/@aws-cdk/aws-lambda-nodejs/test/function.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,21 @@ test('NodejsFunction with .js handler', () => {
}));
});

// The containerEnvironment prop should be passed through to the Builder
// as its `environment` option (Builder is mocked in this suite).
test('NodejsFunction with container env vars', () => {
// WHEN
new NodejsFunction(stack, 'handler1', {
containerEnvironment: {
KEY: 'VALUE',
},
});

expect(Builder).toHaveBeenCalledWith(expect.objectContaining({
environment: {
KEY: 'VALUE',
},
}));
});

test('throws when entry is not js/ts', () => {
expect(() => new NodejsFunction(stack, 'Fn', {
entry: 'handler.py',
Expand Down
7 changes: 7 additions & 0 deletions packages/@aws-cdk/aws-s3/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,13 @@ bucket.addToResourcePolicy(new iam.PolicyStatement({
}));
```

The bucket policy can be directly accessed after creation to add statements or
adjust the removal policy.

```ts
bucket.policy?.applyRemovalPolicy(RemovalPolicy.RETAIN);
```

Most of the time, you won't have to manipulate the bucket policy directly.
Instead, buckets have "grant" methods called to give prepackaged sets of permissions
to other resources. For example:
Expand Down
Loading

0 comments on commit 351df5a

Please sign in to comment.