Merged
17 changes: 11 additions & 6 deletions .travis.yml
@@ -1,22 +1,27 @@
# Enable container based builds
sudo: required
language: python
dist: xenial

services:
- docker

python:
- "2.7"
- "3.6"
- "3.7"

# Enable 3.7 without globally enabling sudo and dist: xenial for other build jobs
matrix:
include:
- python: 3.7
dist: xenial
sudo: true
addons:
apt:
packages:
# Xenial images don't have jdk8 installed by default.
- openjdk-8-jdk

before_install:
# Use the JDK8 that we installed
- JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
- PATH=$JAVA_HOME/bin:$PATH

- nvm install 8.10
- npm --version
- node --version
2 changes: 1 addition & 1 deletion requirements/base.txt
@@ -12,5 +12,5 @@ dateparser~=0.7
python-dateutil~=2.6
pathlib2~=2.3.2; python_version<"3.4"
requests==2.20.1
aws_lambda_builders==0.0.5
aws_lambda_builders==0.1.0
Contributor:
0.1.0 🕺

serverlessrepo==0.1.5
7 changes: 4 additions & 3 deletions samcli/commands/build/command.py
@@ -30,9 +30,10 @@
\b
Supported Runtimes
------------------
1. Python2.7\n
2. Python3.6\n
3. Python3.7\n
1. Python 2.7, 3.6, 3.7 using PIP\n
2. Nodejs 8.10, 6.10 using NPM\n
3. Ruby 2.5 using Bundler\n
4. Java 8 using Gradle\n
\b
Examples
--------
30 changes: 26 additions & 4 deletions samcli/lib/build/app_builder.py
@@ -144,13 +144,33 @@ def update_template(self, template_dict, original_template_path, built_artifacts
return template_dict

def _build_function(self, function_name, codeuri, runtime):
config = get_workflow_config(runtime)
"""
Given the function information, this method will build the Lambda function. Depending on the configuration
it will either build the function in process or by spinning up a Docker container.

# Create the arguments to pass to the builder
Parameters
----------
function_name : str
Name or LogicalId of the function

codeuri : str
Path to where the code lives

runtime : str
AWS Lambda function runtime

Returns
-------
str
Path to the location where built artifacts are available
"""

# Create the arguments to pass to the builder
# Code is always relative to the given base directory.
code_dir = str(pathlib.Path(self._base_dir, codeuri).resolve())

config = get_workflow_config(runtime, code_dir, self._base_dir)

# artifacts directory will be created by the builder
artifacts_dir = str(pathlib.Path(self._build_dir, function_name))

@@ -186,7 +206,8 @@ def _build_function_in_process(self,
artifacts_dir,
scratch_dir,
manifest_path,
runtime=runtime)
runtime=runtime,
executable_search_paths=config.executable_search_paths)
except LambdaBuilderError as ex:
raise BuildError(str(ex))

@@ -212,7 +233,8 @@ def _build_function_on_container(self, # pylint: disable=too-many-locals
runtime,
log_level=log_level,
optimizations=None,
options=None)
options=None,
executable_search_paths=config.executable_search_paths)

try:
try:
132 changes: 115 additions & 17 deletions samcli/lib/build/workflow_config.py
@@ -2,60 +2,158 @@
Contains Builder Workflow Configs for different Runtimes
"""

import os
import logging
from collections import namedtuple


CONFIG = namedtuple('Capability', ["language", "dependency_manager", "application_framework", "manifest_name"])
LOG = logging.getLogger(__name__)


CONFIG = namedtuple('Capability', ["language", "dependency_manager", "application_framework", "manifest_name",
"executable_search_paths"])

PYTHON_PIP_CONFIG = CONFIG(
language="python",
dependency_manager="pip",
application_framework=None,
manifest_name="requirements.txt")
manifest_name="requirements.txt",
executable_search_paths=None)

NODEJS_NPM_CONFIG = CONFIG(
language="nodejs",
dependency_manager="npm",
application_framework=None,
manifest_name="package.json")
manifest_name="package.json",
executable_search_paths=None)

RUBY_BUNDLER_CONFIG = CONFIG(
language="ruby",
dependency_manager="bundler",
application_framework=None,
manifest_name="Gemfile")
manifest_name="Gemfile",
executable_search_paths=None)

JAVA_GRADLE_CONFIG = CONFIG(
language="java",
dependency_manager="gradle",
application_framework=None,
manifest_name="build.gradle",
executable_search_paths=None)


class UnsupportedRuntimeException(Exception):
pass


def get_workflow_config(runtime):
def get_workflow_config(runtime, code_dir, project_dir):
"""
Get a workflow config that corresponds to the runtime provided
Get a workflow config that corresponds to the runtime provided. This method examines contents of the project
and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is
based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a
workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest.

Parameters
----------
runtime str
The runtime of the config

code_dir str
Directory where Lambda function code is present

project_dir str
Root of the Serverless application project.

Returns
-------
namedtuple(Capability)
namedtuple that represents the Builder Workflow Config
"""

workflow_config_by_runtime = {
"python2.7": PYTHON_PIP_CONFIG,
"python3.6": PYTHON_PIP_CONFIG,
"python3.7": PYTHON_PIP_CONFIG,
"nodejs4.3": NODEJS_NPM_CONFIG,
"nodejs6.10": NODEJS_NPM_CONFIG,
"nodejs8.10": NODEJS_NPM_CONFIG,
"ruby2.5": RUBY_BUNDLER_CONFIG
selectors_by_runtime = {
"python2.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG),
"python3.6": BasicWorkflowSelector(PYTHON_PIP_CONFIG),
"python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG),
"nodejs4.3": BasicWorkflowSelector(NODEJS_NPM_CONFIG),
"nodejs6.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG),
"nodejs8.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG),
"ruby2.5": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG),

# When Maven builder exists, add to this list so we can automatically choose a builder based on the supported
# manifest
"java8": ManifestWorkflowSelector([
# Gradle builder needs custom executable paths to find `gradlew` binary
JAVA_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir])
]),
}

try:
return workflow_config_by_runtime[runtime]
except KeyError:
if runtime not in selectors_by_runtime:
Contributor:
It's much cleaner if you do

try:
  selector = selectors_by_runtime[runtime]
  config = selector.get_config(code_dir, project_dir)
except KeyError:
  #blah 
except ValueError:
  #another blah 

Also more pythonic :)

Contributor (Author):
Not really. try/catch blocks should be scoped very tightly to the code you expect to throw. So when you know two statements will throw for two different reasons, I would rather have them in separate try blocks or let them throw very specific exceptions, not KeyError & ValueError. That's why I made it this way.

raise UnsupportedRuntimeException("'{}' runtime is not supported".format(runtime))

selector = selectors_by_runtime[runtime]

try:
config = selector.get_config(code_dir, project_dir)
return config
except ValueError as ex:
raise UnsupportedRuntimeException("Unable to find a supported build workflow for runtime '{}'. Reason: {}"
.format(runtime, str(ex)))


class BasicWorkflowSelector(object):
"""
Basic workflow selector that returns the first available configuration in the given list of configurations
Contributor:
Is this going to be extended in the future? What is the value of this class if it just returns the first one?

Contributor (Author):
For inheritance. I didn't create an abstract base class because it didn't seem helpful, but in the future we will create a new subclass that looks up samrc to find the workflow config to use for a given runtime. This can be useful for provided runtimes or if a customer wants to override a default configuration (a rough sketch of such a selector appears at the end of this file's changes below).

"""

def __init__(self, configs):

if not isinstance(configs, list):
configs = [configs]

self.configs = configs

def get_config(self, code_dir, project_dir):
"""
Returns the first available configuration
"""
return self.configs[0]


class ManifestWorkflowSelector(BasicWorkflowSelector):
"""
Selects a workflow by examining the directories for presence of a supported manifest
"""

def get_config(self, code_dir, project_dir):
"""
Finds a configuration by looking for a manifest in the given directories.

Returns
-------
samcli.lib.build.workflow_config.CONFIG
A supported configuration if one is found

Raises
------
ValueError
If none of the supported manifests files are found
"""

# Search for manifest first in code directory and then in the project directory.
Contributor:
How this works and is resolved needs to be clearly documented (somewhere)

Contributor (Author):
I will add to the help text for now

# Search order is important here because we want to prefer the manifest present within the code directory over
# a manifest present in project directory.
search_dirs = [code_dir, project_dir]
LOG.debug("Looking for a supported build workflow in following directories: %s", search_dirs)

for config in self.configs:

if any([self._has_manifest(config, directory) for directory in search_dirs]):
return config

raise ValueError("None of the supported manifests '{}' were found in the following paths '{}'".format(
[config.manifest_name for config in self.configs],
search_dirs))

@staticmethod
def _has_manifest(config, directory):
return os.path.exists(os.path.join(directory, config.manifest_name))
Contributor:
One day you will use pathlib.. one day..

Contributor (Author):
when we move to py3. Pathlib in py2 is really confusing
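
To illustrate the extension point the author mentions in the BasicWorkflowSelector discussion (a selector that reads the workflow from a user configuration file such as samrc), here is a rough, hypothetical sketch. The file name, its JSON format, and the "build" key are assumptions for illustration only and are not part of this PR:

import json
import os


class SamrcWorkflowSelector(BasicWorkflowSelector):
    """
    Hypothetical selector: lets a user pin or override a workflow via a
    config file (assumed here to be a JSON ".samrc" in the project root)
    instead of relying on manifest detection.
    """

    def get_config(self, code_dir, project_dir):
        samrc_path = os.path.join(project_dir, ".samrc")

        if not os.path.exists(samrc_path):
            # No user config: fall back to the first configured workflow
            return super(SamrcWorkflowSelector, self).get_config(code_dir, project_dir)

        with open(samrc_path) as fp:
            overrides = json.load(fp).get("build", {})

        # Override only the CONFIG fields the user specified (e.g.
        # "dependency_manager" or "executable_search_paths")
        return self.configs[0]._replace(**overrides)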

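Putting the pieces of workflow_config.py together, a minimal usage sketch follows; the directory paths are hypothetical, and the printed values simply restate what the code above returns for a Java 8 Gradle project:

from samcli.lib.build.workflow_config import get_workflow_config, UnsupportedRuntimeException

code_dir = "/my/app/HelloWorldFunction"   # hypothetical; contains build.gradle and gradlew
project_dir = "/my/app"                   # hypothetical; contains template.yaml

config = get_workflow_config("java8", code_dir, project_dir)
print(config.language)                    # "java"
print(config.dependency_manager)          # "gradle"
print(config.executable_search_paths)     # [code_dir, project_dir], used to locate gradlew

# A runtime with no registered selector raises UnsupportedRuntimeException;
# "java8" with no build.gradle in either directory raises it too, wrapping the
# ValueError from ManifestWorkflowSelector.
try:
    get_workflow_config("go1.x", code_dir, project_dir)
except UnsupportedRuntimeException as ex:
    print(ex)
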
8 changes: 6 additions & 2 deletions samcli/local/docker/lambda_build_container.py
@@ -35,6 +35,7 @@ def __init__(self, # pylint: disable=too-many-locals
runtime,
optimizations=None,
options=None,
executable_search_paths=None,
log_level=None):

abs_manifest_path = pathlib.Path(manifest_path).resolve()
@@ -53,7 +54,8 @@
manifest_file_name,
runtime,
optimizations,
options)
options,
executable_search_paths)

image = LambdaBuildContainer._get_image(runtime)
entry = LambdaBuildContainer._get_entrypoint(request_json)
@@ -96,7 +98,8 @@ def _make_request(protocol_version,
manifest_file_name,
runtime,
optimizations,
options):
options,
Contributor:
Just making sure, the protocol version is passed in by importing the builders library, right?

Contributor (Author):
Correct.

executable_search_paths):

return json.dumps({
"jsonschema": "2.0",
@@ -119,6 +122,7 @@
"runtime": runtime,
"optimizations": optimizations,
"options": options,
"executable_search_paths": executable_search_paths
}
})

19 changes: 18 additions & 1 deletion tests/integration/buildcmd/build_integ_base.py
@@ -1,6 +1,9 @@
import os
import shutil
import tempfile
import logging
import subprocess
import json
from unittest import TestCase

import docker
@@ -10,10 +13,12 @@
except ImportError:
from pathlib2 import Path


from samcli.yamlhelper import yaml_parse


LOG = logging.getLogger(__name__)


class BuildIntegBase(TestCase):

@classmethod
@@ -93,3 +98,15 @@ def _verify_resource_property(self, template_path, logical_id, property, expecte
with open(template_path, 'r') as fp:
template_dict = yaml_parse(fp.read())
self.assertEquals(expected_value, template_dict["Resources"][logical_id]["Properties"][property])

def _verify_invoke_built_function(self, template_path, function_logical_id, overrides, expected_result):
LOG.info("Invoking built function '%s'", function_logical_id)

cmdlist = [self.cmd, "local", "invoke", function_logical_id, "-t", str(template_path), "--no-event",
"--parameter-overrides", overrides]

process = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
process.wait()

process_stdout = b"".join(process.stdout.readlines()).strip().decode('utf-8')
self.assertEquals(json.loads(process_stdout), expected_result)
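
A hypothetical usage from a build integration test, showing how this helper is intended to be called; every value below is a made-up placeholder (real tests would derive the template path, logical id, parameter overrides, and expected payload from the test fixtures):

# All values are illustrative placeholders
self._verify_invoke_built_function(
    template_path="/tmp/sam-build/template.yaml",
    function_logical_id="Function",
    overrides="Runtime=python3.6 CodeUri=Python Handler=main.handler",
    expected_result={"pi": "3.14"})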