diff --git a/.appveyor.yml b/.appveyor.yml index 2d955e670..f526a3c1d 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -2,6 +2,9 @@ version: 1.0.{build} image: Visual Studio 2017 environment: + GOPATH: c:\gopath + GOVERSION: 1.11 + GRADLE_OPTS: -Dorg.gradle.daemon=false matrix: @@ -16,12 +19,26 @@ install: # To run Nodejs workflow integ tests - ps: Install-Product node 8.10 -- "set PATH=%PYTHON%\\Scripts;%PYTHON%\\bin;%PATH%" +- "set PATH=%PYTHON%;%PYTHON%\\Scripts;%PYTHON%\\bin;%PATH%" - "%PYTHON%\\python.exe -m pip install -r requirements/dev.txt" - "%PYTHON%\\python.exe -m pip install -e ." - "set PATH=C:\\Ruby25-x64\\bin;%PATH%" -- "gem install bundler --no-ri --no-rdoc" +- "gem --version" +- "gem install bundler -v 1.17.3" - "bundler --version" +- "echo %PATH%" + +# setup go +- rmdir c:\go /s /q +- "choco install golang" +- "choco install bzr" +- "choco install dep" +- setx PATH "C:\go\bin;C:\gopath\bin;C:\Program Files (x86)\Bazaar\;C:\Program Files\Mercurial;%PATH%;" +- "go version" +- "go env" + +# setup Gradle +- "choco install gradle" test_script: - "%PYTHON%\\python.exe -m pytest --cov aws_lambda_builders --cov-report term-missing tests/unit tests/functional" diff --git a/.gitignore b/.gitignore index 7fdec006e..130accaf0 100644 --- a/.gitignore +++ b/.gitignore @@ -383,4 +383,6 @@ $RECYCLE.BIN/ /Dockerfile -# End of https://www.gitignore.io/api/osx,node,macos,linux,python,windows,pycharm,intellij,sublimetext,visualstudiocode +tests/integration/workflows/go_dep/data/src/*/vendor/* + +# End of https://www.gitignore.io/api/osx,node,macos,linux,python,windows,pycharm,intellij,sublimetext,visualstudiocode \ No newline at end of file diff --git a/.pylintrc b/.pylintrc index 2dbf71e74..c30f1c8d6 100644 --- a/.pylintrc +++ b/.pylintrc @@ -9,7 +9,7 @@ # Add files or directories to the blacklist. They should be base names, not # paths. -ignore=compat.py +ignore=compat.py, utils.py # Pickle collected data for later comparisons. persistent=yes @@ -360,4 +360,4 @@ int-import-graph= # Exceptions that will emit a warning when being caught. Defaults to # "Exception" -overgeneral-exceptions=Exception \ No newline at end of file +overgeneral-exceptions=Exception diff --git a/.travis.yml b/.travis.yml index c22f0aebc..039f25023 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,15 @@ install: # To run Nodejs workflow integ tests - nvm install 8.10.0 - nvm use 8.10.0 + # To run Ruby workflow integ tests + - rvm install ruby-2.5.3 + - rvm use ruby-2.5.3 + + # Go workflow integ tests require Go 1.11+ + - eval "$(gimme 1.11.2)" + - go version + + - go get -u github.com/golang/dep/cmd/dep # Install the code requirements - make init diff --git a/DESIGN.md b/DESIGN.md index 09b93023f..708937948 100644 --- a/DESIGN.md +++ b/DESIGN.md @@ -39,7 +39,7 @@ If your build action implementation requires 3rd party libraries, here is how yo Each build action has its own design document. -* [python-pip](./lambda_builders/actions/python_pip/DESIGN.md) +* [python-pip](./aws_lambda_builders/workflows/python_pip/DESIGN.md) ### Builders Library diff --git a/NOTICE b/NOTICE index 06e1c1af1..674d01f45 100644 --- a/NOTICE +++ b/NOTICE @@ -1,2 +1,6 @@ AWS Lambda Builders Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ +The function "which" at aws_lambda_builders/utils.py was copied from https://github.com/python/cpython/blob/3.7/Lib/shutil.py +SPDX-License-Identifier: Python-2.0 +Copyright 2019 by the Python Software Foundation \ No newline at end of file diff --git a/aws_lambda_builders/__init__.py b/aws_lambda_builders/__init__.py index 1dc20590b..6c140c2f8 100644 --- a/aws_lambda_builders/__init__.py +++ b/aws_lambda_builders/__init__.py @@ -1,5 +1,5 @@ """ AWS Lambda Builder Library """ -__version__ = '0.0.5' -RPC_PROTOCOL_VERSION = "0.1" +__version__ = '0.1.0' +RPC_PROTOCOL_VERSION = "0.2" diff --git a/aws_lambda_builders/__main__.py b/aws_lambda_builders/__main__.py index 142f4eb1e..bc01b46e7 100644 --- a/aws_lambda_builders/__main__.py +++ b/aws_lambda_builders/__main__.py @@ -10,10 +10,11 @@ import json import os import logging +import re from aws_lambda_builders.builder import LambdaBuilder from aws_lambda_builders.exceptions import WorkflowNotFoundError, WorkflowUnknownError, WorkflowFailedError - +from aws_lambda_builders import RPC_PROTOCOL_VERSION as lambda_builders_protocol_version log_level = int(os.environ.get("LAMBDA_BUILDERS_LOG_LEVEL", logging.INFO)) @@ -24,6 +25,8 @@ LOG = logging.getLogger(__name__) +VERSION_REGEX = re.compile("^([0-9])+.([0-9]+)$") + def _success_response(request_id, artifacts_dir): return json.dumps({ @@ -46,6 +49,31 @@ def _error_response(request_id, http_status_code, message): }) +def _parse_version(version_string): + + if VERSION_REGEX.match(version_string): + return float(version_string) + else: + ex = "Protocol Version does not match : {}".format(VERSION_REGEX.pattern) + LOG.debug(ex) + raise ValueError(ex) + + +def version_compatibility_check(version): + # The following check is between current protocol version vs version of the protocol + # with which aws-lambda-builders is called. + # Example: + # 0.2 < 0.2 comparison will fail, don't throw a value Error saying incompatible version. 
+ # 0.2 < 0.3 comparison will pass, throwing a ValueError + # 0.2 < 0.1 comparison will fail, don't throw a value Error saying incompatible version + + if _parse_version(lambda_builders_protocol_version) < version: + ex = "Incompatible Protocol Version : {}, " \ + "Current Protocol Version: {}".format(version, lambda_builders_protocol_version) + LOG.error(ex) + raise ValueError(ex) + + def _write_response(response, exit_code): sys.stdout.write(response) sys.stdout.flush() # Make sure it is written @@ -77,11 +105,20 @@ def main(): # pylint: disable=too-many-statements response = _error_response(request_id, -32601, "Method unavailable") return _write_response(response, 1) + try: + protocol_version = _parse_version(params.get("__protocol_version")) + version_compatibility_check(protocol_version) + + except ValueError: + response = _error_response(request_id, 505, "Unsupported Protocol Version") + return _write_response(response, 1) + capabilities = params["capability"] supported_workflows = params.get("supported_workflows") exit_code = 0 response = None + try: builder = LambdaBuilder(language=capabilities["language"], dependency_manager=capabilities["dependency_manager"], @@ -93,6 +130,7 @@ def main(): # pylint: disable=too-many-statements params["artifacts_dir"], params["scratch_dir"], params["manifest_path"], + executable_search_paths=params.get('executable_search_paths', None), runtime=params["runtime"], optimizations=params["optimizations"], options=params["options"]) diff --git a/aws_lambda_builders/binary_path.py b/aws_lambda_builders/binary_path.py new file mode 100644 index 000000000..78467666f --- /dev/null +++ b/aws_lambda_builders/binary_path.py @@ -0,0 +1,21 @@ +""" +Class containing resolved path of binary given a validator and a resolver and the name of the binary. +""" + + +class BinaryPath(object): + + def __init__(self, resolver, validator, binary, binary_path=None): + self.resolver = resolver + self.validator = validator + self.binary = binary + self._binary_path = binary_path + self.path_provided = True if self._binary_path else False + + @property + def binary_path(self): + return self._binary_path + + @binary_path.setter + def binary_path(self, binary_path): + self._binary_path = binary_path diff --git a/aws_lambda_builders/builder.py b/aws_lambda_builders/builder.py index 1872bfe29..1012a5d02 100644 --- a/aws_lambda_builders/builder.py +++ b/aws_lambda_builders/builder.py @@ -7,7 +7,6 @@ import logging from aws_lambda_builders.registry import get_workflow, DEFAULT_REGISTRY -from aws_lambda_builders.validate import RuntimeValidator from aws_lambda_builders.workflow import Capability LOG = logging.getLogger(__name__) @@ -57,7 +56,7 @@ def __init__(self, language, dependency_manager, application_framework, supporte LOG.debug("Found workflow '%s' to support capabilities '%s'", self.selected_workflow_cls.NAME, self.capability) def build(self, source_dir, artifacts_dir, scratch_dir, manifest_path, - runtime=None, optimizations=None, options=None): + runtime=None, optimizations=None, options=None, executable_search_paths=None): """ Actually build the code by running workflows @@ -90,9 +89,11 @@ def build(self, source_dir, artifacts_dir, scratch_dir, manifest_path, :type options: dict :param options: Optional dictionary of options ot pass to build action. **Not supported**. + + :type executable_search_paths: list + :param executable_search_paths: + Additional list of paths to search for executables required by the workflow. 
""" - if runtime: - self._validate_runtime(runtime) if not os.path.exists(scratch_dir): os.makedirs(scratch_dir) @@ -103,20 +104,10 @@ def build(self, source_dir, artifacts_dir, scratch_dir, manifest_path, manifest_path, runtime=runtime, optimizations=optimizations, - options=options) + options=options, + executable_search_paths=executable_search_paths) return workflow.run() - def _validate_runtime(self, runtime): - """ - validate runtime and local runtime version to make sure they match - - :type runtime: str - :param runtime: - String matching a lambda runtime eg: python3.6 - """ - RuntimeValidator.validate_runtime(required_language=self.capability.language, - required_runtime=runtime) - def _clear_workflows(self): DEFAULT_REGISTRY.clear() diff --git a/aws_lambda_builders/exceptions.py b/aws_lambda_builders/exceptions.py index 656763667..737188510 100644 --- a/aws_lambda_builders/exceptions.py +++ b/aws_lambda_builders/exceptions.py @@ -18,7 +18,7 @@ class UnsupportedManifestError(LambdaBuilderError): class MisMatchRuntimeError(LambdaBuilderError): MESSAGE = "{language} executable found in your path does not " \ "match runtime. " \ - "\n Expected version: {required_runtime}, Found version: {found_runtime}. " \ + "\n Expected version: {required_runtime}, Found version: {runtime_path}. " \ "\n Possibly related: https://github.com/awslabs/aws-lambda-builders/issues/30" diff --git a/aws_lambda_builders/path_resolver.py b/aws_lambda_builders/path_resolver.py new file mode 100644 index 000000000..3d05e135c --- /dev/null +++ b/aws_lambda_builders/path_resolver.py @@ -0,0 +1,29 @@ +""" +Basic Path Resolver that looks for the executable by runtime first, before proceeding to 'language' in PATH. +""" + +from aws_lambda_builders.utils import which + + +class PathResolver(object): + + def __init__(self, binary, runtime, executable_search_paths=None): + self.binary = binary + self.runtime = runtime + self.executables = [self.runtime, self.binary] + self.executable_search_paths = executable_search_paths + + def _which(self): + exec_paths = [] + for executable in [executable for executable in self.executables if executable is not None]: + paths = which(executable, executable_search_paths=self.executable_search_paths) + exec_paths.extend(paths) + + if not exec_paths: + raise ValueError("Path resolution for runtime: {} of binary: " + "{} was not successful".format(self.runtime, self.binary)) + return exec_paths + + @property + def exec_paths(self): + return self._which() diff --git a/aws_lambda_builders/utils.py b/aws_lambda_builders/utils.py index e7ebc3941..d5b2ec9aa 100644 --- a/aws_lambda_builders/utils.py +++ b/aws_lambda_builders/utils.py @@ -3,6 +3,7 @@ """ import shutil +import sys import os import logging @@ -57,3 +58,92 @@ def copytree(source, destination, ignore=None): copytree(new_source, new_destination, ignore=ignore) else: shutil.copy2(new_source, new_destination) + +# NOTE: The below function is copied from Python source code and modified +# slightly to return a list of paths that match a given command +# instead of returning just the first match + +# The function "which" at aws_lambda_builders/utils.py was copied from https://github.com/python/cpython/blob/3.7/Lib/shutil.py +# SPDX-License-Identifier: Python-2.0 +# Copyright 2019 by the Python Software Foundation + + +def which(cmd, mode=os.F_OK | os.X_OK, executable_search_paths=None): # pragma: no cover + """Given a command, mode, and executable search paths list, return the paths which + conforms to the given mode on the PATH with the 
prepended additional search paths, + or None if there is no such file. + `mode` defaults to os.F_OK | os.X_OK. the default search `path` defaults + to the result of os.environ.get("PATH") + Note: This function was backported from the Python 3 source code. + + :type cmd: str + :param cmd: + Executable to be looked up in PATH. + + :type mode: str + :param mode: + Modes of access for the executable. + + :type executable_search_paths: list + :param executable_search_paths: + List of paths to look for `cmd` in preference order. + """ + + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + + def _access_check(fn, mode): + return os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn) + + # If we're given a path with a directory part, look it up directly + # rather than referring to PATH directories. This includes checking + # relative to the current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + + return None + + path = os.environ.get("PATH", os.defpath) + + if not path: + return None + + path = path.split(os.pathsep) + + if executable_search_paths: + path = executable_search_paths + path + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if os.curdir not in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path + # extensions. This will allow us to short circuit when given + # "python.exe". If it does match, only test that one, otherwise we + # have to try others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + paths = [] + + for dir in path: + normdir = os.path.normcase(dir) + if normdir not in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + paths.append(name) + return paths diff --git a/aws_lambda_builders/validate.py b/aws_lambda_builders/validate.py deleted file mode 100644 index 82d34d3a9..000000000 --- a/aws_lambda_builders/validate.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Supported Runtimes and their validations. -""" - -import logging -import os -import subprocess - -from aws_lambda_builders.exceptions import MisMatchRuntimeError - -LOG = logging.getLogger(__name__) - - -def validate_python_cmd(required_language, required_runtime_version): - major, minor = required_runtime_version.replace(required_language, "").split('.') - cmd = [ - "python", - "-c", - "import sys; " - "sys.stdout.write('python' + str(sys.version_info.major) + '.' + str(sys.version_info.minor)); " - "assert sys.version_info.major == {major} " - "and sys.version_info.minor == {minor}".format( - major=major, - minor=minor)] - return cmd - - -_RUNTIME_VERSION_RESOLVER = { - "python": validate_python_cmd -} - - -class RuntimeValidator(object): - SUPPORTED_RUNTIMES = [ - "python2.7", - "python3.6", - "python3.7", - ] - - @classmethod - def has_runtime(cls, runtime): - """ - Checks if the runtime is supported. - :param string runtime: Runtime to check - :return bool: True, if the runtime is supported. 
- """ - return runtime in cls.SUPPORTED_RUNTIMES - - @classmethod - def validate_runtime(cls, required_language, required_runtime): - """ - Checks if the language supplied matches the required lambda runtime - :param string required_language: language to check eg: python - :param string required_runtime: runtime to check eg: python3.6 - :raises MisMatchRuntimeError: Version mismatch of the language vs the required runtime - """ - if required_language in _RUNTIME_VERSION_RESOLVER: - if not RuntimeValidator.has_runtime(required_runtime): - LOG.warning("'%s' runtime is not " - "a supported runtime", required_runtime) - return - cmd = _RUNTIME_VERSION_RESOLVER[required_language](required_language, required_runtime) - - p = subprocess.Popen(cmd, - cwd=os.getcwd(), - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - found_runtime, _ = p.communicate() - if p.returncode != 0: - raise MisMatchRuntimeError(language=required_language, - required_runtime=required_runtime, - found_runtime=str(found_runtime.decode('utf-8'))) - else: - LOG.warning("'%s' runtime has not " - "been validated!", required_language) diff --git a/aws_lambda_builders/validator.py b/aws_lambda_builders/validator.py new file mode 100644 index 000000000..aa0fa1528 --- /dev/null +++ b/aws_lambda_builders/validator.py @@ -0,0 +1,18 @@ +""" +No-op validator that does not validate the runtime_path for a specified language. +""" + +import logging + +LOG = logging.getLogger(__name__) + + +class RuntimeValidator(object): + + def __init__(self, runtime): + self.runtime = runtime + self._runtime_path = None + + def validate(self, runtime_path): + self._runtime_path = runtime_path + return runtime_path diff --git a/aws_lambda_builders/workflow.py b/aws_lambda_builders/workflow.py index 140887de8..8a6032602 100644 --- a/aws_lambda_builders/workflow.py +++ b/aws_lambda_builders/workflow.py @@ -1,15 +1,18 @@ """ Implementation of a base workflow """ - +import functools import os import logging from collections import namedtuple import six +from aws_lambda_builders.binary_path import BinaryPath +from aws_lambda_builders.path_resolver import PathResolver +from aws_lambda_builders.validator import RuntimeValidator from aws_lambda_builders.registry import DEFAULT_REGISTRY -from aws_lambda_builders.exceptions import WorkflowFailedError, WorkflowUnknownError +from aws_lambda_builders.exceptions import WorkflowFailedError, WorkflowUnknownError, MisMatchRuntimeError from aws_lambda_builders.actions import ActionFailedError LOG = logging.getLogger(__name__) @@ -22,6 +25,41 @@ Capability = namedtuple('Capability', ["language", "dependency_manager", "application_framework"]) +# TODO: Move sanitize out to its own class. +def sanitize(func): + """ + sanitize the executable path of the runtime specified by validating it. + :param func: Workflow's run method is sanitized + """ + + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + valid_paths = [] + # NOTE: we need to access binaries to get paths and resolvers, before validating. 
+ binaries_copy = self.binaries + for binary, binary_path in binaries_copy.items(): + validator = binary_path.validator + exec_paths = binary_path.resolver.exec_paths if not binary_path.path_provided else binary_path.binary_path + for executable_path in exec_paths: + valid_path = None + try: + valid_path = validator.validate(executable_path) + except MisMatchRuntimeError as ex: + LOG.debug("Invalid executable for %s at %s", + binary, executable_path, exc_info=str(ex)) + if valid_path: + binary_path.binary_path = valid_path + valid_paths.append(valid_path) + break + self.binaries = binaries_copy + if len(self.binaries) != len(valid_paths): + raise WorkflowFailedError(workflow_name=self.NAME, + action_name=None, + reason='Binary validation failed!') + func(self, *args, **kwargs) + return wrapper + + class _WorkflowMetaClass(type): """ A metaclass that maintains the registry of loaded builders @@ -79,6 +117,7 @@ def __init__(self, scratch_dir, manifest_path, runtime=None, + executable_search_paths=None, optimizations=None, options=None): """ @@ -114,6 +153,10 @@ def __init__(self, :type options: dict :param options: Optional dictionary of options ot pass to build action. **Not supported**. + + :type executable_search_paths: list + :param executable_search_paths: + Optional, Additional list of paths to search for executables required by the workflow. """ self.source_dir = source_dir @@ -123,9 +166,11 @@ def __init__(self, self.runtime = runtime self.optimizations = optimizations self.options = options + self.executable_search_paths = executable_search_paths # Actions are registered by the subclasses as they seem fit self.actions = [] + self._binaries = {} def is_supported(self): """ @@ -138,6 +183,33 @@ def is_supported(self): return True + def get_resolvers(self): + """ + Non specialized path resolver that just returns the list of executable for the runtime on the path. + """ + return [PathResolver(runtime=self.runtime, binary=self.CAPABILITY.language, + executable_search_paths=self.executable_search_paths)] + + def get_validators(self): + """ + No-op validator that does not validate the runtime_path. + """ + return [RuntimeValidator(runtime=self.runtime)] + + @property + def binaries(self): + if not self._binaries: + resolvers = self.get_resolvers() + validators = self.get_validators() + self._binaries = {resolver.binary: BinaryPath(resolver=resolver, validator=validator, binary=resolver.binary) + for resolver, validator in zip(resolvers, validators)} + return self._binaries + + @binaries.setter + def binaries(self, binaries): + self._binaries = binaries + + @sanitize def run(self): """ Actually perform the build by executing registered actions. 
diff --git a/aws_lambda_builders/workflows/__init__.py b/aws_lambda_builders/workflows/__init__.py
index c531a1bc2..1a3ed60ec 100644
--- a/aws_lambda_builders/workflows/__init__.py
+++ b/aws_lambda_builders/workflows/__init__.py
@@ -5,3 +5,6 @@
 import aws_lambda_builders.workflows.python_pip
 import aws_lambda_builders.workflows.nodejs_npm
 import aws_lambda_builders.workflows.ruby_bundler
+import aws_lambda_builders.workflows.go_dep
+import aws_lambda_builders.workflows.go_modules
+import aws_lambda_builders.workflows.java_gradle
diff --git a/aws_lambda_builders/workflows/go_dep/DESIGN.md b/aws_lambda_builders/workflows/go_dep/DESIGN.md
new file mode 100644
index 000000000..e5cf63d00
--- /dev/null
+++ b/aws_lambda_builders/workflows/go_dep/DESIGN.md
@@ -0,0 +1,33 @@
+# Go Dep - Lambda Builder
+
+## Scope
+Building Go projects using the dep tool (https://github.com/golang/dep) is rather simple: if you were to do
+this by hand, you would run these commands:
+
+ - `dep ensure`
+ - `GOOS=linux GOARCH=amd64 go build -o handler main.go`
+ - `zip -r source.zip`
+
+The scope of the Go dep builder is to create a macro for these commands to ensure that spelling and paths are correct.
+We don't have to care about the versioning of either the Go or dep tooling, since Lambda doesn't care either, and so it remains
+a user preference.
+
+## Implementation
+The go-dep builder runs the above commands with some minor tweaks; the commands run on behalf of the user are:
+
+ 1. dep ensure
+ 2. GOOS=linux GOARCH=amd64 go build -o $ARTIFACT_DIR/$HANDLER_NAME $SOURCE_DIR
+
+The main difference is that we want to capture the compiled binary for packaging later, so the binary's
+output path is set to the artifact dir provided by the caller.
+
+## Challenges
+There are no major challenges for building Go; most problems have been abstracted away by the Go tooling.
+
+## Notes
+Go does native cross-compilation regardless of the platform compiling it. Regardless of how the user builds their code, it will run on
+AWS Lambda.
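For reference, a minimal sketch of how the two commands above map to subprocess calls, assuming placeholder paths; the actual workflow wraps these calls in the `DepEnsureAction` and `GoBuildAction` actions introduced later in this change:

```python
import os
import subprocess

base_dir = "/path/to/project"            # assumed: directory containing Gopkg.toml and the Go source
output_path = "/tmp/artifacts/handler"   # assumed: artifact dir + handler name

# Resolve dependencies into the vendor/ directory
subprocess.run(["dep", "ensure"], cwd=base_dir, check=True)

# Cross-compile for the Lambda execution environment
env = dict(os.environ, GOOS="linux", GOARCH="amd64")
subprocess.run(["go", "build", "-o", output_path, base_dir],
               cwd=base_dir, env=env, check=True)
```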
+ +### Layers +This pattern might not work for Layers, plugins for go require an extra compilation flag (`-buildmode=plugin`), this would be something +to add later on, should SAM CLI support building layers \ No newline at end of file diff --git a/aws_lambda_builders/workflows/go_dep/__init__.py b/aws_lambda_builders/workflows/go_dep/__init__.py new file mode 100644 index 000000000..1cd6ddb8f --- /dev/null +++ b/aws_lambda_builders/workflows/go_dep/__init__.py @@ -0,0 +1,5 @@ +""" +Builds Go Lambda functions using the `dep` dependency manager +""" + +from .workflow import GoDepWorkflow diff --git a/aws_lambda_builders/workflows/go_dep/actions.py b/aws_lambda_builders/workflows/go_dep/actions.py new file mode 100644 index 000000000..067aac58f --- /dev/null +++ b/aws_lambda_builders/workflows/go_dep/actions.py @@ -0,0 +1,66 @@ +""" +Actions for Go dependency resolution with dep +""" + +import logging +import os + +from aws_lambda_builders.actions import BaseAction, Purpose, ActionFailedError + +from .subproc_exec import ExecutionError + + +LOG = logging.getLogger(__name__) + +class DepEnsureAction(BaseAction): + + """ + A Lambda Builder Action which runs dep to install dependencies from Gopkg.toml + """ + + NAME = "DepEnsure" + DESCRIPTION = "Ensures all dependencies are installed for a project" + PURPOSE = Purpose.RESOLVE_DEPENDENCIES + + def __init__(self, base_dir, subprocess_dep): + super(DepEnsureAction, self).__init__() + + self.base_dir = base_dir + self.subprocess_dep = subprocess_dep + + def execute(self): + try: + self.subprocess_dep.run(["ensure"], + cwd=self.base_dir) + except ExecutionError as ex: + raise ActionFailedError(str(ex)) + +class GoBuildAction(BaseAction): + + """ + A Lambda Builder Action which runs `go build` to create a binary + """ + + NAME = "GoBuild" + DESCRIPTION = "Builds final binary" + PURPOSE = Purpose.COMPILE_SOURCE + + def __init__(self, base_dir, source_path, output_path, subprocess_go, env=None): + super(GoBuildAction, self).__init__() + + self.base_dir = base_dir + self.source_path = source_path + self.output_path = output_path + + self.subprocess_go = subprocess_go + self.env = env if not env is None else {} + + def execute(self): + env = self.env + env.update({"GOOS": "linux", "GOARCH": "amd64"}) + + try: + self.subprocess_go.run(["build", "-o", self.output_path, self.source_path], + cwd=self.source_path, env=env) + except ExecutionError as ex: + raise ActionFailedError(str(ex)) diff --git a/aws_lambda_builders/workflows/go_dep/subproc_exec.py b/aws_lambda_builders/workflows/go_dep/subproc_exec.py new file mode 100644 index 000000000..1bb9f9746 --- /dev/null +++ b/aws_lambda_builders/workflows/go_dep/subproc_exec.py @@ -0,0 +1,93 @@ +""" +Wrapper around calling dep through a subprocess. +""" + +import logging + +LOG = logging.getLogger(__name__) + + +class ExecutionError(Exception): + """ + Exception raised in case binary execution fails. + It will pass on the standard error output from the binary console. + """ + + MESSAGE = "Exec Failed: {}" + + def __init__(self, message): + raw_message = message + if isinstance(message, bytes): + message = message.decode('utf-8') + + try: + Exception.__init__(self, self.MESSAGE.format(message.strip())) + except UnicodeError: + Exception.__init__(self, self.MESSAGE.format(raw_message.strip())) + +class SubprocessExec(object): + + """ + Wrapper around the Dep command line utility, making it + easy to consume execution results. 
+ """ + + def __init__(self, osutils, binary=None): + """ + :type osutils: aws_lambda_builders.workflows.go_dep.utils.OSUtils + :param osutils: An instance of OS Utilities for file manipulation + + :type binary: str + :param binary: Path to the binary. If not set, + the default executable path will be used + """ + self.osutils = osutils + + self.binary = binary + + + def run(self, args, cwd=None, env=None): + + """ + Runs the action. + + :type args: list + :param args: Command line arguments to pass to the binary + + :type cwd: str + :param cwd: Directory where to execute the command (defaults to current dir) + + :rtype: str + :return: text of the standard output from the command + + :raises aws_lambda_builders.workflows.go_dep.dep.ExecutionError: + when the command executes with a non-zero return code. The exception will + contain the text of the standard error output from the command. + + :raises ValueError: if arguments are not provided, or not a list + """ + + if not isinstance(args, list): + raise ValueError("args must be a list") + + if not args: + raise ValueError("requires at least one arg") + + invoke_bin = [self.binary] + args + + LOG.debug("executing binary: %s", invoke_bin) + + p = self.osutils.popen(invoke_bin, + stdout=self.osutils.pipe, + stderr=self.osutils.pipe, + cwd=cwd, + env=env) + + out, err = p.communicate() + + if p.returncode != 0: + raise ExecutionError(message=err) + + out = out.decode('utf-8') if isinstance(out, bytes) else out + + return out.strip() diff --git a/aws_lambda_builders/workflows/go_dep/utils.py b/aws_lambda_builders/workflows/go_dep/utils.py new file mode 100644 index 000000000..7c0d0052e --- /dev/null +++ b/aws_lambda_builders/workflows/go_dep/utils.py @@ -0,0 +1,42 @@ +""" +Commonly used utilities +""" + +import os +import platform +import tarfile +import subprocess + + +class OSUtils(object): + + """ + Wrapper around file system functions, to make it easy to + unit test actions in memory + + TODO: move to somewhere generic + """ + + def joinpath(self, *args): + return os.path.join(*args) + + def popen(self, command, stdout=None, stderr=None, env=None, cwd=None): + p = subprocess.Popen(command, stdout=stdout, stderr=stderr, env=env, cwd=cwd) + return p + + @property + def pipe(self): + return subprocess.PIPE + + @property + def environ(self): + return os.environ.copy() + + def dirname(self, path): + return os.path.dirname(path) + + def abspath(self, path): + return os.path.abspath(path) + + def is_windows(self): + return platform.system().lower() == 'windows' diff --git a/aws_lambda_builders/workflows/go_dep/workflow.py b/aws_lambda_builders/workflows/go_dep/workflow.py new file mode 100644 index 000000000..c2f58ab14 --- /dev/null +++ b/aws_lambda_builders/workflows/go_dep/workflow.py @@ -0,0 +1,63 @@ +""" +Go Dep Workflow +""" + +import logging +import os + +from aws_lambda_builders.actions import CopySourceAction +from aws_lambda_builders.workflow import BaseWorkflow, Capability + +from .actions import DepEnsureAction, GoBuildAction +from .utils import OSUtils +from .subproc_exec import SubprocessExec + +LOG = logging.getLogger(__name__) + +class GoDepWorkflow(BaseWorkflow): + """ + A Lambda builder workflow that knows how to build + Go projects using `dep` + """ + + NAME = "GoDepBuilder" + + CAPABILITY = Capability(language="go", + dependency_manager="dep", + application_framework=None) + + EXCLUDED_FILES = (".aws-sam") + + def __init__(self, + source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + runtime=None, + osutils=None, + 
**kwargs): + + super(GoDepWorkflow, self).__init__(source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + runtime=runtime, + **kwargs) + + options = kwargs["options"] if "options" in kwargs else {} + handler = options.get("artifact_executable_name", None) + + if osutils is None: + osutils = OSUtils() + + # project base name, where the Gopkg.toml and vendor dir are. + base_dir = osutils.abspath(osutils.dirname(manifest_path)) + output_path = osutils.joinpath(osutils.abspath(artifacts_dir), handler) + + subprocess_dep = SubprocessExec(osutils, "dep") + subprocess_go = SubprocessExec(osutils, "go") + + self.actions = [ + DepEnsureAction(base_dir, subprocess_dep), + GoBuildAction(base_dir, osutils.abspath(source_dir), output_path, subprocess_go, env=osutils.environ) + ] diff --git a/aws_lambda_builders/workflows/go_modules/DESIGN.md b/aws_lambda_builders/workflows/go_modules/DESIGN.md new file mode 100644 index 000000000..22b0bf00c --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/DESIGN.md @@ -0,0 +1,38 @@ +## Go - Go Modules Lambda Builder + +### Scope + +This package leverages standard Go tooling available as of Go1.11 to build Go +applications to be deployed in an AWS Lambda environment. The scope of this +builder is to take an existing directory containing customer code, and a +top-level `go.mod` file specifying third party dependencies. The builder will +run `go build` on the project and put the resulting binary in the given +artifacts folder. + +### Interface + +The top level interface is presented by the `GoModulesBuilder` class. There +will be one public method `build`, which takes the provided arguments and +builds a static binary using standard go tools. + +```python +def build(self, source_dir_path, artifacts_dir_path, executable_name): + """Builds a go project onto an output path. + + :type source_dir_path: str + :param source_dir_path: Directory with the source files. + + :type output_path: str + :param output_path: Filename to write the executable output to. +``` + +### Implementation + +The general algorithm for preparing a Go package for use on AWS Lambda +is very simple. It's as follows: + +Pass in GOOS=linux and GOARCH=amd64 to the `go build` command to target the +OS and architecture used on AWS Lambda. Let go tooling handle the +cross-compilation, regardless of the build environment. Move the resulting +static binary to the artifacts folder to be shipped as a single-file zip +archive. 
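A minimal usage sketch of the pieces this change adds, assuming placeholder paths and a pre-resolved `go` executable; in practice `GoModulesWorkflow` wires up the `BinaryPath` via its path resolver and validator:

```python
from aws_lambda_builders.binary_path import BinaryPath
from aws_lambda_builders.workflows.go_modules.builder import GoModulesBuilder
from aws_lambda_builders.workflows.go_modules.utils import OSUtils

# Assumed location of the go executable; normally discovered by the path resolver
go = BinaryPath(resolver=None, validator=None, binary="go",
                binary_path="/usr/local/go/bin/go")

builder = GoModulesBuilder(OSUtils(), binaries={"go": go})
# Cross-compiles the module at the source path into a single static binary
builder.build("/path/to/function", "/tmp/artifacts/handler")
```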
diff --git a/aws_lambda_builders/workflows/go_modules/__init__.py b/aws_lambda_builders/workflows/go_modules/__init__.py new file mode 100644 index 000000000..a46dddfab --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/__init__.py @@ -0,0 +1,5 @@ +""" +Builds Go Lambda functions using standard Go tooling +""" + +from .workflow import GoModulesWorkflow diff --git a/aws_lambda_builders/workflows/go_modules/actions.py b/aws_lambda_builders/workflows/go_modules/actions.py new file mode 100644 index 000000000..39ddedc6c --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/actions.py @@ -0,0 +1,27 @@ +""" +Action to build a Go project using standard Go tooling +""" + +from aws_lambda_builders.actions import BaseAction, Purpose, ActionFailedError +from .builder import BuilderError + + +class GoModulesBuildAction(BaseAction): + + NAME = "Build" + DESCRIPTION = "Building Go package with Go Modules" + PURPOSE = Purpose.COMPILE_SOURCE + + def __init__(self, source_dir, output_path, builder): + self.source_dir = source_dir + self.output_path = output_path + self.builder = builder + + def execute(self): + try: + self.builder.build( + self.source_dir, + self.output_path, + ) + except BuilderError as ex: + raise ActionFailedError(str(ex)) diff --git a/aws_lambda_builders/workflows/go_modules/builder.py b/aws_lambda_builders/workflows/go_modules/builder.py new file mode 100644 index 000000000..35f649b26 --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/builder.py @@ -0,0 +1,61 @@ +""" +Build a Go project using standard Go tooling +""" +import logging + + +LOG = logging.getLogger(__name__) + + +class BuilderError(Exception): + MESSAGE = "Builder Failed: {message}" + + def __init__(self, **kwargs): + Exception.__init__(self, self.MESSAGE.format(**kwargs)) + + +class GoModulesBuilder(object): + + LANGUAGE = "go" + + def __init__(self, osutils, binaries): + """Initialize a GoModulesBuilder. + + :type osutils: :class:`lambda_builders.utils.OSUtils` + :param osutils: A class used for all interactions with the + outside OS. + + :type binaries: dict + :param binaries: A dict of language binaries + """ + self.osutils = osutils + self.binaries = binaries + + def build(self, source_dir_path, output_path): + """Builds a go project onto an output path. + + :type source_dir_path: str + :param source_dir_path: Directory with the source files. + + :type output_path: str + :param output_path: Filename to write the executable output to. 
+ """ + env = {} + env.update(self.osutils.environ) + env.update({"GOOS": "linux", "GOARCH": "amd64"}) + runtime_path = self.binaries[self.LANGUAGE].binary_path + cmd = [runtime_path, "build", "-o", output_path, source_dir_path] + + p = self.osutils.popen( + cmd, + cwd=source_dir_path, + env=env, + stdout=self.osutils.pipe, + stderr=self.osutils.pipe, + ) + out, err = p.communicate() + + if p.returncode != 0: + raise BuilderError(message=err.decode("utf8").strip()) + + return out.decode("utf8").strip() diff --git a/aws_lambda_builders/workflows/go_modules/utils.py b/aws_lambda_builders/workflows/go_modules/utils.py new file mode 100644 index 000000000..2efc79aca --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/utils.py @@ -0,0 +1,27 @@ +""" +Commonly used utilities +""" + +import os +import subprocess + + +class OSUtils(object): + """ + Wrapper around file system functions, to make it easy to + unit test actions in memory + """ + @property + def environ(self): + return os.environ.copy() + + def joinpath(self, *args): + return os.path.join(*args) + + def popen(self, command, stdout=None, stderr=None, env=None, cwd=None): + p = subprocess.Popen(command, stdout=stdout, stderr=stderr, env=env, cwd=cwd) + return p + + @property + def pipe(self): + return subprocess.PIPE diff --git a/aws_lambda_builders/workflows/go_modules/validator.py b/aws_lambda_builders/workflows/go_modules/validator.py new file mode 100644 index 000000000..e44779efc --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/validator.py @@ -0,0 +1,68 @@ +""" +Go Runtime Validation +""" + +import logging +import os +import subprocess + +from aws_lambda_builders.exceptions import MisMatchRuntimeError + +LOG = logging.getLogger(__name__) + + +class GoRuntimeValidator(object): + + LANGUAGE = "go" + SUPPORTED_RUNTIMES = { + "go1.x" + } + + def __init__(self, runtime): + self.runtime = runtime + self._valid_runtime_path = None + + def has_runtime(self): + """ + Checks if the runtime is supported. + :param string runtime: Runtime to check + :return bool: True, if the runtime is supported. 
+ """ + return self.runtime in self.SUPPORTED_RUNTIMES + + def validate(self, runtime_path): + """ + Checks if the language supplied matches the required lambda runtime + :param string runtime_path: runtime to check eg: /usr/bin/go + :raises MisMatchRuntimeError: Version mismatch of the language vs the required runtime + """ + if not self.has_runtime(): + LOG.warning("'%s' runtime is not " + "a supported runtime", self.runtime) + return None + + expected_major_version = int(self.runtime.replace(self.LANGUAGE, "").split('.')[0]) + min_expected_minor_version = 11 if expected_major_version == 1 else 0 + + p = subprocess.Popen([runtime_path, "version"], + cwd=os.getcwd(), + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, _ = p.communicate() + + if p.returncode == 0: + out_parts = out.decode().split() + if len(out_parts) >= 3: + version_parts = [int(x) for x in out_parts[2].replace(self.LANGUAGE, "").split('.')] + if len(version_parts) == 3: + if version_parts[0] == expected_major_version and version_parts[1] >= min_expected_minor_version: + self._valid_runtime_path = runtime_path + return self._valid_runtime_path + + # otherwise, raise mismatch exception + raise MisMatchRuntimeError(language=self.LANGUAGE, + required_runtime=self.runtime, + runtime_path=runtime_path) + + @property + def validated_runtime_path(self): + return self._valid_runtime_path diff --git a/aws_lambda_builders/workflows/go_modules/workflow.py b/aws_lambda_builders/workflows/go_modules/workflow.py new file mode 100644 index 000000000..cbfd6d099 --- /dev/null +++ b/aws_lambda_builders/workflows/go_modules/workflow.py @@ -0,0 +1,51 @@ +""" +Go Modules Workflow +""" +from aws_lambda_builders.workflow import BaseWorkflow, Capability + +from .actions import GoModulesBuildAction +from .builder import GoModulesBuilder +from .validator import GoRuntimeValidator +from .utils import OSUtils + + +class GoModulesWorkflow(BaseWorkflow): + + NAME = "GoModulesBuilder" + + CAPABILITY = Capability(language="go", + dependency_manager="modules", + application_framework=None) + + def __init__(self, + source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + runtime=None, + osutils=None, + **kwargs): + + super(GoModulesWorkflow, self).__init__( + source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + runtime=runtime, + **kwargs) + + if osutils is None: + osutils = OSUtils() + + options = kwargs.get("options") or {} + handler = options.get("artifact_executable_name", None) + + output_path = osutils.joinpath(artifacts_dir, handler) + + builder = GoModulesBuilder(osutils, binaries=self.binaries) + self.actions = [ + GoModulesBuildAction(source_dir, output_path, builder), + ] + + def get_validators(self): + return [GoRuntimeValidator(runtime=self.runtime)] diff --git a/aws_lambda_builders/workflows/java_gradle/DESIGN.md b/aws_lambda_builders/workflows/java_gradle/DESIGN.md new file mode 100644 index 000000000..f20c986f3 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/DESIGN.md @@ -0,0 +1,195 @@ +# Java - Gradle Lambda Builder + +## Scope + +This package enables the creation of a Lambda deployment package for Java +projects managed using the Gradle build tool. + +For Java projects, the most popular way to create a distribution package for +Java based Lambdas is to create an "uber" or "fat" JAR. This is a single JAR +file that contains both the customers' classes and resources, as well as all the +classes and resources extracted from their dependency JAR's. 
However, this can
+cause files that have the same path in two different JAR's to collide within the
+uber JAR.
+
+Another solution is to create a distribution ZIP containing the customer's
+classes and resources and include their dependency JARs under a `lib` directory.
+This keeps the customers' classes and resources separate from their
+dependencies' to avoid any file collisions. However, this incurs some overhead
+as the ZIP must be unpacked before the code can run.
+
+To avoid the problem of colliding files, we will choose the second option and
+create a distribution ZIP.
+
+## Challenges
+
+Java bytecode can only run on the same or a newer version of the JVM than the one
+it was compiled for. For example, Java 8 bytecode can run on a JVM that is at
+least version 8, but bytecode targeting Java 9 cannot run on a Java 8 VM.
+This is further complicated by the fact that a newer JDK can generate code to
+be run on an older VM if configured using the `targetCompatibility` and
+`sourceCompatibility` properties of the Java plugin. Therefore, it is not
+sufficient to check the version of the local JDK, nor is it possible to check
+the value set for `targetCompatibility` or `sourceCompatibility` since it can
+be local to the compile/build task. At best, we can check if the local
+version of the JDK is newer than Java 8 and emit a warning that the built
+artifact may not run in Lambda.
+
+Gradle projects are configured using `build.gradle` build scripts. These are
+executable files authored in either Groovy or, since Gradle 5.0, Kotlin, using the
+Gradle DSL. This presents a similar problem to `setup.py` in the Python world in
+that arbitrary logic can be executed during build time that could affect both
+how the customer's artifact is built, and which dependencies are chosen.
+
+An interesting challenge is dealing with single-build and multi-build projects.
+Consider the following project structures:
+
+**Project A**
+```
+ProjectA
+├── build.gradle
+├── gradlew
+├── src
+└── template.yaml
+```
+
+**Project B**
+```
+ProjectB
+├── common
+│   └── build.gradle
+├── lambda1
+│   └── build.gradle
+├── lambda2
+│   └── build.gradle
+├── build.gradle
+├── gradlew
+├── settings.gradle
+└── template.yaml
+```
+
+Here `ProjectA` is a single lambda function, and `ProjectB` is a multi-build
+project where the subdirectories `lambda1` and `lambda2` are each a lambda
+function. In addition, suppose that `ProjectB/lambda1` has a dependency on its
+sibling project `ProjectB/common`.
+
+Building Project A is relatively simple since we just need to issue `gradlew
+build` and place the built ZIP within the artifact directory.
+
+Building `ProjectB/lambda1` is very similar from the point of view of the
+workflow since it still issues the same command (`gradlew build`), but it
+requires that Gradle is able to find its way back up to the parent `ProjectB` so
+that it can also build `ProjectB/common`, which can be a challenge when mounting
+within a container.
+
+## Implementation
+
+### Build Workflow
+
+We leverage Gradle to do all the heavy lifting for executing the
+`build.gradle` script, which will resolve and download the dependencies and
+build the project. To create the distribution ZIP, we use a
+Gradle init script to insert a post-build action that does this.
+
+#### Step 1: Copy custom init file to temporary location
+
+There is no standard task in Gradle to create a distribution ZIP (or uber JAR).
+We add this functionality through the use of a Gradle init script.
The script
+will be responsible for adding a post-build action that creates the distribution
+ZIP.
+
+It will do something similar to:
+
+```sh
+cp /path/to/lambda-build-init.gradle /$SCRATCH_DIR/
+```
+
+where the contents of `lambda-build-init.gradle` contain the code for defining
+the post-build action:
+
+```gradle
+gradle.project.afterProject { p ->
+ // Set the given project's buildDir to one under SCRATCH_DIR
+}
+
+// Include the project classes and resources in the root, and the dependencies
+// under lib
+gradle.taskGraph.afterTask { t ->
+ if (t.name != 'build') {
+ return
+ }
+
+ // Step 1: Find the directory under scratch_dir where the artifact for
+ // t.project is located
+ // Step 2: Open ZIP file in $buildDir/distributions/lambda_build
+ // Step 3: Copy project class files and resources to ZIP root
+ // Step 4: Copy libs in configurations.runtimeClasspath into 'lib'
+ // subdirectory in ZIP
+}
+```
+
+#### Step 2: Resolve Gradle executable to use
+
+[The recommended
+way](https://docs.gradle.org/current/userguide/gradle_wrapper.html) to
+author and distribute a Gradle project is to include a `gradlew` or Gradle
+Wrapper file within the root of the project. This essentially locks in the
+version of Gradle for the project and uses an executable that is independent of
+any local installations. This helps ensure that builds are always consistent
+across different environments.
+
+The `gradlew` script, if it is included, will be located at the root of the
+project. We will rely on the invoker of the workflow to supply the path to the
+`gradlew` script.
+
+We give precedence to this `gradlew` file, and if it isn't found, we use the
+`gradle` executable found on the `PATH` using the [path resolver][path resolver].
+
+#### Step 3: Check Java version and emit warning
+
+Check whether the local JDK version is <= Java 8, and if it is not, emit a
+warning that the built artifact may not run in Lambda unless a) the project is
+properly configured (i.e. using `targetCompatibility`) or b) the project is
+built within a Lambda-compatible environment like `lambci`.
+
+We use the Gradle executable from Step 2 for this to ensure that we check the
+actual JVM version Gradle is using in case it has been configured to use a
+different one than the one found on the PATH.
+
+#### Step 4: Build and package
+
+```sh
+$GRADLE_EXECUTABLE --project-cache-dir $SCRATCH_DIR/gradle-cache \
+ -Dsoftware.amazon.aws.lambdabuilders.scratch-dir=$SCRATCH_DIR \
+ --init-script $SCRATCH_DIR/lambda-build-init.gradle build
+```
+
+Since, by default, Gradle stores its build-related metadata in a `.gradle`
+directory under the source directory, we specify an alternative directory under
+`scratch_dir` to avoid writing anything under `source_dir`. This is simply a
+`gradle-cache` directory under `scratch_dir`.
+
+Next, we also pass the location of the `scratch_dir` as a Java system
+property so that it's available to our init script. This allows it to correctly
+map the build directory for each sub-project within `scratch_dir`. Again, this
+ensures that we are not writing anything under the source directory.
+
+One important detail here is that the init script may create *multiple*
+subdirectories under `scratch_dir`, one for each project involved in building
+the lambda located at `source_dir`. Going back to the `ProjectB` example, if
+we're building `lambda1`, this also has the effect of building `common` because
+it's a declared dependency in its `build.gradle`.
So, within `scratch_dir` will +be a sub directory for each project that gets built as a result of building +`source_dir`; in this case there will be one for each of `lambda1` and `common`. +The init file uses some way of mapping the source root of each project involved +to a unique directory under `scratch_dir`, like a hashing function. + +#### Step 5: Copy to artifact directory + +The workflow implementation is aware of the mapping scheme used to map a +`source_dir` to the correct directory under `scratch_dir` (described in step 4), +so it knows where to find the built Lambda artifact when copying it to +`artifacts_dir`. They will be located in +`$SCRATCH_DIR//build/distributions/lambda-build`. + +[path resolver]: https://github.com/awslabs/aws-lambda-builders/pull/55 diff --git a/aws_lambda_builders/workflows/java_gradle/__init__.py b/aws_lambda_builders/workflows/java_gradle/__init__.py new file mode 100644 index 000000000..7b18eed99 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/__init__.py @@ -0,0 +1,5 @@ +""" +Builds Java Lambda functions using the Gradle build tool +""" + +from .workflow import JavaGradleWorkflow diff --git a/aws_lambda_builders/workflows/java_gradle/actions.py b/aws_lambda_builders/workflows/java_gradle/actions.py new file mode 100644 index 000000000..3108c8aff --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/actions.py @@ -0,0 +1,84 @@ +""" +Actions for the Java Gradle Workflow +""" + +import os +from aws_lambda_builders.actions import ActionFailedError, BaseAction, Purpose +from .gradle import GradleExecutionError + + +class JavaGradleBuildAction(BaseAction): + NAME = "GradleBuild" + DESCRIPTION = "Building the project using Gradle" + PURPOSE = Purpose.COMPILE_SOURCE + + INIT_SCRIPT = 'lambda-build-init.gradle' + SCRATCH_DIR_PROPERTY = 'software.amazon.aws.lambdabuilders.scratch-dir' + GRADLE_CACHE_DIR_NAME = 'gradle-cache' + + def __init__(self, + source_dir, + build_file, + subprocess_gradle, + scratch_dir, + os_utils): + self.source_dir = source_dir + self.build_file = build_file + self.scratch_dir = scratch_dir + self.subprocess_gradle = subprocess_gradle + self.os_utils = os_utils + self.cache_dir = os.path.join(self.scratch_dir, self.GRADLE_CACHE_DIR_NAME) + + def execute(self): + init_script_file = self._copy_init_script() + self._build_project(init_script_file) + + @property + def gradle_cache_dir(self): + return self.cache_dir + + def _copy_init_script(self): + try: + src = os.path.join(os.path.dirname(__file__), 'resources', self.INIT_SCRIPT) + dst = os.path.join(self.scratch_dir, self.INIT_SCRIPT) + return self.os_utils.copy(src, dst) + except Exception as ex: + raise ActionFailedError(str(ex)) + + def _build_project(self, init_script_file): + try: + if not self.os_utils.exists(self.scratch_dir): + self.os_utils.makedirs(self.scratch_dir) + self.subprocess_gradle.build(self.source_dir, self.build_file, self.gradle_cache_dir, + init_script_file, + {self.SCRATCH_DIR_PROPERTY: os.path.abspath(self.scratch_dir)}) + except GradleExecutionError as ex: + raise ActionFailedError(str(ex)) + + +class JavaGradleCopyArtifactsAction(BaseAction): + NAME = "CopyArtifacts" + DESCRIPTION = "Copying the built artifacts" + PURPOSE = Purpose.COPY_SOURCE + + def __init__(self, + source_dir, + artifacts_dir, + build_dir, + os_utils): + self.source_dir = source_dir + self.artifacts_dir = artifacts_dir + self.build_dir = build_dir + self.os_utils = os_utils + + def execute(self): + self._copy_artifacts() + + def _copy_artifacts(self): + 
lambda_build_output = os.path.join(self.build_dir, 'build', 'distributions', 'lambda-build') + try: + if not self.os_utils.exists(self.artifacts_dir): + self.os_utils.makedirs(self.artifacts_dir) + self.os_utils.copytree(lambda_build_output, self.artifacts_dir) + except Exception as ex: + raise ActionFailedError(str(ex)) diff --git a/aws_lambda_builders/workflows/java_gradle/gradle.py b/aws_lambda_builders/workflows/java_gradle/gradle.py new file mode 100644 index 000000000..e1222ba99 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/gradle.py @@ -0,0 +1,53 @@ +""" +Wrapper around calls to Gradle through a subprocess. +""" + +import logging +import subprocess + +LOG = logging.getLogger(__name__) + + +class GradleExecutionError(Exception): + MESSAGE = "Gradle Failed: {message}" + + def __init__(self, **kwargs): + Exception.__init__(self, self.MESSAGE.format(**kwargs)) + + +class BuildFileNotFoundError(GradleExecutionError): + def __init__(self, build_file_path): + super(BuildFileNotFoundError, self).__init__( + message='Gradle build file not found: %s' % build_file_path) + + +class SubprocessGradle(object): + + def __init__(self, gradle_binary, os_utils=None): + if gradle_binary is None: + raise ValueError("Must provide Gradle BinaryPath") + self.gradle_binary = gradle_binary + if os_utils is None: + raise ValueError("Must provide OSUtils") + self.os_utils = os_utils + + def build(self, source_dir, build_file, cache_dir=None, init_script_path=None, properties=None): + if not self.os_utils.exists(build_file): + raise BuildFileNotFoundError(build_file) + + args = ['build', '--build-file', build_file] + if cache_dir is not None: + args.extend(['--project-cache-dir', cache_dir]) + if properties is not None: + args.extend(['-D%s=%s' % (n, v) for n, v in properties.items()]) + if init_script_path is not None: + args.extend(['--init-script', init_script_path]) + ret_code, _, stderr = self._run(args, source_dir) + if ret_code != 0: + raise GradleExecutionError(message=stderr.decode('utf8').strip()) + + def _run(self, args, cwd=None): + p = self.os_utils.popen([self.gradle_binary.binary_path] + args, cwd=cwd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + return p.returncode, stdout, stderr diff --git a/aws_lambda_builders/workflows/java_gradle/gradle_resolver.py b/aws_lambda_builders/workflows/java_gradle/gradle_resolver.py new file mode 100644 index 000000000..7827d5083 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/gradle_resolver.py @@ -0,0 +1,31 @@ +""" +Gradle executable resolution +""" + +from .utils import OSUtils + + +class GradleResolver(object): + + def __init__(self, executable_search_paths=None, os_utils=None): + self.binary = 'gradle' + self.executables = [self.binary] + self.executable_search_paths = executable_search_paths + self.os_utils = os_utils if os_utils else OSUtils() + + @property + def exec_paths(self): + # Prefer gradlew/gradlew.bat + paths = self.os_utils.which(self.wrapper_name, executable_search_paths=self.executable_search_paths) + if not paths: + # fallback to the gradle binary + paths = self.os_utils.which('gradle', executable_search_paths=self.executable_search_paths) + + if not paths: + raise ValueError("No Gradle executable found!") + + return paths + + @property + def wrapper_name(self): + return 'gradlew.bat' if self.os_utils.is_windows() else 'gradlew' diff --git a/aws_lambda_builders/workflows/java_gradle/gradle_validator.py b/aws_lambda_builders/workflows/java_gradle/gradle_validator.py new 
file mode 100644 index 000000000..3350baa84 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/gradle_validator.py @@ -0,0 +1,64 @@ +""" +Gradle Binary Validation +""" + +import logging +import re + +from .utils import OSUtils + +LOG = logging.getLogger(__name__) + + +class GradleValidator(object): + VERSION_STRING_WARNING = "%s failed to return a version string using the '-v' option. The workflow is unable to " \ + "check that the version of the JVM used is compatible with AWS Lambda." + + MAJOR_VERSION_WARNING = "%s is using a JVM with major version %s which is newer than 8 that is supported by AWS " \ + "Lambda. The compiled function code may not run in AWS Lambda unless the project has " \ + "been configured to be compatible with Java 8 using 'targetCompatibility' in Gradle." + + def __init__(self, os_utils=None, log=None): + self.language = 'java' + self._valid_binary_path = None + self.os_utils = OSUtils() if not os_utils else os_utils + self.log = LOG if not log else log + + def validate(self, gradle_path): + jvm_mv = self._get_major_version(gradle_path) + + if jvm_mv: + if int(jvm_mv) > 8: + self.log.warning(self.MAJOR_VERSION_WARNING, gradle_path, jvm_mv) + else: + self.log.warning(self.VERSION_STRING_WARNING, gradle_path) + + self._valid_binary_path = gradle_path + return self._valid_binary_path + + @property + def validated_binary_path(self): + return self._valid_binary_path + + def _get_major_version(self, gradle_path): + vs = self._get_jvm_string(gradle_path) + if vs: + m = re.search(r'JVM:\s+(\d.*)', vs) + version = m.group(1).split('.') + # For Java 8 or earlier, version strings begin with 1.{Major Version} + if version[0] == '1': + return version[1] + # Starting with Java 9, the major version is first + return version[0] + + def _get_jvm_string(self, gradle_path): + p = self.os_utils.popen([gradle_path, '-version'], stdout=self.os_utils.pipe, stderr=self.os_utils.pipe) + stdout, _ = p.communicate() + + if p.returncode != 0: + return None + + for l in stdout.splitlines(): + l_dec = l.decode() + if l_dec.startswith('JVM'): + return l_dec diff --git a/aws_lambda_builders/workflows/java_gradle/resources/lambda-build-init.gradle b/aws_lambda_builders/workflows/java_gradle/resources/lambda-build-init.gradle new file mode 100644 index 000000000..d98d72635 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/resources/lambda-build-init.gradle @@ -0,0 +1,105 @@ +import groovy.io.FileType +import java.nio.file.FileAlreadyExistsException +import java.nio.file.Files +import java.nio.file.FileVisitOption +import java.nio.file.FileVisitResult +import java.nio.file.Path +import java.nio.file.Paths +import java.nio.file.SimpleFileVisitor +import java.security.MessageDigest +import java.util.zip.ZipInputStream +import java.util.zip.ZipOutputStream +import java.util.zip.ZipEntry + +gradle.ext.SCRATCH_DIR_PROPERTY = 'software.amazon.aws.lambdabuilders.scratch-dir' + +// Called after the project has been evaluated +gradle.afterProject({p -> + def buildDir = buildDirForProject(p) + p.buildDir = buildDir +}) + +gradle.taskGraph.afterTask{ t -> + if (t.name != 'build') { + return; + } + + def artifactJars = t.project.configurations.archives.artifacts.files.files + + def runtimeCpJars = t.project.configurations.runtimeClasspath.files + + def artifactDir = createArtifactDir(t.project) + + copyToArtifactDir(artifactDir, artifactJars, runtimeCpJars) +} + +def buildDirForProject(p) { + def scratchDir = System.properties[SCRATCH_DIR_PROPERTY] + if (scratchDir == null) { + throw new 
RuntimeException("Missing '${SCRATCH_DIR_PROPERTY}' value") + } + return Paths.get(scratchDir, scratchDirForProject(p), 'build') +} + +def scratchDirForProject(p) { + def sha1 = MessageDigest.getInstance('SHA-1') + return sha1.digest(p.projectDir.toString().getBytes('UTF-8')).encodeHex().toString() +} + +def assertExpectedBuildDir(p) { + def expected = buildDirForProject(p) + if (!expected.equals(p.buildDir.toPath())) { + throw new RuntimeException("Project buildDir was changed by the project build script! Expected $expected but found ${p.buildDir}") + } +} + +def createArtifactDir(project) { + def distsDir = project.buildDir.toPath().resolve(project.distsDirName).resolve('lambda-build') + return makeDirs(distsDir) +} + +def copyToArtifactDir(artifactDir, artifactJars, classPathJars) { + artifactJars.each { + it.withInputStream({ jis -> + def zipIs = new ZipInputStream(jis) + for (def e = zipIs.getNextEntry(); e != null; e = zipIs.getNextEntry()) { + def entryPath = artifactDir.resolve(e.name) + if (e.isDirectory()) { + makeDirs(entryPath) + } else { + copyToFile(zipIs, entryPath) + } + zipIs.closeEntry() + } + zipIs.close() + }) + } + + def libDir = artifactDir.resolve('lib') + makeDirs(libDir) + classPathJars.each { + def jarPath = libDir.resolve(it.name) + it.withInputStream({ jIs -> + copyToFile(jIs, jarPath) + }) + } +} + +def makeDirs(p) { + try { + Files.createDirectories(p) + } catch (FileAlreadyExistsException e) { + // ignored + } + return p +} + +def copyToFile(zipIs, path) { + path.withOutputStream({ fos -> + byte[] buff = new byte[4096] + int bytesRead + while ((bytesRead = zipIs.read(buff)) != -1) { + fos.write(buff, 0, bytesRead) + } + }) +} diff --git a/aws_lambda_builders/workflows/java_gradle/utils.py b/aws_lambda_builders/workflows/java_gradle/utils.py new file mode 100644 index 000000000..2e178a320 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/utils.py @@ -0,0 +1,54 @@ +""" +Commonly used utilities +""" + +import os +import platform +import shutil +import subprocess +from aws_lambda_builders.utils import which + + +class OSUtils(object): + """ + Convenience wrapper around common system functions + """ + + def popen(self, command, stdout=None, stderr=None, env=None, cwd=None): + p = subprocess.Popen(command, stdout=stdout, stderr=stderr, env=env, cwd=cwd) + return p + + def is_windows(self): + return platform.system().lower() == 'windows' + + def copy(self, src, dst): + shutil.copy2(src, dst) + return dst + + def listdir(self, d): + return os.listdir(d) + + def exists(self, p): + return os.path.exists(p) + + def which(self, executable, executable_search_paths=None): + return which(executable, executable_search_paths=executable_search_paths) + + def copytree(self, source, destination): + if not os.path.exists(destination): + self.makedirs(destination) + names = self.listdir(source) + for name in names: + new_source = os.path.join(source, name) + new_destination = os.path.join(destination, name) + if os.path.isdir(new_source): + self.copytree(new_source, new_destination) + else: + self.copy(new_source, new_destination) + + def makedirs(self, d): + return os.makedirs(d) + + @property + def pipe(self): + return subprocess.PIPE diff --git a/aws_lambda_builders/workflows/java_gradle/workflow.py b/aws_lambda_builders/workflows/java_gradle/workflow.py new file mode 100644 index 000000000..cb34b14f9 --- /dev/null +++ b/aws_lambda_builders/workflows/java_gradle/workflow.py @@ -0,0 +1,75 @@ +""" +Java Gradle Workflow +""" +import hashlib +import os +from 
aws_lambda_builders.workflow import BaseWorkflow, Capability +from .actions import JavaGradleBuildAction, JavaGradleCopyArtifactsAction +from .gradle import SubprocessGradle +from .utils import OSUtils +from .gradle_resolver import GradleResolver +from .gradle_validator import GradleValidator + + +class JavaGradleWorkflow(BaseWorkflow): + """ + A Lambda builder workflow that knows how to build Java projects using Gradle. + """ + NAME = "JavaGradleWorkflow" + + CAPABILITY = Capability(language="java", + dependency_manager="gradle", + application_framework=None) + + INIT_FILE = "lambda-build-init.gradle" + + def __init__(self, + source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + **kwargs): + super(JavaGradleWorkflow, self).__init__(source_dir, + artifacts_dir, + scratch_dir, + manifest_path, + **kwargs) + + self.os_utils = OSUtils() + self.build_dir = None + subprocess_gradle = SubprocessGradle(gradle_binary=self.binaries['gradle'], os_utils=self.os_utils) + + self.actions = [ + JavaGradleBuildAction(source_dir, + manifest_path, + subprocess_gradle, + scratch_dir, + self.os_utils), + JavaGradleCopyArtifactsAction(source_dir, + artifacts_dir, + self.build_output_dir, + self.os_utils) + ] + + def get_resolvers(self): + return [GradleResolver(executable_search_paths=self.executable_search_paths)] + + def get_validators(self): + return [GradleValidator(self.os_utils)] + + @property + def build_output_dir(self): + if self.build_dir is None: + self.build_dir = os.path.join(self.scratch_dir, self._compute_scratch_subdir()) + return self.build_dir + + def _compute_scratch_subdir(self): + """ + Compute where the init script will instruct Gradle to place the built artifacts for the lambda within + `scratch_dir`; i.e. the that it will set for 'project.buildDir`. + + :return: The path of the buildDir used for building the lambda. + """ + sha1 = hashlib.sha1() + sha1.update(os.path.abspath(self.source_dir).encode('utf8')) + return sha1.hexdigest() diff --git a/aws_lambda_builders/workflows/nodejs_npm/DESIGN.md b/aws_lambda_builders/workflows/nodejs_npm/DESIGN.md index 1a7746756..803052ed7 100644 --- a/aws_lambda_builders/workflows/nodejs_npm/DESIGN.md +++ b/aws_lambda_builders/workflows/nodejs_npm/DESIGN.md @@ -122,9 +122,8 @@ the local `node_modules` subdirectory. This has to be executed in the directory a clean copy of the source files. Note that NPM can be configured to use proxies or local company repositories using -a local file, `.npmrc`. The packaging process from step 1 normally excludes this file, so it may -need to be copied additionally before dependency installation, and then removed. -_(out of scope for the current version)_ +a local file, `.npmrc`. The packaging process from step 1 normally excludes this file, so it needs +to be copied before dependency installation, and then removed. Some users may want to exclude optional dependencies, or even include development dependencies. 
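The `.npmrc` handling described above amounts to a copy / install / clean-up sequence. A minimal standalone sketch of that sequence follows (the function name and the `--production` flag are illustrative assumptions, not the workflow's actual API):

```python
import os
import shutil
import subprocess


def install_with_npmrc(source_dir, artifacts_dir):
    """Copy .npmrc next to the unpacked sources, run npm install, then remove it."""
    npmrc_src = os.path.join(source_dir, ".npmrc")
    npmrc_dst = os.path.join(artifacts_dir, ".npmrc")
    copied = False
    if os.path.isfile(npmrc_src):
        # Make proxy/registry settings visible to npm during installation.
        shutil.copy2(npmrc_src, npmrc_dst)
        copied = True
    try:
        subprocess.check_call(["npm", "install", "--production"], cwd=artifacts_dir)
    finally:
        if copied and os.path.isfile(npmrc_dst):
            # Keep registry credentials out of the final artifact.
            os.remove(npmrc_dst)
```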
To avoid incompatible flags in the `sam` CLI, the packager should allow users to specify diff --git a/aws_lambda_builders/workflows/nodejs_npm/actions.py b/aws_lambda_builders/workflows/nodejs_npm/actions.py index e3e9aad43..ad6e7a927 100644 --- a/aws_lambda_builders/workflows/nodejs_npm/actions.py +++ b/aws_lambda_builders/workflows/nodejs_npm/actions.py @@ -111,3 +111,87 @@ def execute(self): except NpmExecutionError as ex: raise ActionFailedError(str(ex)) + +class NodejsNpmrcCopyAction(BaseAction): + + """ + A Lambda Builder Action that copies NPM config file .npmrc + """ + + NAME = 'CopyNpmrc' + DESCRIPTION = "Copying configuration from .npmrc" + PURPOSE = Purpose.COPY_SOURCE + + def __init__(self, artifacts_dir, source_dir, osutils): + """ + :type artifacts_dir: str + :param artifacts_dir: an existing (writable) directory with project source files. + Dependencies will be installed in this directory. + + :type source_dir: str + :param source_dir: directory containing project source files. + + :type osutils: aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils + :param osutils: An instance of OS Utilities for file manipulation + """ + + super(NodejsNpmrcCopyAction, self).__init__() + self.artifacts_dir = artifacts_dir + self.source_dir = source_dir + self.osutils = osutils + + def execute(self): + """ + Runs the action. + + :raises lambda_builders.actions.ActionFailedError: when .npmrc copying fails + """ + + try: + npmrc_path = self.osutils.joinpath(self.source_dir, ".npmrc") + if self.osutils.file_exists(npmrc_path): + LOG.debug(".npmrc copying in: %s", self.artifacts_dir) + self.osutils.copy_file(npmrc_path, self.artifacts_dir) + + except OSError as ex: + raise ActionFailedError(str(ex)) + +class NodejsNpmrcCleanUpAction(BaseAction): + + """ + A Lambda Builder Action that cleans NPM config file .npmrc + """ + + NAME = 'CleanUpNpmrc' + DESCRIPTION = "Cleans artifacts dir" + PURPOSE = Purpose.COPY_SOURCE + + def __init__(self, artifacts_dir, osutils): + """ + :type artifacts_dir: str + :param artifacts_dir: an existing (writable) directory with project source files. + Dependencies will be installed in this directory. + + :type osutils: aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils + :param osutils: An instance of OS Utilities for file manipulation + """ + + super(NodejsNpmrcCleanUpAction, self).__init__() + self.artifacts_dir = artifacts_dir + self.osutils = osutils + + def execute(self): + """ + Runs the action. 
+ + :raises lambda_builders.actions.ActionFailedError: when .npmrc removal fails + """ + + try: + npmrc_path = self.osutils.joinpath(self.artifacts_dir, ".npmrc") + if self.osutils.file_exists(npmrc_path): + LOG.debug(".npmrc cleanup in: %s", self.artifacts_dir) + self.osutils.remove_file(npmrc_path) + + except OSError as ex: + raise ActionFailedError(str(ex)) diff --git a/aws_lambda_builders/workflows/nodejs_npm/utils.py b/aws_lambda_builders/workflows/nodejs_npm/utils.py index dcff97272..b34863788 100644 --- a/aws_lambda_builders/workflows/nodejs_npm/utils.py +++ b/aws_lambda_builders/workflows/nodejs_npm/utils.py @@ -6,6 +6,7 @@ import platform import tarfile import subprocess +import shutil class OSUtils(object): @@ -15,10 +16,16 @@ class OSUtils(object): unit test actions in memory """ + def copy_file(self, file_path, destination_path): + return shutil.copy2(file_path, destination_path) + def extract_tarfile(self, tarfile_path, unpack_dir): with tarfile.open(tarfile_path, 'r:*') as tar: tar.extractall(unpack_dir) + def file_exists(self, filename): + return os.path.isfile(filename) + def joinpath(self, *args): return os.path.join(*args) @@ -33,6 +40,9 @@ def pipe(self): def dirname(self, path): return os.path.dirname(path) + def remove_file(self, filename): + return os.remove(filename) + def abspath(self, path): return os.path.abspath(path) diff --git a/aws_lambda_builders/workflows/nodejs_npm/workflow.py b/aws_lambda_builders/workflows/nodejs_npm/workflow.py index b55bc9cc1..dc6be8ea4 100644 --- a/aws_lambda_builders/workflows/nodejs_npm/workflow.py +++ b/aws_lambda_builders/workflows/nodejs_npm/workflow.py @@ -1,10 +1,10 @@ """ NodeJS NPM Workflow """ - +from aws_lambda_builders.path_resolver import PathResolver from aws_lambda_builders.workflow import BaseWorkflow, Capability from aws_lambda_builders.actions import CopySourceAction -from .actions import NodejsNpmPackAction, NodejsNpmInstallAction +from .actions import NodejsNpmPackAction, NodejsNpmInstallAction, NodejsNpmrcCopyAction, NodejsNpmrcCleanUpAction from .utils import OSUtils from .npm import SubprocessNpm @@ -55,8 +55,19 @@ def __init__(self, npm_install = NodejsNpmInstallAction(artifacts_dir, subprocess_npm=subprocess_npm) + + npm_copy_npmrc = NodejsNpmrcCopyAction(tar_package_dir, source_dir, osutils=osutils) + self.actions = [ npm_pack, + npm_copy_npmrc, CopySourceAction(tar_package_dir, artifacts_dir, excludes=self.EXCLUDED_FILES), npm_install, + NodejsNpmrcCleanUpAction(artifacts_dir, osutils=osutils) ] + + def get_resolvers(self): + """ + Specialized path resolver that returns the npm executable for the runtime on the path.
+ """ + return [PathResolver(runtime=self.runtime, binary="npm")] diff --git a/aws_lambda_builders/workflows/python_pip/actions.py b/aws_lambda_builders/workflows/python_pip/actions.py index 81d5fe981..e2c0cb3fb 100644 --- a/aws_lambda_builders/workflows/python_pip/actions.py +++ b/aws_lambda_builders/workflows/python_pip/actions.py @@ -3,7 +3,8 @@ """ from aws_lambda_builders.actions import BaseAction, Purpose, ActionFailedError -from .packager import PythonPipDependencyBuilder, PackagerError +from aws_lambda_builders.workflows.python_pip.utils import OSUtils +from .packager import PythonPipDependencyBuilder, PackagerError, DependencyBuilder, SubprocessPip, PipRunner class PythonPipBuildAction(BaseAction): @@ -11,17 +12,28 @@ class PythonPipBuildAction(BaseAction): NAME = 'ResolveDependencies' DESCRIPTION = "Installing dependencies from PIP" PURPOSE = Purpose.RESOLVE_DEPENDENCIES + LANGUAGE = 'python' - def __init__(self, artifacts_dir, manifest_path, scratch_dir, runtime): + def __init__(self, artifacts_dir, manifest_path, scratch_dir, runtime, binaries): self.artifacts_dir = artifacts_dir self.manifest_path = manifest_path self.scratch_dir = scratch_dir self.runtime = runtime - self.package_builder = PythonPipDependencyBuilder(runtime=runtime) + self.binaries = binaries def execute(self): + os_utils = OSUtils() + python_path = self.binaries[self.LANGUAGE].binary_path + pip = SubprocessPip(osutils=os_utils, python_exe=python_path) + pip_runner = PipRunner(python_exe=python_path, pip=pip) + dependency_builder = DependencyBuilder(osutils=os_utils, pip_runner=pip_runner, + runtime=self.runtime) + + package_builder = PythonPipDependencyBuilder(osutils=os_utils, + runtime=self.runtime, + dependency_builder=dependency_builder) try: - self.package_builder.build_dependencies( + package_builder.build_dependencies( self.artifacts_dir, self.manifest_path, self.scratch_dir diff --git a/aws_lambda_builders/workflows/python_pip/compat.py b/aws_lambda_builders/workflows/python_pip/compat.py index 64aba2f06..ff35cbabe 100644 --- a/aws_lambda_builders/workflows/python_pip/compat.py +++ b/aws_lambda_builders/workflows/python_pip/compat.py @@ -1,13 +1,21 @@ import os +from aws_lambda_builders.workflows.python_pip.utils import OSUtils -def pip_import_string(): - import pip - pip_major_version = pip.__version__.split('.')[0] + +def pip_import_string(python_exe): + os_utils = OSUtils() + cmd = [ + python_exe, + "-c", + "import pip; assert int(pip.__version__.split('.')[0]) <= 9" + ] + p = os_utils.popen(cmd,stdout=os_utils.pipe, stderr=os_utils.pipe) + p.communicate() # Pip moved its internals to an _internal module in version 10. # In order to be compatible with version 9 which has it at at the # top level we need to figure out the correct import path here. 
- if pip_major_version == '9': + if p.returncode == 0: return 'from pip import main' else: return 'from pip._internal import main' diff --git a/aws_lambda_builders/workflows/python_pip/packager.py b/aws_lambda_builders/workflows/python_pip/packager.py index a3a2f0130..9ba1dcaa9 100644 --- a/aws_lambda_builders/workflows/python_pip/packager.py +++ b/aws_lambda_builders/workflows/python_pip/packager.py @@ -177,7 +177,7 @@ def __init__(self, osutils, runtime, pip_runner=None): """ self._osutils = osutils if pip_runner is None: - pip_runner = PipRunner(SubprocessPip(osutils)) + pip_runner = PipRunner(python_exe=None, pip=SubprocessPip(osutils)) self._pip = pip_runner self.runtime = runtime @@ -546,12 +546,13 @@ def get_package_name_and_version(self, sdist_path): class SubprocessPip(object): """Wrapper around calling pip through a subprocess.""" - def __init__(self, osutils=None, import_string=None): + def __init__(self, osutils=None, python_exe=None, import_string=None): if osutils is None: osutils = OSUtils() self._osutils = osutils + self.python_exe = python_exe if import_string is None: - import_string = pip_import_string() + import_string = pip_import_string(python_exe=self.python_exe) self._import_string = import_string def main(self, args, env_vars=None, shim=None): @@ -559,12 +560,11 @@ def main(self, args, env_vars=None, shim=None): env_vars = self._osutils.environ() if shim is None: shim = '' - python_exe = sys.executable run_pip = ( 'import sys; %s; sys.exit(main(%s))' ) % (self._import_string, args) exec_string = '%s%s' % (shim, run_pip) - invoke_pip = [python_exe, '-c', exec_string] + invoke_pip = [self.python_exe, '-c', exec_string] p = self._osutils.popen(invoke_pip, stdout=self._osutils.pipe, stderr=self._osutils.pipe, @@ -581,9 +581,10 @@ class PipRunner(object): " Link is a directory," " ignoring download_dir") - def __init__(self, pip, osutils=None): + def __init__(self, python_exe, pip, osutils=None): if osutils is None: osutils = OSUtils() + self.python_exe = python_exe self._wrapped_pip = pip self._osutils = osutils diff --git a/aws_lambda_builders/workflows/python_pip/validator.py b/aws_lambda_builders/workflows/python_pip/validator.py new file mode 100644 index 000000000..22e50a2d3 --- /dev/null +++ b/aws_lambda_builders/workflows/python_pip/validator.py @@ -0,0 +1,73 @@ +""" +Python Runtime Validation +""" + +import logging +import os +import subprocess + +from aws_lambda_builders.exceptions import MisMatchRuntimeError + +LOG = logging.getLogger(__name__) + + +class PythonRuntimeValidator(object): + SUPPORTED_RUNTIMES = { + "python2.7", + "python3.6", + "python3.7" + } + + def __init__(self, runtime): + self.language = "python" + self.runtime = runtime + self._valid_runtime_path = None + + def has_runtime(self): + """ + Checks if the runtime is supported. + :param string runtime: Runtime to check + :return bool: True, if the runtime is supported. 
+ """ + return self.runtime in self.SUPPORTED_RUNTIMES + + def validate(self, runtime_path): + """ + Checks if the language supplied matches the required lambda runtime + :param string runtime_path: runtime to check eg: /usr/bin/python3.6 + :raises MisMatchRuntimeError: Version mismatch of the language vs the required runtime + """ + if not self.has_runtime(): + LOG.warning("'%s' runtime is not " + "a supported runtime", self.runtime) + return + + cmd = self._validate_python_cmd(runtime_path) + + p = subprocess.Popen(cmd, + cwd=os.getcwd(), + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.communicate() + if p.returncode != 0: + raise MisMatchRuntimeError(language=self.language, + required_runtime=self.runtime, + runtime_path=runtime_path) + else: + self._valid_runtime_path = runtime_path + return self._valid_runtime_path + + def _validate_python_cmd(self, runtime_path): + major, minor = self.runtime.replace(self.language, "").split('.') + cmd = [ + runtime_path, + "-c", + "import sys; " + "assert sys.version_info.major == {major} " + "and sys.version_info.minor == {minor}".format( + major=major, + minor=minor)] + return cmd + + @property + def validated_runtime_path(self): + return self._valid_runtime_path diff --git a/aws_lambda_builders/workflows/python_pip/workflow.py b/aws_lambda_builders/workflows/python_pip/workflow.py index fb2582b0d..a3225ce66 100644 --- a/aws_lambda_builders/workflows/python_pip/workflow.py +++ b/aws_lambda_builders/workflows/python_pip/workflow.py @@ -1,9 +1,9 @@ """ Python PIP Workflow """ - from aws_lambda_builders.workflow import BaseWorkflow, Capability from aws_lambda_builders.actions import CopySourceAction +from aws_lambda_builders.workflows.python_pip.validator import PythonRuntimeValidator from .actions import PythonPipBuildAction @@ -65,6 +65,9 @@ def __init__(self, self.actions = [ PythonPipBuildAction(artifacts_dir, scratch_dir, - manifest_path, runtime), + manifest_path, runtime, binaries=self.binaries), CopySourceAction(source_dir, artifacts_dir, excludes=self.EXCLUDED_FILES), ] + + def get_validators(self): + return [PythonRuntimeValidator(runtime=self.runtime)] diff --git a/designs/Path_Resolver_DESIGN.md b/designs/Path_Resolver_DESIGN.md new file mode 100644 index 000000000..fab3f8968 --- /dev/null +++ b/designs/Path_Resolver_DESIGN.md @@ -0,0 +1,118 @@ +## Path Resolver for Builders + +### Scope + +When building for a particular language, there are a few things to consider that a workflow currently already does. They are the following + +* Language +* Dependency Manager +* Application framework + +But what is missing is the ``path to the language executable``, that will be used for building artifacts through a workflow. + +Currently this is solved by just picking up the earliest instance of ``language`` in one's PATH. This works fine if there is just one instance of the language. This does not work in the case where there are more than one language version, with different executables living alongside one another in the path. + +This path resolver given a language and a runtime, will attempt to resolve paths in an opioniated manner before falling back to the defaulting to the first executable in `$PATH`. The resolved path is then validated to check if the resolved path actually matches the runtime attributes specified. eg: version. + +### Challenges + +How do we come up with a mechanism that looks for different versions of the same executable in the PATH. 
One of the ways to look at this is to explicitly look for the runtime in the path ahead of the language. + +eg: + +```bash +Language | Runtime +python | python3.6, python3.7 +``` + +In this case, if our runtime is python3.6, we explicitly look for python3.6 in the path first. This also gives the user the flexibility to easily change minor versions of the same executable, by just changing a symlink. + +We will need a resolver in each workflow that, given a runtime and language combination, returns a path to the executable to be used for building artifacts. + +### Interface + +We will have a path resolver class whose functionality is inherently simple to begin with. It has a candidate list of locations to look for the executable depending upon the runtime and language, and returns the path. + +```python +class PathResolver(object): + def __init__(self, language, runtime): + self.language = language + self.runtime = runtime + self.executables = [self.runtime, self.language] + + def path(self): + path = ... + return path +``` + +We will also have a runtime validator class to make sure that the path to the executable is actually the executable we want to be building artifacts for. +This will do things like making sure that the version of the runtime specified actually matches the version reported by the executable at that path. Each Workflow can have its own validator that would do this. + +```python + +class RuntimeValidator(object): + ... + ... + def validate_runtime(self, runtime_path): + valid = ... (True/False based on some computation) + return valid +``` + +The Base workflow will define a ```get_executable``` and a ```get_validator``` method. + +```python +class BaseWorkflow(object): + ... + ... + ... + + def get_executable(self): + return PathResolver(language=x, runtime=x.y).path + + def get_validator(self): + return RuntimeValidator(runtime_path=/usr/bin/x.y, language=x) + + @sanitize(executable_path=self.get_executable(),validator=self.get_validator()) + def run(self): +``` + +This way we have de-coupled validation of the path from the actual finding of the path to the executable. Both of these methods can be overridden in any workflow that subclasses BaseWorkflow. + +There will be default implementations of ```PathResolver``` and ```RuntimeValidator``` that can still be used by workflow authors if they don't want to specialize them. + +A decorator on top of run would execute validation on the resolved path to ensure that it is safe to start the workflow. + +### Implementation + +Here is an example of a Python workflow that has ```get_executable``` and ```get_validator``` defined. + +```python +class PythonPipWorkflow(object): + ... + ... + ... + + def get_executable(self): + return PythonPathResolver(language=python, runtime=python3.6).path + + def get_validator(self): + return PythonRuntimeValidator(runtime_path=/usr/bin/python3.6, language=python) +``` + +Finding the executable path and validation of the path occur before the Workflow's ```run``` method is invoked. This way failure is detected early, before workflow actions are executed. + +There is a work-in-progress PR that partially follows this design doc, except it adds the runtime_path in the per-language workflow. This will be changed to make it align with this design doc. + +[#35](https://github.com/awslabs/aws-lambda-builders/pull/35) + +### Tenets + +The general tenets for this are as follows. + +1. Every workflow needs to have a PathResolver and a Validator. 
The resolver can be a custom version or use the bundled opioniated resolver. +2. The workflow needs to fail if the resolved executable path does not match the supplied runtime version. This fails quickly and stops incorrect artifacts from being built. +3. The resolved executable path should be available to every action. + +### Open questions + +* Should we constrain the interface for the PathResolver with an abstract base class? diff --git a/setup.py b/setup.py index 002325d8a..7dcd2d304 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ def read_version(): author='Amazon Web Services', author_email='aws-sam-developers@amazon.com', url='https://github.com/awslabs/aws-lambda-builders', - license=read('LICENSE'), + license='Apache License 2.0', packages=find_packages(exclude=('tests', 'docs')), keywords="AWS Lambda Functions Building", # Support Python 2.7 and 3.6 or greater diff --git a/tests/functional/test_builder.py b/tests/functional/test_builder.py index 553887c0e..a5a1b172f 100644 --- a/tests/functional/test_builder.py +++ b/tests/functional/test_builder.py @@ -4,6 +4,11 @@ import shutil import tempfile +try: + import pathlib +except ImportError: + import pathlib2 as pathlib + from unittest import TestCase from aws_lambda_builders.builder import LambdaBuilder @@ -20,7 +25,7 @@ def setUp(self): self.source_dir = tempfile.mkdtemp() self.artifacts_dir = tempfile.mkdtemp() self.scratch_dir = os.path.join(tempfile.mkdtemp(), "scratch") - self.hello_builder = LambdaBuilder(language="test", + self.hello_builder = LambdaBuilder(language="python", dependency_manager="test", application_framework="test", supported_workflows=[ @@ -40,16 +45,17 @@ def tearDown(self): # Remove the workflows folder from PYTHONPATH sys.path.remove(self.TEST_WORKFLOWS_FOLDER) - def test_run_hello_workflow(self): + def test_run_hello_workflow_with_exec_paths(self): self.hello_builder.build(self.source_dir, self.artifacts_dir, self.scratch_dir, - "/ignored") + "/ignored", + executable_search_paths=[str(pathlib.Path(sys.executable).parent)]) self.assertTrue(os.path.exists(self.expected_filename)) contents = '' with open(self.expected_filename, 'r') as fp: contents = fp.read() - self.assertEquals(contents, self.expected_contents) + self.assertEquals(contents, self.expected_contents) \ No newline at end of file diff --git a/tests/functional/test_cli.py b/tests/functional/test_cli.py index 275ccd411..d5ce9301d 100644 --- a/tests/functional/test_cli.py +++ b/tests/functional/test_cli.py @@ -5,10 +5,19 @@ import tempfile import subprocess import copy +import sys from unittest import TestCase from parameterized import parameterized +try: + import pathlib +except ImportError: + import pathlib2 as pathlib + + +from aws_lambda_builders import RPC_PROTOCOL_VERSION as lambda_builders_protocol_version + class TestCliWithHelloWorkflow(TestCase): @@ -22,7 +31,7 @@ def setUp(self): self.scratch_dir = os.path.join(tempfile.mkdtemp(), "scratch") # Capabilities supported by the Hello workflow - self.language = "test" + self.language = "python" self.dependency_manager = "test" self.application_framework = "test" @@ -39,19 +48,21 @@ def setUp(self): def tearDown(self): shutil.rmtree(self.source_dir) shutil.rmtree(self.artifacts_dir) - shutil.rmtree(self.scratch_dir) @parameterized.expand([ - ("request_through_stdin"), - ("request_through_argument") + ("request_through_stdin", lambda_builders_protocol_version), + ("request_through_argument", lambda_builders_protocol_version), + ("request_through_stdin", "0.1"), + 
("request_through_argument", "0.1"), ]) - def test_run_hello_workflow(self, flavor): + def test_run_hello_workflow_with_backcompat(self, flavor, protocol_version): - request_json = json.dumps({ + request = { "jsonschema": "2.0", "id": 1234, "method": "LambdaBuilder.build", "params": { + "__protocol_version": protocol_version, "capability": { "language": self.language, "dependency_manager": self.dependency_manager, @@ -64,10 +75,14 @@ def test_run_hello_workflow(self, flavor): "manifest_path": "/ignored", "runtime": "ignored", "optimizations": {}, - "options": {}, + "options": {} } - }) + } + + if protocol_version == lambda_builders_protocol_version: + request["executable_search_paths"] = [str(pathlib.Path(sys.executable).parent)] + request_json = json.dumps(request) env = copy.deepcopy(os.environ) env["PYTHONPATH"] = self.python_path @@ -94,4 +109,52 @@ def test_run_hello_workflow(self, flavor): contents = fp.read() self.assertEquals(contents, self.expected_contents) + shutil.rmtree(self.scratch_dir) + + @parameterized.expand([ + ("request_through_stdin"), + ("request_through_argument") + ]) + def test_run_hello_workflow_incompatible(self, flavor): + + request_json = json.dumps({ + "jsonschema": "2.0", + "id": 1234, + "method": "LambdaBuilder.build", + "params": { + "__protocol_version": "2.0", + "capability": { + "language": self.language, + "dependency_manager": self.dependency_manager, + "application_framework": self.application_framework + }, + "supported_workflows": [self.HELLO_WORKFLOW_MODULE], + "source_dir": self.source_dir, + "artifacts_dir": self.artifacts_dir, + "scratch_dir": self.scratch_dir, + "manifest_path": "/ignored", + "runtime": "ignored", + "optimizations": {}, + "options": {}, + "executable_search_paths": [str(pathlib.Path(sys.executable).parent)] + } + }) + + + env = copy.deepcopy(os.environ) + env["PYTHONPATH"] = self.python_path + stdout_data = None + if flavor == "request_through_stdin": + p = subprocess.Popen([self.command_name], env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + stdout_data = p.communicate(input=request_json.encode('utf-8'))[0] + elif flavor == "request_through_argument": + p = subprocess.Popen([self.command_name, request_json], env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + stdout_data = p.communicate()[0] + else: + raise ValueError("Invalid test flavor") + + # Validate the response object. 
It should be error response + response = json.loads(stdout_data) + self.assertIn('error', response) + self.assertEquals(response['error']['code'], 505) diff --git a/tests/functional/test_utils.py b/tests/functional/test_utils.py index 69dcd3390..b82032df0 100644 --- a/tests/functional/test_utils.py +++ b/tests/functional/test_utils.py @@ -40,6 +40,7 @@ def test_must_respect_excludes_list(self): self.assertEquals(set(os.listdir(os.path.join(self.dest, "a"))), {"c"}) self.assertEquals(set(os.listdir(os.path.join(self.dest, "a"))), {"c"}) + def file(*args): path = os.path.join(*args) basedir = os.path.dirname(path) diff --git a/tests/functional/testdata/workflows/hello_workflow/write_hello.py b/tests/functional/testdata/workflows/hello_workflow/write_hello.py index 5f5bb4c1d..0a8a5e311 100644 --- a/tests/functional/testdata/workflows/hello_workflow/write_hello.py +++ b/tests/functional/testdata/workflows/hello_workflow/write_hello.py @@ -33,7 +33,7 @@ def execute(self): class WriteHelloWorkflow(BaseWorkflow): NAME = "WriteHelloWorkflow" - CAPABILITY = Capability(language="test", dependency_manager="test", application_framework="test") + CAPABILITY = Capability(language="python", dependency_manager="test", application_framework="test") def __init__(self, source_dir, artifacts_dir, *args, **kwargs): super(WriteHelloWorkflow, self).__init__(source_dir, artifacts_dir, *args, **kwargs) diff --git a/tests/functional/workflows/go_dep/test_godep_utils.py b/tests/functional/workflows/go_dep/test_godep_utils.py new file mode 100644 index 000000000..e2d610ed6 --- /dev/null +++ b/tests/functional/workflows/go_dep/test_godep_utils.py @@ -0,0 +1,63 @@ +import os +import shutil +import sys +import tempfile + +from unittest import TestCase + +from aws_lambda_builders.workflows.go_dep import utils + + +class TestGoDepOSUtils(TestCase): + + def setUp(self): + + self.osutils = utils.OSUtils() + + def test_dirname_returns_directory_for_path(self): + dirname = self.osutils.dirname(sys.executable) + + self.assertEqual(dirname, os.path.dirname(sys.executable)) + + def test_abspath_returns_absolute_path(self): + + result = self.osutils.abspath('.') + + self.assertTrue(os.path.isabs(result)) + + self.assertEqual(result, os.path.abspath('.')) + + def test_joinpath_joins_path_components(self): + + result = self.osutils.joinpath('a', 'b', 'c') + + self.assertEqual(result, os.path.join('a', 'b', 'c')) + + def test_popen_runs_a_process_and_returns_outcome(self): + + cwd_py = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata', 'cwd.py') + + p = self.osutils.popen([sys.executable, cwd_py], + stdout=self.osutils.pipe, + stderr=self.osutils.pipe) + + out, err = p.communicate() + + self.assertEqual(p.returncode, 0) + + self.assertEqual(out.decode('utf8').strip(), os.getcwd()) + + def test_popen_can_accept_cwd(self): + + testdata_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata') + + p = self.osutils.popen([sys.executable, 'cwd.py'], + stdout=self.osutils.pipe, + stderr=self.osutils.pipe, + cwd=testdata_dir) + + out, err = p.communicate() + + self.assertEqual(p.returncode, 0) + + self.assertEqual(out.decode('utf8').strip(), os.path.abspath(testdata_dir)) diff --git a/tests/functional/workflows/go_modules/test_go_utils.py b/tests/functional/workflows/go_modules/test_go_utils.py new file mode 100644 index 000000000..1264af818 --- /dev/null +++ b/tests/functional/workflows/go_modules/test_go_utils.py @@ -0,0 +1,39 @@ +import os +import sys + +from unittest import TestCase + +from 
aws_lambda_builders.workflows.go_modules import utils + + +class TestOSUtils(TestCase): + + def setUp(self): + self.osutils = utils.OSUtils() + + def test_environ_returns_environment(self): + result = self.osutils.environ + self.assertEqual(result, os.environ) + + def test_joinpath_joins_path_components(self): + result = self.osutils.joinpath('a', 'b', 'c') + self.assertEqual(result, os.path.join('a', 'b', 'c')) + + def test_popen_runs_a_process_and_returns_outcome(self): + cwd_py = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata', 'cwd.py') + p = self.osutils.popen([sys.executable, cwd_py], + stdout=self.osutils.pipe, + stderr=self.osutils.pipe) + out, err = p.communicate() + self.assertEqual(p.returncode, 0) + self.assertEqual(out.decode('utf8').strip(), os.getcwd()) + + def test_popen_can_accept_cwd(self): + testdata_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata') + p = self.osutils.popen([sys.executable, 'cwd.py'], + stdout=self.osutils.pipe, + stderr=self.osutils.pipe, + cwd=testdata_dir) + out, err = p.communicate() + self.assertEqual(p.returncode, 0) + self.assertEqual(out.decode('utf8').strip(), os.path.abspath(testdata_dir)) diff --git a/tests/unit/workflows/python_pip/test_builder.py b/tests/functional/workflows/java_gradle/__init__.py similarity index 100% rename from tests/unit/workflows/python_pip/test_builder.py rename to tests/functional/workflows/java_gradle/__init__.py diff --git a/tests/functional/workflows/java_gradle/test_java_utils.py b/tests/functional/workflows/java_gradle/test_java_utils.py new file mode 100644 index 000000000..a071dbab7 --- /dev/null +++ b/tests/functional/workflows/java_gradle/test_java_utils.py @@ -0,0 +1,57 @@ +import os +import sys +import tempfile + +from unittest import TestCase + +from aws_lambda_builders.workflows.java_gradle import utils + + +class TestOSUtils(TestCase): + + def setUp(self): + self.src = tempfile.mkdtemp() + self.dst = tempfile.mkdtemp() + self.os_utils = utils.OSUtils() + + def test_popen_runs_a_process_and_returns_outcome(self): + cwd_py = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata', 'cwd.py') + p = self.os_utils.popen([sys.executable, cwd_py], + stdout=self.os_utils.pipe, + stderr=self.os_utils.pipe) + out, err = p.communicate() + self.assertEqual(p.returncode, 0) + self.assertEqual(out.decode('utf8').strip(), os.getcwd()) + + def test_popen_can_accept_cwd(self): + testdata_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'testdata') + p = self.os_utils.popen([sys.executable, 'cwd.py'], + stdout=self.os_utils.pipe, + stderr=self.os_utils.pipe, + cwd=testdata_dir) + out, err = p.communicate() + self.assertEqual(p.returncode, 0) + self.assertEqual(out.decode('utf8').strip(), os.path.abspath(testdata_dir)) + + def test_listdir(self): + names = ['a', 'b', 'c'] + for n in names: + self.new_file(self.src, n) + self.assertEquals(set(names), set(self.os_utils.listdir(self.src))) + + def test_copy(self): + f = self.new_file(self.src, 'a') + expected = os.path.join(self.dst, 'a') + copy_ret = self.os_utils.copy(f, expected) + self.assertEquals(expected, copy_ret) + self.assertTrue('a' in os.listdir(self.dst)) + + def test_exists(self): + self.new_file(self.src, 'foo') + self.assertTrue(self.os_utils.exists(os.path.join(self.src, 'foo'))) + + def new_file(self, d, name): + p = os.path.join(d, name) + with open(p, 'w') as f: + f.close() + return p diff --git a/tests/functional/workflows/nodejs_npm/test_utils.py b/tests/functional/workflows/nodejs_npm/test_utils.py 
index b84bc1e8f..01fd52f01 100644 --- a/tests/functional/workflows/nodejs_npm/test_utils.py +++ b/tests/functional/workflows/nodejs_npm/test_utils.py @@ -14,6 +14,58 @@ def setUp(self): self.osutils = utils.OSUtils() + def test_copy_file_copies_existing_file_into_a_dir(self): + + test_file = os.path.join(os.path.dirname(__file__), "test_data", "test.tgz") + + test_dir = tempfile.mkdtemp() + + self.osutils.copy_file(test_file, test_dir) + + output_files = set(os.listdir(test_dir)) + + shutil.rmtree(test_dir) + + self.assertEqual({"test.tgz"}, output_files) + + def test_copy_file_copies_existing_file_into_a_file(self): + + test_file = os.path.join(os.path.dirname(__file__), "test_data", "test.tgz") + + test_dir = tempfile.mkdtemp() + + self.osutils.copy_file(test_file, os.path.join(test_dir, "copied_test.tgz")) + + output_files = set(os.listdir(test_dir)) + + shutil.rmtree(test_dir) + + self.assertEqual({"copied_test.tgz"}, output_files) + + def test_remove_file_removes_existing_file(self): + + test_file = os.path.join(os.path.dirname(__file__), "test_data", "test.tgz") + + test_dir = tempfile.mkdtemp() + + copied_file = os.path.join(test_dir, "copied_test.tgz") + + shutil.copy(test_file, copied_file) + + self.osutils.remove_file(copied_file) + + self.assertFalse(os.path.isfile(copied_file)) + + def test_file_exists_checking_if_file_exists_in_a_dir(self): + + existing_file = os.path.join(os.path.dirname(__file__), "test_data", "test.tgz") + + nonexisting_file = os.path.join(os.path.dirname(__file__), "test_data", "nonexisting.tgz") + + self.assertTrue(self.osutils.file_exists(existing_file)) + + self.assertFalse(self.osutils.file_exists(nonexisting_file)) + def test_extract_tarfile_unpacks_a_tar(self): test_tar = os.path.join(os.path.dirname(__file__), "test_data", "test.tgz") diff --git a/tests/functional/workflows/python_pip/test_packager.py b/tests/functional/workflows/python_pip/test_packager.py index 8214ff1be..58618b768 100644 --- a/tests/functional/workflows/python_pip/test_packager.py +++ b/tests/functional/workflows/python_pip/test_packager.py @@ -201,7 +201,9 @@ def environ(self): @pytest.fixture def pip_runner(empty_env_osutils): pip = FakePip() - pip_runner = PipRunner(pip, osutils=empty_env_osutils) + pip_runner = PipRunner(python_exe=sys.executable, + pip=pip, + osutils=empty_env_osutils) return pip, pip_runner @@ -871,7 +873,7 @@ def test_build_into_existing_dir_with_preinstalled_packages( class TestSubprocessPip(object): def test_can_invoke_pip(self): - pip = SubprocessPip() + pip = SubprocessPip(python_exe=sys.executable) rc, out, err = pip.main(['--version']) # Simple assertion that we can execute pip and it gives us some output # and nothing on stderr. @@ -879,7 +881,7 @@ def test_can_invoke_pip(self): assert err == b'' def test_does_error_code_propagate(self): - pip = SubprocessPip() + pip = SubprocessPip(python_exe=sys.executable) rc, _, err = pip.main(['badcommand']) assert rc != 0 # Don't want to depend on a particular error message from pip since it diff --git a/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.lock b/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.lock new file mode 100644 index 000000000..5f571753d --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.lock @@ -0,0 +1,17 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
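The packager tests above construct the pip wrapper against an explicit interpreter. A minimal usage sketch of the same objects (the import path is assumed from the file layout shown in this diff):

```python
import sys

from aws_lambda_builders.workflows.python_pip.packager import SubprocessPip, PipRunner

# Drive pip through a chosen interpreter rather than whatever is first on PATH.
pip = SubprocessPip(python_exe=sys.executable)
runner = PipRunner(python_exe=sys.executable, pip=pip)

rc, out, err = pip.main(['--version'])  # pip runs inside the selected interpreter
```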
+ + +[[projects]] + digest = "1:69b1cc331fca23d702bd72f860c6a647afd0aa9fcbc1d0659b1365e26546dd70" + name = "not-really-a-git-repo.com/pkg/log" + packages = ["."] + pruneopts = "UT" + revision = "bcd833dfe83d3cebad139e4a29ed79cb2318bf95" + version = "v1.0.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = ["not-really-a-git-repo.com/pkg/log"] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.toml b/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.toml new file mode 100644 index 000000000..ec6996018 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/failed-remote/Gopkg.toml @@ -0,0 +1,7 @@ +[[constraint]] + name = "not-really-a-git-repo.com/pkg/log" + version = "1.0.0" + +[prune] + go-tests = true + unused-packages = true diff --git a/tests/integration/workflows/go_dep/data/src/failed-remote/main.go b/tests/integration/workflows/go_dep/data/src/failed-remote/main.go new file mode 100644 index 000000000..8a07b424c --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/failed-remote/main.go @@ -0,0 +1,7 @@ +package main + +import "not-really-a-git-repo.com/pkg/log" + +func main() { + log.Info("hello, world") +} diff --git a/tests/integration/workflows/go_dep/data/src/no-gopkg/main.go b/tests/integration/workflows/go_dep/data/src/no-gopkg/main.go new file mode 100644 index 000000000..635db7ae6 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/no-gopkg/main.go @@ -0,0 +1,7 @@ +package main + +import "fmt" + +func main() { + fmt.Println("hello, world") +} diff --git a/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.lock b/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.lock new file mode 100644 index 000000000..10ef81118 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.lock @@ -0,0 +1,9 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = [] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.toml b/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.toml new file mode 100644 index 000000000..5c879c7de --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/nodeps/Gopkg.toml @@ -0,0 +1,3 @@ +[prune] + go-tests = true + unused-packages = true diff --git a/tests/integration/workflows/go_dep/data/src/nodeps/main.go b/tests/integration/workflows/go_dep/data/src/nodeps/main.go new file mode 100644 index 000000000..635db7ae6 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/nodeps/main.go @@ -0,0 +1,7 @@ +package main + +import "fmt" + +func main() { + fmt.Println("hello, world") +} diff --git a/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.lock b/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.lock new file mode 100644 index 000000000..7cc815ae3 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.lock @@ -0,0 +1,44 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + digest = "1:0a69a1c0db3591fcefb47f115b224592c8dfa4368b7ba9fae509d5e16cdc95c8" + name = "github.com/konsorten/go-windows-terminal-sequences" + packages = ["."] + pruneopts = "UT" + revision = "5c8c8bd35d3832f5d134ae1e1e375b69a4d25242" + version = "v1.0.1" + +[[projects]] + digest = "1:69b1cc331fca23d702bd72f860c6a647afd0aa9fcbc1d0659b1365e26546dd70" + name = "github.com/sirupsen/logrus" + packages = ["."] + pruneopts = "UT" + revision = "bcd833dfe83d3cebad139e4a29ed79cb2318bf95" + version = "v1.2.0" + +[[projects]] + branch = "master" + digest = "1:38f553aff0273ad6f367cb0a0f8b6eecbaef8dc6cb8b50e57b6a81c1d5b1e332" + name = "golang.org/x/crypto" + packages = ["ssh/terminal"] + pruneopts = "UT" + revision = "505ab145d0a99da450461ae2c1a9f6cd10d1f447" + +[[projects]] + branch = "master" + digest = "1:10405139b45e3a97a3842c93984710e30466eb933545f219ad3f5e45246973b4" + name = "golang.org/x/sys" + packages = [ + "unix", + "windows", + ] + pruneopts = "UT" + revision = "9a3f9b0469bbc6b8802087ae5c0af9f61502de01" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = ["github.com/sirupsen/logrus"] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.toml b/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.toml new file mode 100644 index 000000000..2dac9c39f --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/remote-deps/Gopkg.toml @@ -0,0 +1,7 @@ +[[constraint]] + name = "github.com/sirupsen/logrus" + version = "1.2.0" + +[prune] + go-tests = true + unused-packages = true diff --git a/tests/integration/workflows/go_dep/data/src/remote-deps/main.go b/tests/integration/workflows/go_dep/data/src/remote-deps/main.go new file mode 100644 index 000000000..3a65fc4e5 --- /dev/null +++ b/tests/integration/workflows/go_dep/data/src/remote-deps/main.go @@ -0,0 +1,7 @@ +package main + +import "github.com/sirupsen/logrus" + +func main() { + logrus.Info("hello, world") +} diff --git a/tests/integration/workflows/go_dep/test_go_dep.py b/tests/integration/workflows/go_dep/test_go_dep.py new file mode 100644 index 000000000..e79b63ae2 --- /dev/null +++ b/tests/integration/workflows/go_dep/test_go_dep.py @@ -0,0 +1,82 @@ +import os +import shutil +import tempfile + +from unittest import TestCase + +from aws_lambda_builders.builder import LambdaBuilder +from aws_lambda_builders.exceptions import WorkflowFailedError + + +class TestGoDep(TestCase): + TEST_DATA_FOLDER = os.path.join(os.path.dirname(__file__), "data") + + def setUp(self): + self.artifacts_dir = tempfile.mkdtemp() + self.scratch_dir = tempfile.mkdtemp() + + os.environ["GOPATH"] = self.TEST_DATA_FOLDER + + self.no_deps = os.path.join(self.TEST_DATA_FOLDER, "src", "nodeps") + + self.builder = LambdaBuilder(language="go", + dependency_manager="dep", + application_framework=None) + + self.runtime = "go1.x" + + def tearDown(self): + shutil.rmtree(self.artifacts_dir) + shutil.rmtree(self.scratch_dir) + + def test_builds_project_with_no_deps(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "src", "nodeps") + + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "Gopkg.toml"), + runtime=self.runtime, + options={"artifact_executable_name": "main"}) + + expected_files = {"main"} + output_files = set(os.listdir(self.artifacts_dir)) + + self.assertEquals(expected_files, output_files) + + def test_builds_project_with_no_gopkg_file(self): + source_dir = 
os.path.join(self.TEST_DATA_FOLDER, "src", "no-gopkg") + + with self.assertRaises(WorkflowFailedError) as ex: + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "Gopkg.toml"), + runtime=self.runtime, + options={"artifact_executable_name": "main"}) + + self.assertEquals( + "GoDepBuilder:DepEnsure - Exec Failed: could not find project Gopkg.toml," + + " use dep init to initiate a manifest", + str(ex.exception)) + + def test_builds_project_with_remote_deps(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "src", "remote-deps") + + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "Gopkg.toml"), + runtime=self.runtime, + options={"artifact_executable_name": "main"}) + + expected_files = {"main"} + output_files = set(os.listdir(self.artifacts_dir)) + + self.assertEquals(expected_files, output_files) + + def test_builds_project_with_failed_remote_deps(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "src", "failed-remote") + + with self.assertRaises(WorkflowFailedError) as ex: + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "Gopkg.toml"), + runtime=self.runtime, + options={"artifact_executable_name": "main"}) + + # The full message is super long, so part of it is fine. + self.assertNotEqual(str(ex.exception).find('unable to deduce repository and source type for'), -1) diff --git a/tests/integration/workflows/go_modules/test_go.py b/tests/integration/workflows/go_modules/test_go.py new file mode 100644 index 000000000..a871c7190 --- /dev/null +++ b/tests/integration/workflows/go_modules/test_go.py @@ -0,0 +1,59 @@ +import os +import shutil +import tempfile + +from unittest import TestCase + +from aws_lambda_builders.builder import LambdaBuilder +from aws_lambda_builders.exceptions import WorkflowFailedError + + +class TestGoWorkflow(TestCase): + """ + Verifies that `go` workflow works by building a Lambda using Go Modules + """ + + TEST_DATA_FOLDER = os.path.join(os.path.dirname(__file__), "testdata") + + def setUp(self): + self.artifacts_dir = tempfile.mkdtemp() + self.scratch_dir = tempfile.mkdtemp() + self.builder = LambdaBuilder(language="go", + dependency_manager="modules", + application_framework=None) + self.runtime = "go1.x" + + def tearDown(self): + shutil.rmtree(self.artifacts_dir) + shutil.rmtree(self.scratch_dir) + + def test_builds_project_without_dependencies(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "no-deps") + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "go.mod"), + runtime=self.runtime, + options={"artifact_executable_name": "no-deps-main"}) + expected_files = {"no-deps-main"} + output_files = set(os.listdir(self.artifacts_dir)) + print(output_files) + self.assertEquals(expected_files, output_files) + + def test_builds_project_with_dependencies(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "with-deps") + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "go.mod"), + runtime=self.runtime, + options={"artifact_executable_name": "with-deps-main"}) + expected_files = {"with-deps-main"} + output_files = set(os.listdir(self.artifacts_dir)) + self.assertEquals(expected_files, output_files) + + def test_fails_if_modules_cannot_resolve_dependencies(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "broken-deps") + with self.assertRaises(WorkflowFailedError) as ctx: + self.builder.build(source_dir, 
self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "go.mod"), + runtime=self.runtime, + options={"artifact_executable_name": "failed"}) + self.assertIn("GoModulesBuilder:Build - Builder Failed: ", + str(ctx.exception)) diff --git a/tests/integration/workflows/go_modules/testdata/broken-deps/go.mod b/tests/integration/workflows/go_modules/testdata/broken-deps/go.mod new file mode 100644 index 000000000..864baf723 --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/broken-deps/go.mod @@ -0,0 +1,12 @@ +module github.com/awslabs/aws-lambda-builders + +require ( + github.com/BurntSushi/toml v0.3.1 // indirect + github.com/aws/aws-lambda-go v0.9999.0 // doesn't exist, broken dependency + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/objx v0.1.1 // indirect + github.com/stretchr/testify v1.2.2 // indirect + gopkg.in/urfave/cli.v1 v1.20.0 // indirect + gopkg.in/yaml.v2 v2.2.2 // indirect +) diff --git a/tests/integration/workflows/go_modules/testdata/broken-deps/main.go b/tests/integration/workflows/go_modules/testdata/broken-deps/main.go new file mode 100644 index 000000000..da29a2cad --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/broken-deps/main.go @@ -0,0 +1,4 @@ +package main + +func main() { +} diff --git a/tests/integration/workflows/go_modules/testdata/no-deps/go.mod b/tests/integration/workflows/go_modules/testdata/no-deps/go.mod new file mode 100644 index 000000000..846a0e06b --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/no-deps/go.mod @@ -0,0 +1 @@ +module github.com/awslabs/aws-lambda-builders diff --git a/tests/integration/workflows/go_modules/testdata/no-deps/main.go b/tests/integration/workflows/go_modules/testdata/no-deps/main.go new file mode 100644 index 000000000..da29a2cad --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/no-deps/main.go @@ -0,0 +1,4 @@ +package main + +func main() { +} diff --git a/tests/integration/workflows/go_modules/testdata/with-deps/go.mod b/tests/integration/workflows/go_modules/testdata/with-deps/go.mod new file mode 100644 index 000000000..18f6e0f05 --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/with-deps/go.mod @@ -0,0 +1,12 @@ +module github.com/awslabs/aws-lambda-builders + +require ( + github.com/BurntSushi/toml v0.3.1 // indirect + github.com/aws/aws-lambda-go v1.8.0 + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/objx v0.1.1 // indirect + github.com/stretchr/testify v1.2.2 // indirect + gopkg.in/urfave/cli.v1 v1.20.0 // indirect + gopkg.in/yaml.v2 v2.2.2 // indirect +) diff --git a/tests/integration/workflows/go_modules/testdata/with-deps/go.sum b/tests/integration/workflows/go_modules/testdata/with-deps/go.sum new file mode 100644 index 000000000..2a45ed3a9 --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/with-deps/go.sum @@ -0,0 +1,10 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/aws/aws-lambda-go v1.8.0 h1:YMCzi9FP7MNVVj9AkGpYyaqh/mvFOjhqiDtnNlWtKTg= +github.com/aws/aws-lambda-go v1.8.0/go.mod h1:zUsUQhAUjYzR8AuduJPCfhBuKWUaDbQiPOG+ouzmE1A= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
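The go_modules integration tests above also double as a usage example for driving the workflow programmatically. A hedged sketch of the same call (paths are placeholders; argument order mirrors the tests):

```python
import tempfile

from aws_lambda_builders.builder import LambdaBuilder

builder = LambdaBuilder(language="go",
                        dependency_manager="modules",
                        application_framework=None)

# "path/to/project" is assumed to contain main.go and a go.mod manifest.
builder.build("path/to/project",
              tempfile.mkdtemp(),          # artifacts_dir
              tempfile.mkdtemp(),          # scratch_dir
              "path/to/project/go.mod",    # manifest
              runtime="go1.x",
              options={"artifact_executable_name": "main"})
```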
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/tests/integration/workflows/go_modules/testdata/with-deps/main.go b/tests/integration/workflows/go_modules/testdata/with-deps/main.go new file mode 100644 index 000000000..a119afd60 --- /dev/null +++ b/tests/integration/workflows/go_modules/testdata/with-deps/main.go @@ -0,0 +1,17 @@ +package main + +import ( + "github.com/aws/aws-lambda-go/events" + "github.com/aws/aws-lambda-go/lambda" +) + +func handler(request events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) { + return events.APIGatewayProxyResponse{ + Body: "Hello World", + StatusCode: 200, + }, nil +} + +func main() { + lambda.Start(handler) +} diff --git a/tests/integration/workflows/java_gradle/test_java_gradle.py b/tests/integration/workflows/java_gradle/test_java_gradle.py new file mode 100644 index 000000000..d9fa06bb2 --- /dev/null +++ b/tests/integration/workflows/java_gradle/test_java_gradle.py @@ -0,0 +1,119 @@ +import os +import shutil +import tempfile + +from zipfile import ZipFile +from unittest import TestCase + +from aws_lambda_builders.builder import LambdaBuilder +from aws_lambda_builders.exceptions import WorkflowFailedError + + +class TestJavaGradle(TestCase): + SINGLE_BUILD_TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "testdata", "single-build") + MULTI_BUILD_TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "testdata", "multi-build") + + def setUp(self): + self.artifacts_dir = tempfile.mkdtemp() + self.scratch_dir = tempfile.mkdtemp() + self.builder = LambdaBuilder(language='java', dependency_manager='gradle', application_framework=None) + self.runtime = 'java' + + def tearDown(self): + shutil.rmtree(self.artifacts_dir) + shutil.rmtree(self.scratch_dir) + + def test_build_single_build_with_deps(self): + source_dir = os.path.join(self.SINGLE_BUILD_TEST_DATA_DIR, 'with-deps') + manifest_path = os.path.join(source_dir, 'build.gradle') + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, manifest_path, runtime=self.runtime) + expected_files = [p('aws', 'lambdabuilders', 'Main.class'), p('lib', 'annotations-2.1.0.jar')] + + self.assert_artifact_contains_files(expected_files) + + def test_build_single_build_with_resources(self): + source_dir = os.path.join(self.SINGLE_BUILD_TEST_DATA_DIR, 'with-resources') + manifest_path = os.path.join(source_dir, 'build.gradle') + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, manifest_path, runtime=self.runtime) + expected_files = [p('aws', 'lambdabuilders', 'Main.class'), p('some_data.txt'), + p('lib', 'annotations-2.1.0.jar')] + + self.assert_artifact_contains_files(expected_files) + + def test_build_single_build_with_test_deps_test_jars_not_included(self): + source_dir = os.path.join(self.SINGLE_BUILD_TEST_DATA_DIR, 'with-test-deps') + manifest_path = os.path.join(source_dir, 'build.gradle') + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, manifest_path, runtime=self.runtime) + expected_files = [p('aws', 'lambdabuilders', 'Main.class'), p('lib', 'annotations-2.1.0.jar')] + + self.assert_artifact_contains_files(expected_files) + self.assert_artifact_not_contains_file(p('lib', 's3-2.1.0.jar')) + + def 
test_build_single_build_with_deps_gradlew(self): + source_dir = os.path.join(self.SINGLE_BUILD_TEST_DATA_DIR, 'with-deps-gradlew') + manifest_path = os.path.join(source_dir, 'build.gradle') + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, manifest_path, runtime=self.runtime, + executable_search_paths=[source_dir]) + expected_files = [p('aws', 'lambdabuilders', 'Main.class'), p('lib', 'annotations-2.1.0.jar')] + + self.assert_artifact_contains_files(expected_files) + + def test_build_multi_build_with_deps_lambda1(self): + parent_dir = os.path.join(self.MULTI_BUILD_TEST_DATA_DIR, 'with-deps') + manifest_path = os.path.join(parent_dir, 'lambda1', 'build.gradle') + + lambda1_source = os.path.join(parent_dir, 'lambda1') + self.builder.build(lambda1_source, self.artifacts_dir, self.scratch_dir, manifest_path, + runtime=self.runtime) + + lambda1_expected_files = [p('aws', 'lambdabuilders', 'Lambda1_Main.class'), p('lib', 'annotations-2.1.0.jar')] + self.assert_artifact_contains_files(lambda1_expected_files) + + def test_build_multi_build_with_deps_lambda2(self): + parent_dir = os.path.join(self.MULTI_BUILD_TEST_DATA_DIR, 'with-deps') + manifest_path = os.path.join(parent_dir, 'lambda2', 'build.gradle') + + lambda2_source = os.path.join(parent_dir, 'lambda2') + self.builder.build(lambda2_source, self.artifacts_dir, self.scratch_dir, manifest_path, + runtime=self.runtime) + + lambda2_expected_files = [p('aws', 'lambdabuilders', 'Lambda2_Main.class'), p('lib', 'annotations-2.1.0.jar')] + self.assert_artifact_contains_files(lambda2_expected_files) + + def test_build_multi_build_with_deps_inter_module(self): + parent_dir = os.path.join(self.MULTI_BUILD_TEST_DATA_DIR, 'with-deps-inter-module') + manifest_path = os.path.join(parent_dir, 'lambda1', 'build.gradle') + + lambda1_source = os.path.join(parent_dir, 'lambda1') + self.builder.build(lambda1_source, self.artifacts_dir, self.scratch_dir, manifest_path, + runtime=self.runtime) + + lambda1_expected_files = [p('aws', 'lambdabuilders', 'Lambda1_Main.class'), p('lib', 'annotations-2.1.0.jar'), + p('lib', 'common.jar')] + self.assert_artifact_contains_files(lambda1_expected_files) + + def test_build_single_build_with_deps_broken(self): + source_dir = os.path.join(self.SINGLE_BUILD_TEST_DATA_DIR, 'with-deps-broken') + manifest_path = os.path.join(source_dir, 'build.gradle') + with self.assertRaises(WorkflowFailedError) as raised: + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, manifest_path, runtime=self.runtime) + self.assertTrue(raised.exception.args[0].startswith('JavaGradleWorkflow:GradleBuild - Gradle Failed')) + + def assert_artifact_contains_files(self, files): + for f in files: + self.assert_artifact_contains_file(f) + + def assert_artifact_contains_file(self, p): + self.assertTrue(os.path.exists(os.path.join(self.artifacts_dir, p))) + + def assert_artifact_not_contains_file(self, p): + self.assertFalse(os.path.exists(os.path.join(self.artifacts_dir, p))) + + def assert_zip_contains(self, zip_path, files): + with ZipFile(zip_path) as z: + zip_names = set(z.namelist()) + self.assertTrue(set(files).issubset(zip_names)) + + +def p(path, *comps): + return os.path.join(path, *comps) diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/build.gradle new file mode 100644 index 000000000..5c41c8421 --- /dev/null +++ 
b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/build.gradle @@ -0,0 +1,5 @@ +allprojects { + repositories { + mavenCentral() + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/src/main/java/aws/lambdabuilders/Foo.java b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/src/main/java/aws/lambdabuilders/Foo.java new file mode 100644 index 000000000..542be0095 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/common/src/main/java/aws/lambdabuilders/Foo.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Foo { + public void sayHello() { + System.out.println("Hello world!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/build.gradle new file mode 100644 index 000000000..f3648f8cc --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/build.gradle @@ -0,0 +1,12 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' + implementation project(':common') +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java new file mode 100644 index 000000000..fca189e70 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Lambda1_Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java new file mode 100644 index 000000000..5f7fa59ef --- 
/dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Lambda2_Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/settings.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/settings.gradle new file mode 100644 index 000000000..965186cb4 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps-inter-module/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'multi-build' +include 'lambda1', 'lambda2', 'common' diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/build.gradle new file mode 100644 index 000000000..5c41c8421 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/build.gradle @@ -0,0 +1,5 @@ +allprojects { + repositories { + mavenCentral() + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java new file mode 100644 index 000000000..fca189e70 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda1/src/main/java/aws/lambdabuilders/Lambda1_Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Lambda1_Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/build.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java new file mode 100644 index 000000000..5f7fa59ef --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/lambda2/src/main/java/aws/lambdabuilders/Lambda2_Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Lambda2_Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); 
+ } +} diff --git a/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/settings.gradle b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/settings.gradle new file mode 100644 index 000000000..6c882858a --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/multi-build/with-deps/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'multi-build' +include 'lambda1', 'lambda2' diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/build.gradle b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/build.gradle new file mode 100644 index 000000000..71d034524 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:does-not-exist:1.2.3' +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/src/main/java/aws/lambdabuilders/Main.java b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/src/main/java/aws/lambdabuilders/Main.java new file mode 100644 index 000000000..22c7a289d --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-broken/src/main/java/aws/lambdabuilders/Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/build.gradle b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.jar b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 000000000..87b738cbd Binary files /dev/null and b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.jar differ diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.properties b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 000000000..558870dad --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew new file mode 100755 index 000000000..af6708ff2 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew 
@@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew.bat b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew.bat new file mode 100644 index 000000000..0f8d5937c --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/src/main/java/aws/lambdabuilders/Main.java b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/src/main/java/aws/lambdabuilders/Main.java new file mode 100644 index 000000000..22c7a289d --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps-gradlew/src/main/java/aws/lambdabuilders/Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/build.gradle b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/src/main/java/aws/lambdabuilders/Main.java b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/src/main/java/aws/lambdabuilders/Main.java new file mode 100644 index 000000000..22c7a289d --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-deps/src/main/java/aws/lambdabuilders/Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git 
a/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/build.gradle b/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/build.gradle new file mode 100644 index 000000000..e4af9df15 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/build.gradle @@ -0,0 +1,11 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/src/main/java/aws/lambdabuilders/Main.java b/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/src/main/java/aws/lambdabuilders/Main.java new file mode 100644 index 000000000..22c7a289d --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/src/main/java/aws/lambdabuilders/Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/src/main/resources/some_data.txt b/tests/integration/workflows/java_gradle/testdata/single-build/with-resources/src/main/resources/some_data.txt new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/build.gradle b/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/build.gradle new file mode 100644 index 000000000..a413ecf82 --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/build.gradle @@ -0,0 +1,12 @@ +plugins { + id 'java' +} + +repositories { + mavenCentral() +} + +dependencies { + implementation 'software.amazon.awssdk:annotations:2.1.0' + testImplementation 'software.amazon.awssdk:s3:2.1.0' +} diff --git a/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/src/main/java/aws/lambdabuilders/Main.java b/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/src/main/java/aws/lambdabuilders/Main.java new file mode 100644 index 000000000..22c7a289d --- /dev/null +++ b/tests/integration/workflows/java_gradle/testdata/single-build/with-test-deps/src/main/java/aws/lambdabuilders/Main.java @@ -0,0 +1,7 @@ +package aws.lambdabuilders; + +public class Main { + public static void main(String[] args) { + System.out.println("Hello AWS Lambda Builders!"); + } +} diff --git a/tests/integration/workflows/nodejs_npm/test_nodejs_npm.py b/tests/integration/workflows/nodejs_npm/test_nodejs_npm.py index 980281e16..a8533d5ce 100644 --- a/tests/integration/workflows/nodejs_npm/test_nodejs_npm.py +++ b/tests/integration/workflows/nodejs_npm/test_nodejs_npm.py @@ -56,6 +56,22 @@ def test_builds_project_with_remote_dependencies(self): output_modules = set(os.listdir(os.path.join(self.artifacts_dir, "node_modules"))) self.assertEquals(expected_modules, output_modules) + def test_builds_project_with_npmrc(self): + source_dir = os.path.join(self.TEST_DATA_FOLDER, "npmrc") + + self.builder.build(source_dir, self.artifacts_dir, self.scratch_dir, + os.path.join(source_dir, "package.json"), + runtime=self.runtime) + + expected_files = {"package.json", "included.js", "node_modules"} + output_files = set(os.listdir(self.artifacts_dir)) + + self.assertEquals(expected_files, output_files) + + expected_modules = {"fake-http-request"} + 
output_modules = set(os.listdir(os.path.join(self.artifacts_dir, "node_modules"))) + self.assertEquals(expected_modules, output_modules) + def test_fails_if_npm_cannot_resolve_dependencies(self): source_dir = os.path.join(self.TEST_DATA_FOLDER, "broken-deps") diff --git a/tests/integration/workflows/nodejs_npm/testdata/npmrc/.npmrc b/tests/integration/workflows/nodejs_npm/testdata/npmrc/.npmrc new file mode 100644 index 000000000..8f1b27978 --- /dev/null +++ b/tests/integration/workflows/nodejs_npm/testdata/npmrc/.npmrc @@ -0,0 +1 @@ +optional=false \ No newline at end of file diff --git a/tests/integration/workflows/nodejs_npm/testdata/npmrc/excluded.js b/tests/integration/workflows/nodejs_npm/testdata/npmrc/excluded.js new file mode 100644 index 000000000..8bf8be437 --- /dev/null +++ b/tests/integration/workflows/nodejs_npm/testdata/npmrc/excluded.js @@ -0,0 +1,2 @@ +//excluded +const x = 1; diff --git a/tests/integration/workflows/nodejs_npm/testdata/npmrc/included.js b/tests/integration/workflows/nodejs_npm/testdata/npmrc/included.js new file mode 100644 index 000000000..e8f963aee --- /dev/null +++ b/tests/integration/workflows/nodejs_npm/testdata/npmrc/included.js @@ -0,0 +1,2 @@ +//included +const x = 1; diff --git a/tests/integration/workflows/nodejs_npm/testdata/npmrc/package.json b/tests/integration/workflows/nodejs_npm/testdata/npmrc/package.json new file mode 100644 index 000000000..549f8165b --- /dev/null +++ b/tests/integration/workflows/nodejs_npm/testdata/npmrc/package.json @@ -0,0 +1,15 @@ +{ + "name": "npmdeps", + "version": "1.0.0", + "description": "", + "files": ["included.js"], + "keywords": [], + "author": "", + "license": "APACHE2.0", + "dependencies": { + "fake-http-request": "*" + }, + "optionalDependencies": { + "minimal-request-promise": "*" + } +} diff --git a/tests/integration/workflows/python_pip/test_python_pip.py b/tests/integration/workflows/python_pip/test_python_pip.py index c4a4f4e41..ed2356d43 100644 --- a/tests/integration/workflows/python_pip/test_python_pip.py +++ b/tests/integration/workflows/python_pip/test_python_pip.py @@ -6,7 +6,7 @@ from unittest import TestCase from aws_lambda_builders.builder import LambdaBuilder -from aws_lambda_builders.exceptions import WorkflowFailedError, MisMatchRuntimeError +from aws_lambda_builders.exceptions import WorkflowFailedError class TestPythonPipWorkflow(TestCase): @@ -53,13 +53,13 @@ def test_must_build_python_project(self): self.assertEquals(expected_files, output_files) def test_mismatch_runtime_python_project(self): - with self.assertRaises(MisMatchRuntimeError) as mismatch_error: + # NOTE : Build still works if other versions of python are accessible on the path. eg: /usr/bin/python2.7 + # is still accessible within a python 3 virtualenv. 
+ try: self.builder.build(self.source_dir, self.artifacts_dir, self.scratch_dir, self.manifest_path_valid, runtime=self.runtime_mismatch[self.runtime]) - self.assertEquals(mismatch_error.msg, - MisMatchRuntimeError(language="python", - required_runtime=self.runtime_mismatch[self.runtime], - found_runtime=self.runtime).MESSAGE) + except WorkflowFailedError as ex: + self.assertIn("Binary validation failed!", str(ex)) def test_runtime_validate_python_project_fail_open_unsupported_runtime(self): with self.assertRaises(WorkflowFailedError): diff --git a/tests/unit/test_builder.py b/tests/unit/test_builder.py index f611fe1d4..f5f7051b2 100644 --- a/tests/unit/test_builder.py +++ b/tests/unit/test_builder.py @@ -78,9 +78,11 @@ def __init__(self, manifest_path, runtime=None, optimizations=None, - options=None): + options=None, + executable_search_paths=None): super(MyWorkflow, self).__init__(source_dir, artifacts_dir, scratch_dir, manifest_path, - runtime=runtime, optimizations=optimizations, options=options) + runtime=runtime, optimizations=optimizations, options=options, + executable_search_paths=executable_search_paths) # Don't load any other workflows. The above class declaration will automatically load the workflow into registry builder = LambdaBuilder(self.lang, self.lang_framework, self.app_framework, supported_workflows=[]) @@ -115,17 +117,18 @@ def test_with_mocks(self, scratch_dir_exists, get_workflow_mock, importlib_mock, get_workflow_mock.return_value = workflow_cls - with patch.object(LambdaBuilder, "_validate_runtime"): - builder = LambdaBuilder(self.lang, self.lang_framework, self.app_framework, supported_workflows=[]) - - builder.build("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", - runtime="runtime", optimizations="optimizations", options="options") + builder = LambdaBuilder(self.lang, self.lang_framework, self.app_framework, supported_workflows=[]) - workflow_cls.assert_called_with("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", - runtime="runtime", optimizations="optimizations", options="options") - workflow_instance.run.assert_called_once() - os_mock.path.exists.assert_called_once_with("scratch_dir") - if scratch_dir_exists: - os_mock.makedirs.not_called() - else: - os_mock.makedirs.assert_called_once_with("scratch_dir") + builder.build("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", + runtime="runtime", optimizations="optimizations", options="options", + executable_search_paths="executable_search_paths") + + workflow_cls.assert_called_with("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", + runtime="runtime", optimizations="optimizations", options="options", + executable_search_paths="executable_search_paths") + workflow_instance.run.assert_called_once() + os_mock.path.exists.assert_called_once_with("scratch_dir") + if scratch_dir_exists: + os_mock.makedirs.not_called() + else: + os_mock.makedirs.assert_called_once_with("scratch_dir") diff --git a/tests/unit/test_path_resolver.py b/tests/unit/test_path_resolver.py new file mode 100644 index 000000000..3295f6a3c --- /dev/null +++ b/tests/unit/test_path_resolver.py @@ -0,0 +1,27 @@ +from unittest import TestCase + +import os +import mock + +from aws_lambda_builders import utils +from aws_lambda_builders.path_resolver import PathResolver + + +class TestPathResolver(TestCase): + + def setUp(self): + self.path_resolver = PathResolver(runtime="chitti2.0", binary="chitti") + + def test_inits(self): + self.assertEquals(self.path_resolver.runtime, "chitti2.0") + 
self.assertEquals(self.path_resolver.binary, "chitti") + + def test_which_fails(self): + with self.assertRaises(ValueError): + utils.which = lambda x: None + self.path_resolver._which() + + def test_which_success_immediate(self): + with mock.patch.object(self.path_resolver, '_which') as which_mock: + which_mock.return_value = os.getcwd() + self.assertEquals(self.path_resolver.exec_paths, os.getcwd()) diff --git a/tests/unit/test_validator.py b/tests/unit/test_validator.py new file mode 100644 index 000000000..b3f37d8dd --- /dev/null +++ b/tests/unit/test_validator.py @@ -0,0 +1,15 @@ +from unittest import TestCase + +from aws_lambda_builders.validator import RuntimeValidator + + +class TestRuntimeValidator(TestCase): + + def setUp(self): + self.validator = RuntimeValidator(runtime="chitti2.0") + + def test_inits(self): + self.assertEquals(self.validator.runtime, "chitti2.0") + + def test_validate_runtime(self): + self.validator.validate("/usr/bin/chitti") diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index b137f8f13..08900950a 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -1,7 +1,15 @@ - +import os +import sys from unittest import TestCase from mock import Mock, call +try: + import pathlib +except ImportError: + import pathlib2 as pathlib + +from aws_lambda_builders.binary_path import BinaryPath +from aws_lambda_builders.validator import RuntimeValidator from aws_lambda_builders.workflow import BaseWorkflow, Capability from aws_lambda_builders.registry import get_workflow, DEFAULT_REGISTRY from aws_lambda_builders.exceptions import WorkflowFailedError, WorkflowUnknownError @@ -87,6 +95,7 @@ class MyWorkflow(BaseWorkflow): def test_must_initialize_variables(self): self.work = self.MyWorkflow("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", runtime="runtime", + executable_search_paths=[str(sys.executable)], optimizations={"a": "b"}, options={"c": "d"}) @@ -95,6 +104,7 @@ def test_must_initialize_variables(self): self.assertEquals(self.work.scratch_dir, "scratch_dir") self.assertEquals(self.work.manifest_path, "manifest_path") self.assertEquals(self.work.runtime, "runtime") + self.assertEquals(self.work.executable_search_paths, [str(sys.executable)]) self.assertEquals(self.work.optimizations, {"a": "b"}) self.assertEquals(self.work.options, {"c": "d"}) @@ -111,6 +121,7 @@ class MyWorkflow(BaseWorkflow): def setUp(self): self.work = self.MyWorkflow("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", runtime="runtime", + executable_search_paths=[], optimizations={"a": "b"}, options={"c": "d"}) @@ -148,18 +159,40 @@ class MyWorkflow(BaseWorkflow): def setUp(self): self.work = self.MyWorkflow("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", runtime="runtime", + executable_search_paths=[], optimizations={"a": "b"}, options={"c": "d"}) + def test_get_binaries(self): + self.assertIsNotNone(self.work.binaries) + for binary, binary_path in self.work.binaries.items(): + self.assertTrue(isinstance(binary_path, BinaryPath)) + + def test_get_validator(self): + self.assertIsNotNone(self.work.get_validators()) + for validator in self.work.get_validators(): + self.assertTrue(isinstance(validator, RuntimeValidator)) + def test_must_execute_actions_in_sequence(self): action_mock = Mock() + validator_mock = Mock() + validator_mock.validate = Mock() + validator_mock.validate.return_value = '/usr/bin/binary' + resolver_mock = Mock() + resolver_mock.exec_paths = ['/usr/bin/binary'] + binaries_mock = Mock() + 
binaries_mock.return_value = [] + + self.work.get_validators = lambda: validator_mock + self.work.get_resolvers = lambda: resolver_mock self.work.actions = [action_mock.action1, action_mock.action2, action_mock.action3] - + self.work.binaries = {"binary": BinaryPath(resolver=resolver_mock, validator=validator_mock, binary="binary")} self.work.run() self.assertEquals(action_mock.method_calls, [ call.action1.execute(), call.action2.execute(), call.action3.execute() ]) + self.assertTrue(validator_mock.validate.call_count, 1) def test_must_raise_with_no_actions(self): self.work.actions = [] @@ -193,6 +226,18 @@ def test_must_raise_if_action_crashed(self): self.assertIn("somevalueerror", str(ctx.exception)) + def test_supply_executable_path(self): + # Run workflow with supplied executable path to search for executables. + action_mock = Mock() + + self.work = self.MyWorkflow("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", + runtime="runtime", + executable_search_paths=[str(pathlib.Path(os.getcwd()).parent)], + optimizations={"a": "b"}, + options={"c": "d"}) + self.work.actions = [action_mock.action1, action_mock.action2, action_mock.action3] + self.work.run() + class TestBaseWorkflow_repr(TestCase): @@ -216,6 +261,7 @@ def setUp(self): self.work = self.MyWorkflow("source_dir", "artifacts_dir", "scratch_dir", "manifest_path", runtime="runtime", + executable_search_paths=[], optimizations={"a": "b"}, options={"c": "d"}) diff --git a/tests/unit/workflows/go_dep/__init__.py b/tests/unit/workflows/go_dep/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/workflows/go_dep/test_actions.py b/tests/unit/workflows/go_dep/test_actions.py new file mode 100644 index 000000000..fbefa8c18 --- /dev/null +++ b/tests/unit/workflows/go_dep/test_actions.py @@ -0,0 +1,69 @@ +from unittest import TestCase +from mock import patch + +from aws_lambda_builders.actions import ActionFailedError + +from aws_lambda_builders.workflows.go_dep.actions import DepEnsureAction, GoBuildAction +from aws_lambda_builders.workflows.go_dep.subproc_exec import ExecutionError + + +class TestDepEnsureAction(TestCase): + @patch("aws_lambda_builders.workflows.go_dep.subproc_exec.SubprocessExec") + def test_runs_dep_ensure(self, SubProcMock): + """ + tests the happy path of running `dep ensure` + """ + + sub_proc_dep = SubProcMock.return_value + action = DepEnsureAction("base", sub_proc_dep) + + action.execute() + + sub_proc_dep.run.assert_called_with(["ensure"], cwd="base") + + @patch("aws_lambda_builders.workflows.go_dep.subproc_exec.SubprocessExec") + def test_fails_dep_ensure(self, SubProcMock): + """ + tests failure, something being returned on stderr + """ + + sub_proc_dep = SubProcMock.return_value + sub_proc_dep.run.side_effect = ExecutionError(message="boom!") + action = DepEnsureAction("base", sub_proc_dep) + + with self.assertRaises(ActionFailedError) as raised: + action.execute() + + self.assertEqual(raised.exception.args[0], "Exec Failed: boom!") + + +class TestGoBuildAction(TestCase): + @patch("aws_lambda_builders.workflows.go_dep.subproc_exec.SubprocessExec") + def test_runs_go_build(self, SubProcMock): + """ + tests the happy path of running `go build` + """ + + sub_proc_go = SubProcMock.return_value + action = GoBuildAction("base", "source", "output", sub_proc_go, env={}) + + action.execute() + + sub_proc_go.run.assert_called_with(["build", "-o", "output", "source"], + cwd="source", + env={"GOOS": "linux", "GOARCH": "amd64"}) + 
@patch("aws_lambda_builders.workflows.go_dep.subproc_exec.SubprocessExec") + def test_fails_go_build(self, SubProcMock): + """ + tests failure, something being returned on stderr + """ + + sub_proc_go = SubProcMock.return_value + sub_proc_go.run.side_effect = ExecutionError(message="boom!") + action = GoBuildAction("base", "source", "output", sub_proc_go, env={}) + + with self.assertRaises(ActionFailedError) as raised: + action.execute() + + self.assertEqual(raised.exception.args[0], "Exec Failed: boom!") diff --git a/tests/unit/workflows/go_dep/test_exec.py b/tests/unit/workflows/go_dep/test_exec.py new file mode 100644 index 000000000..13a20f8aa --- /dev/null +++ b/tests/unit/workflows/go_dep/test_exec.py @@ -0,0 +1,72 @@ +from unittest import TestCase +from mock import patch + +from aws_lambda_builders.workflows.go_dep.subproc_exec import SubprocessExec, ExecutionError + + +class FakePopen: + def __init__(self, out=b"out", err=b"err", retcode=0): + self.out = out + self.err = err + self.returncode = retcode + + def communicate(self): + return self.out, self.err + + +class TestSubprocessExec(TestCase): + + @patch("aws_lambda_builders.workflows.go_dep.utils.OSUtils") + def setUp(self, OSUtilMock): + self.osutils = OSUtilMock.return_value + self.osutils.pipe = "PIPE" + self.popen = FakePopen() + self.osutils.popen.side_effect = [self.popen] + self.under_test = SubprocessExec(self.osutils, "bin") + + def test_run_executes_bin_on_nixes(self): + self.osutils.is_windows.side_effect = [False] + + self.under_test.run(["did", "thing"]) + + self.osutils.popen.assert_called_with(["bin", "did", "thing"], cwd=None, env=None, stderr="PIPE", stdout="PIPE") + + def test_uses_cwd_if_supplied(self): + self.under_test.run(["did", "thing"], cwd="/a/cwd") + + self.osutils.popen.assert_called_with(["bin", "did", "thing"], + cwd="/a/cwd", env=None, stderr="PIPE", stdout="PIPE") + + def test_uses_env_if_supplied(self): + self.under_test.run(["did", "thing"], env={"foo": "bar"}) + + self.osutils.popen.assert_called_with(["bin", "did", "thing"], + cwd=None, env={"foo": "bar"}, stderr="PIPE", stdout="PIPE") + + def test_returns_popen_out_decoded_if_retcode_is_0(self): + self.popen.out = "some encoded text\n\n" + + result = self.under_test.run(["did"]) + + self.assertEqual(result, "some encoded text") + + def test_raises_ExecutionError_with_err_text_if_retcode_is_not_0(self): + self.popen.returncode = 1 + self.popen.err = "some error text\n\n" + + with self.assertRaises(ExecutionError) as raised: + self.under_test.run(["did"]) + + self.assertEqual(raised.exception.args[0], "Exec Failed: some error text") + + def test_raises_ValueError_if_args_not_a_list(self): + with self.assertRaises(ValueError) as raised: + self.under_test.run(("pack")) + + self.assertEqual(raised.exception.args[0], "args must be a list") + + def test_raises_ValueError_if_args_empty(self): + with self.assertRaises(ValueError) as raised: + self.under_test.run([]) + + self.assertEqual(raised.exception.args[0], "requires at least one arg") diff --git a/tests/unit/workflows/go_dep/test_workflow.py b/tests/unit/workflows/go_dep/test_workflow.py new file mode 100644 index 000000000..000e666c0 --- /dev/null +++ b/tests/unit/workflows/go_dep/test_workflow.py @@ -0,0 +1,21 @@ +from unittest import TestCase + +from aws_lambda_builders.workflows.go_dep.workflow import GoDepWorkflow +from aws_lambda_builders.workflows.go_dep.actions import DepEnsureAction, GoBuildAction + + +class TestGoDepWorkflow(TestCase): + """ + The workflow requires an external tool, 
dep, to run. It will need to be tested with integration + tests. These are just tests to provide quick feedback if anything breaks. + """ + + def test_workflow_sets_up_workflow(self): + workflow = GoDepWorkflow("source", + "artifacts", + "scratch", + "manifest", + options={"artifact_executable_name": "foo"}) + self.assertEqual(len(workflow.actions), 2) + self.assertIsInstance(workflow.actions[0], DepEnsureAction) + self.assertIsInstance(workflow.actions[1], GoBuildAction) diff --git a/tests/unit/workflows/go_modules/__init__.py b/tests/unit/workflows/go_modules/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/workflows/go_modules/test_actions.py b/tests/unit/workflows/go_modules/test_actions.py new file mode 100644 index 000000000..b126442c6 --- /dev/null +++ b/tests/unit/workflows/go_modules/test_actions.py @@ -0,0 +1,24 @@ +from unittest import TestCase +from mock import patch + +from aws_lambda_builders.actions import ActionFailedError +from aws_lambda_builders.workflows.go_modules.actions import GoModulesBuildAction +from aws_lambda_builders.workflows.go_modules.builder import BuilderError + + +class TestGoModulesBuildAction(TestCase): + @patch("aws_lambda_builders.workflows.go_modules.builder.GoModulesBuilder") + def test_runs_go_modules_build(self, BuilderMock): + builder = BuilderMock.return_value + action = GoModulesBuildAction("source_dir", "output_path", builder) + action.execute() + builder.build.assert_called_with("source_dir", "output_path") + + @patch("aws_lambda_builders.workflows.go_modules.builder.GoModulesBuilder") + def test_raises_action_failed_on_failure(self, BuilderMock): + builder = BuilderMock.return_value + builder.build.side_effect = BuilderError(message="Fail") + action = GoModulesBuildAction("source_dir", "output_path", builder) + with self.assertRaises(ActionFailedError) as raised: + action.execute() + self.assertEqual(raised.exception.args[0], "Builder Failed: Fail") diff --git a/tests/unit/workflows/go_modules/test_builder.py b/tests/unit/workflows/go_modules/test_builder.py new file mode 100644 index 000000000..9e69db981 --- /dev/null +++ b/tests/unit/workflows/go_modules/test_builder.py @@ -0,0 +1,55 @@ +from unittest import TestCase + +from mock import patch, Mock + +from aws_lambda_builders.binary_path import BinaryPath +from aws_lambda_builders.workflows.go_modules.builder import GoModulesBuilder, BuilderError + + +class FakePopen: + def __init__(self, out=b'out', err=b'err', retcode=0): + self.out = out + self.err = err + self.returncode = retcode + + def communicate(self): + return self.out, self.err + + +class TestGoBuilder(TestCase): + + @patch("aws_lambda_builders.workflows.go_modules.utils.OSUtils") + def setUp(self, OSUtilMock): + self.osutils = OSUtilMock.return_value + self.osutils.pipe = 'PIPE' + self.popen = FakePopen() + self.osutils.popen.side_effect = [self.popen] + self.binaries = { + "go": BinaryPath(resolver=Mock(), validator=Mock(), + binary="go", binary_path="/path/to/go") + } + self.under_test = GoModulesBuilder(self.osutils, self.binaries) + + def test_run_executes_go_build_on_nixes(self): + self.osutils.is_windows.side_effect = [False] + self.under_test = GoModulesBuilder(self.osutils, self.binaries) + self.under_test.build("source_dir", "output_path") + self.osutils.popen.assert_called_with( + ["/path/to/go", "build", "-o", "output_path", "source_dir"], + cwd="source_dir", + env={'GOOS': 'linux', 'GOARCH': 'amd64'}, + stderr='PIPE', + stdout='PIPE', + ) + + def 
test_returns_popen_out_decoded_if_retcode_is_0(self): + self.popen.out = b'some encoded text\n\n' + result = self.under_test.build("source_dir", "output_path") + self.assertEqual(result, 'some encoded text') + + def test_raises_BuilderError_with_err_text_if_retcode_is_not_0(self): + self.popen.returncode = 1 + self.popen.err = b'some error text\n\n' + with self.assertRaises(BuilderError) as raised: + self.under_test.build("source_dir", "output_path") + self.assertEqual(raised.exception.args[0], "Builder Failed: some error text") diff --git a/tests/unit/workflows/go_modules/test_validator.py b/tests/unit/workflows/go_modules/test_validator.py new file mode 100644 index 000000000..9e3931de7 --- /dev/null +++ b/tests/unit/workflows/go_modules/test_validator.py @@ -0,0 +1,62 @@ +from unittest import TestCase + +import mock +from parameterized import parameterized + +from aws_lambda_builders.exceptions import MisMatchRuntimeError +from aws_lambda_builders.workflows.go_modules.validator import GoRuntimeValidator + + +class MockSubProcess(object): + + def __init__(self, returncode, out=b"", err=b""): + self.returncode = returncode + self.out = out + self.err = err + + def communicate(self): + return (self.out, self.err) + + +class TestGoRuntimeValidator(TestCase): + + def setUp(self): + self.validator = GoRuntimeValidator(runtime="go1.x") + + @parameterized.expand([ + "go1.x", + ]) + def test_supported_runtimes(self, runtime): + validator = GoRuntimeValidator(runtime=runtime) + self.assertTrue(validator.has_runtime()) + + def test_runtime_validate_unsupported_language_fail_open(self): + validator = GoRuntimeValidator(runtime="go2.x") + validator.validate(runtime_path="/usr/bin/go2") + + def test_runtime_validate_supported_version_runtime(self): + with mock.patch("subprocess.Popen") as mock_subprocess: + mock_subprocess.return_value = MockSubProcess(0, out=b"go version go1.11.2 test") + self.validator.validate(runtime_path="/usr/bin/go") + self.assertTrue(mock_subprocess.call_count, 1) + + def test_runtime_validate_mismatch_nonzero_exit(self): + with mock.patch("subprocess.Popen") as mock_subprocess: + mock_subprocess.return_value = MockSubProcess(1) + with self.assertRaises(MisMatchRuntimeError): + self.validator.validate(runtime_path="/usr/bin/go") + self.assertTrue(mock_subprocess.call_count, 1) + + def test_runtime_validate_mismatch_invalid_version(self): + with mock.patch("subprocess.Popen") as mock_subprocess: + mock_subprocess.return_value = MockSubProcess(0, out=b"go version") + with self.assertRaises(MisMatchRuntimeError): + self.validator.validate(runtime_path="/usr/bin/go") + self.assertTrue(mock_subprocess.call_count, 1) + + def test_runtime_validate_mismatch_minor_version(self): + with mock.patch("subprocess.Popen") as mock_subprocess: + mock_subprocess.return_value = MockSubProcess(0, out=b"go version go1.10.2 test") + with self.assertRaises(MisMatchRuntimeError): + self.validator.validate(runtime_path="/usr/bin/go") + self.assertTrue(mock_subprocess.call_count, 1) diff --git a/tests/unit/workflows/go_modules/test_workflow.py b/tests/unit/workflows/go_modules/test_workflow.py new file mode 100644 index 000000000..6a85df8bb --- /dev/null +++ b/tests/unit/workflows/go_modules/test_workflow.py @@ -0,0 +1,19 @@ +from unittest import TestCase + +from aws_lambda_builders.workflows.go_modules.workflow import GoModulesWorkflow +from aws_lambda_builders.workflows.go_modules.actions import GoModulesBuildAction + + +class TestGoModulesWorkflow(TestCase): + """ + the workflow requires an 
external utility (builder) to run, so it is extensively tested in integration tests. + this is just a quick wiring test to provide fast feedback if things are badly broken + """ + + def test_workflow_sets_up_builder_actions(self): + workflow = GoModulesWorkflow( + "source", "artifacts", "scratch_dir", "manifest", + runtime="go1.x", + options={"artifact_executable_name": "main"}) + self.assertEqual(len(workflow.actions), 1) + self.assertIsInstance(workflow.actions[0], GoModulesBuildAction) diff --git a/tests/unit/workflows/java_gradle/__init__.py b/tests/unit/workflows/java_gradle/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/workflows/java_gradle/test_actions.py b/tests/unit/workflows/java_gradle/test_actions.py new file mode 100644 index 000000000..f69a97075 --- /dev/null +++ b/tests/unit/workflows/java_gradle/test_actions.py @@ -0,0 +1,103 @@ +from unittest import TestCase +from mock import patch +import os + +from aws_lambda_builders.actions import ActionFailedError +from aws_lambda_builders.workflows.java_gradle.actions import JavaGradleBuildAction, JavaGradleCopyArtifactsAction, \ + GradleExecutionError + + +class TestJavaGradleBuildAction(TestCase): + + @patch("aws_lambda_builders.workflows.java_gradle.utils.OSUtils") + @patch("aws_lambda_builders.workflows.java_gradle.gradle.SubprocessGradle") + def setUp(self, MockSubprocessGradle, MockOSUtils): + self.subprocess_gradle = MockSubprocessGradle.return_value + self.os_utils = MockOSUtils.return_value + self.os_utils.copy.side_effect = lambda src, dst: dst + self.source_dir = os.path.join('source_dir') + self.manifest_path = os.path.join(self.source_dir, 'manifest') + self.artifacts_dir = os.path.join('artifacts_dir') + self.scratch_dir = os.path.join('scratch_dir') + + def test_calls_gradle_build(self): + action = JavaGradleBuildAction(self.source_dir, + self.manifest_path, + self.subprocess_gradle, + self.scratch_dir, + self.os_utils) + action.execute() + self.subprocess_gradle.build.assert_called_with(self.source_dir, + self.manifest_path, + os.path.join(self.scratch_dir, + JavaGradleBuildAction.GRADLE_CACHE_DIR_NAME), + os.path.join(self.scratch_dir, + JavaGradleBuildAction.INIT_SCRIPT), + {JavaGradleBuildAction.SCRATCH_DIR_PROPERTY: os.path.abspath( + self.scratch_dir)}) + + def test_error_in_init_file_copy_raises_action_error(self): + self.os_utils.copy.side_effect = Exception("Copy failed!") + action = JavaGradleBuildAction(self.source_dir, + self.manifest_path, + self.subprocess_gradle, + self.scratch_dir, + self.os_utils) + with self.assertRaises(ActionFailedError) as raised: + action.execute() + self.assertEquals(raised.exception.args[0], "Copy failed!") + + def test_error_building_project_raises_action_error(self): + self.subprocess_gradle.build.side_effect = GradleExecutionError(message='Build failed!') + action = JavaGradleBuildAction(self.source_dir, + self.manifest_path, + self.subprocess_gradle, + self.scratch_dir, + self.os_utils) + with self.assertRaises(ActionFailedError) as raised: + action.execute() + self.assertEquals(raised.exception.args[0], 'Gradle Failed: Build failed!') + + def test_computes_correct_cache_dir(self): + action = JavaGradleBuildAction(self.source_dir, + self.manifest_path, + self.subprocess_gradle, + self.scratch_dir, + self.os_utils) + self.assertEquals(action.gradle_cache_dir, + os.path.join(self.scratch_dir, JavaGradleBuildAction.GRADLE_CACHE_DIR_NAME)) + + +class TestJavaGradleCopyArtifactsAction(TestCase): + + 
@patch("aws_lambda_builders.workflows.java_gradle.utils.OSUtils") + def setUp(self, MockOSUtils): + self.os_utils = MockOSUtils.return_value + self.os_utils.copy.side_effect = lambda src, dst: dst + self.source_dir = "source_dir" + self.artifacts_dir = "artifacts_dir" + self.scratch_dir = "scratch_dir" + self.build_dir = os.path.join(self.scratch_dir, 'build1') + + def test_copies_artifacts(self): + self.os_utils.copytree.side_effect = lambda src, dst: None + self.os_utils.copy.side_effect = lambda src, dst: None + + action = JavaGradleCopyArtifactsAction(self.source_dir, + self.artifacts_dir, + self.build_dir, + self.os_utils) + action.execute() + + self.os_utils.copytree.assert_called_with( + os.path.join(self.build_dir, 'build', 'distributions', 'lambda-build'), self.artifacts_dir) + + def test_error_in_artifact_copy_raises_action_error(self): + self.os_utils.copytree.side_effect = Exception("scandir failed!") + action = JavaGradleCopyArtifactsAction(self.source_dir, + self.artifacts_dir, + self.build_dir, + self.os_utils) + with self.assertRaises(ActionFailedError) as raised: + action.execute() + self.assertEquals(raised.exception.args[0], "scandir failed!") diff --git a/tests/unit/workflows/java_gradle/test_gradle.py b/tests/unit/workflows/java_gradle/test_gradle.py new file mode 100644 index 000000000..b4376cafc --- /dev/null +++ b/tests/unit/workflows/java_gradle/test_gradle.py @@ -0,0 +1,93 @@ +import subprocess + +from unittest import TestCase +from mock import patch + +from aws_lambda_builders.binary_path import BinaryPath +from aws_lambda_builders.workflows.java_gradle.gradle import SubprocessGradle, GradleExecutionError, \ + BuildFileNotFoundError + + +class FakePopen: + def __init__(self, out=b'out', err=b'err', retcode=0): + self.out = out + self.err = err + self.returncode = retcode + + def communicate(self): + return self.out, self.err + + def wait(self): + pass + + +class TestSubprocessGradle(TestCase): + + @patch("aws_lambda_builders.workflows.java_gradle.utils.OSUtils") + def setUp(self, MockOSUtils): + self.os_utils = MockOSUtils.return_value + self.os_utils.exists.side_effect = lambda d: True + self.popen = FakePopen() + self.os_utils.popen.side_effect = [self.popen] + self.gradle_path = '/path/to/gradle' + self.gradle_binary = BinaryPath(None, None, 'gradle', binary_path=self.gradle_path) + self.source_dir = '/foo/bar/baz' + self.manifest_path = '/foo/bar/baz/build.gradle' + self.init_script = '/path/to/init' + + def test_no_os_utils_build_init_throws(self): + with self.assertRaises(ValueError) as err_assert: + SubprocessGradle(gradle_binary=self.gradle_binary) + self.assertEquals(err_assert.exception.args[0], 'Must provide OSUtils') + + def test_no_gradle_exec_init_throws(self): + with self.assertRaises(ValueError) as err_assert: + SubprocessGradle(None) + self.assertEquals(err_assert.exception.args[0], 'Must provide Gradle BinaryPath') + + def test_no_build_file_throws(self): + self.os_utils.exists.side_effect = lambda d: False + gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils) + with self.assertRaises(BuildFileNotFoundError) as raised: + gradle.build(self.source_dir, self.manifest_path) + self.assertEquals(raised.exception.args[0], + 'Gradle Failed: Gradle build file not found: %s' % self.manifest_path) + + def test_build_no_init_script(self): + gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils) + gradle.build(self.source_dir, self.manifest_path) + 
+        self.os_utils.popen.assert_called_with([self.gradle_path, 'build', '--build-file', self.manifest_path],
+                                               cwd=self.source_dir,
+                                               stderr=subprocess.PIPE,
+                                               stdout=subprocess.PIPE)
+
+    def test_gradlew_path_is_dummy_uses_gradle_binary(self):
+        gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils)
+        gradle.build(self.source_dir, self.manifest_path)
+        self.os_utils.popen.assert_called_with([self.gradle_path, 'build', '--build-file', self.manifest_path],
+                                               cwd=self.source_dir,
+                                               stderr=subprocess.PIPE,
+                                               stdout=subprocess.PIPE)
+
+    def test_build_with_init_script(self):
+        gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils)
+        gradle.build(self.source_dir, self.manifest_path, init_script_path=self.init_script)
+        self.os_utils.popen.assert_called_with(
+            [self.gradle_path, 'build', '--build-file', self.manifest_path, '--init-script', self.init_script],
+            cwd=self.source_dir, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+
+    def test_raises_exception_if_retcode_not_0(self):
+        self.popen = FakePopen(retcode=1, err=b'Some Error Message')
+        self.os_utils.popen.side_effect = [self.popen]
+        gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils)
+        with self.assertRaises(GradleExecutionError) as err:
+            gradle.build(self.source_dir, self.manifest_path)
+        self.assertEqual(err.exception.args[0], 'Gradle Failed: Some Error Message')
+
+    def test_includes_build_properties_in_command(self):
+        gradle = SubprocessGradle(gradle_binary=self.gradle_binary, os_utils=self.os_utils)
+        gradle.build(self.source_dir, self.manifest_path, init_script_path=self.init_script, properties={'foo': 'bar'})
+        self.os_utils.popen.assert_called_with(
+            [self.gradle_path, 'build', '--build-file', self.manifest_path, '-Dfoo=bar', '--init-script',
+             self.init_script],
+            cwd=self.source_dir, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
diff --git a/tests/unit/workflows/java_gradle/test_gradle_validator.py b/tests/unit/workflows/java_gradle/test_gradle_validator.py
new file mode 100644
index 000000000..cdadf52fa
--- /dev/null
+++ b/tests/unit/workflows/java_gradle/test_gradle_validator.py
@@ -0,0 +1,75 @@
+from unittest import TestCase
+
+from mock import patch, Mock
+from parameterized import parameterized
+from aws_lambda_builders.workflows.java_gradle.gradle_validator import GradleValidator
+
+
+class FakePopen(object):
+    def __init__(self, stdout=None, stderr=None, returncode=0):
+        self._stdout = stdout
+        self._stderr = stderr
+        self._returncode = returncode
+
+    def communicate(self):
+        return self._stdout, self._stderr
+
+    @property
+    def returncode(self):
+        return self._returncode
+
+
+class TestGradleBinaryValidator(TestCase):
+
+    @patch("aws_lambda_builders.workflows.java_gradle.utils.OSUtils")
+    def setUp(self, MockOSUtils):
+        self.mock_os_utils = MockOSUtils.return_value
+        self.mock_log = Mock()
+        self.gradle_path = '/path/to/gradle'
+
+    @parameterized.expand([
+        '1.7.0',
+        '1.8.9',
+        '11.0.0'
+    ])
+    def test_accepts_any_jvm_mv(self, version):
+        version_string = ('JVM: %s' % version).encode()
+        self.mock_os_utils.popen.side_effect = [FakePopen(stdout=version_string)]
+        validator = GradleValidator(os_utils=self.mock_os_utils)
+        self.assertTrue(validator.validate(gradle_path=self.gradle_path))
+        self.assertEqual(validator.validated_binary_path, self.gradle_path)
+
+    def test_emits_warning_when_jvm_mv_greater_than_8(self):
+        version_string = 'JVM: 9.0.0'.encode()
+        self.mock_os_utils.popen.side_effect = [FakePopen(stdout=version_string)]
+        validator = GradleValidator(os_utils=self.mock_os_utils, log=self.mock_log)
+        self.assertTrue(validator.validate(gradle_path=self.gradle_path))
+        self.assertEqual(validator.validated_binary_path, self.gradle_path)
+        self.mock_log.warning.assert_called_with(GradleValidator.MAJOR_VERSION_WARNING, self.gradle_path, '9')
+
+    @parameterized.expand([
+        '1.6.0',
+        '1.7.0',
+        '1.8.9'
+    ])
+    def test_does_not_emit_warning_when_jvm_mv_8_or_less(self, version):
+        version_string = ('JVM: %s' % version).encode()
+        self.mock_os_utils.popen.side_effect = [FakePopen(stdout=version_string)]
+        validator = GradleValidator(os_utils=self.mock_os_utils, log=self.mock_log)
+        self.assertTrue(validator.validate(gradle_path=self.gradle_path))
+        self.assertEqual(validator.validated_binary_path, self.gradle_path)
+        self.mock_log.warning.assert_not_called()
+
+    def test_emits_warning_when_gradle_executable_fails(self):
+        version_string = 'JVM: 9.0.0'.encode()
+        self.mock_os_utils.popen.side_effect = [FakePopen(stdout=version_string, returncode=1)]
+        validator = GradleValidator(os_utils=self.mock_os_utils, log=self.mock_log)
+        validator.validate(gradle_path=self.gradle_path)
+        self.mock_log.warning.assert_called_with(GradleValidator.VERSION_STRING_WARNING, self.gradle_path)
+
+    def test_emits_warning_when_version_string_not_found(self):
+        version_string = 'The Java Version: 9.0.0'.encode()
+        self.mock_os_utils.popen.side_effect = [FakePopen(stdout=version_string, returncode=0)]
+        validator = GradleValidator(os_utils=self.mock_os_utils, log=self.mock_log)
+        validator.validate(gradle_path=self.gradle_path)
+        self.mock_log.warning.assert_called_with(GradleValidator.VERSION_STRING_WARNING, self.gradle_path)
diff --git a/tests/unit/workflows/java_gradle/test_gradlew_resolver.py b/tests/unit/workflows/java_gradle/test_gradlew_resolver.py
new file mode 100644
index 000000000..10b784c82
--- /dev/null
+++ b/tests/unit/workflows/java_gradle/test_gradlew_resolver.py
@@ -0,0 +1,44 @@
+from unittest import TestCase
+
+from mock import patch
+from parameterized import parameterized
+from aws_lambda_builders.workflows.java_gradle.gradle_resolver import GradleResolver
+
+
+class TestGradleResolver(TestCase):
+
+    @patch("aws_lambda_builders.workflows.java_gradle.utils.OSUtils")
+    def setUp(self, MockOSUtils):
+        self.mock_os_utils = MockOSUtils.return_value
+        self.mock_os_utils.is_windows.side_effect = [False]
+
+    def test_gradlew_exists_returns_gradlew(self):
+        gradlew_path = '/path/to/gradlew'
+        self.mock_os_utils.which.side_effect = lambda executable, executable_search_paths: [gradlew_path]
+
+        resolver = GradleResolver(os_utils=self.mock_os_utils)
+        self.assertEqual(resolver.exec_paths, [gradlew_path])
+
+    def test_gradlew_not_exists_returns_gradle_on_path(self):
+        gradle_path = '/path/to/gradle'
+        self.mock_os_utils.which.side_effect = lambda executable, executable_search_paths: \
+            [] if executable == 'gradlew' else [gradle_path]
+
+        resolver = GradleResolver(os_utils=self.mock_os_utils)
+        self.assertEqual(resolver.exec_paths, [gradle_path])
+
+    def test_throws_value_error_if_no_exec_found(self):
+        self.mock_os_utils.which.side_effect = lambda executable, executable_search_paths: []
+        resolver = GradleResolver(os_utils=self.mock_os_utils)
+        with self.assertRaises(ValueError) as raised:
+            resolver.exec_paths()
+        self.assertEqual(raised.exception.args[0], 'No Gradle executable found!')
+
+    @parameterized.expand([
+        [True, 'gradlew.bat'],
+        [False, 'gradlew']
+    ])
+    def test_uses_correct_gradlew_name(self, is_windows, expected_wrapper_name):
+        self.mock_os_utils.is_windows.side_effect = [is_windows]
+        resolver = GradleResolver(os_utils=self.mock_os_utils)
+        self.assertEqual(resolver.wrapper_name, expected_wrapper_name)
diff --git a/tests/unit/workflows/java_gradle/test_workflow.py b/tests/unit/workflows/java_gradle/test_workflow.py
new file mode 100644
index 000000000..1a17da9d5
--- /dev/null
+++ b/tests/unit/workflows/java_gradle/test_workflow.py
@@ -0,0 +1,50 @@
+from unittest import TestCase
+
+import hashlib
+import os
+from aws_lambda_builders.workflows.java_gradle.workflow import JavaGradleWorkflow
+from aws_lambda_builders.workflows.java_gradle.actions import JavaGradleBuildAction, JavaGradleCopyArtifactsAction
+from aws_lambda_builders.workflows.java_gradle.gradle_resolver import GradleResolver
+from aws_lambda_builders.workflows.java_gradle.gradle_validator import GradleValidator
+
+
+class TestJavaGradleWorkflow(TestCase):
+    """
+    the workflow requires an external utility (gradle) to run, so it is extensively tested in integration tests.
+    this is just a quick wiring test to provide fast feedback if things are badly broken
+    """
+
+    def test_workflow_sets_up_gradle_actions(self):
+        workflow = JavaGradleWorkflow("source", "artifacts", "scratch_dir", "manifest")
+
+        self.assertEqual(len(workflow.actions), 2)
+
+        self.assertIsInstance(workflow.actions[0], JavaGradleBuildAction)
+
+        self.assertIsInstance(workflow.actions[1], JavaGradleCopyArtifactsAction)
+
+    def test_workflow_sets_up_resolvers(self):
+        workflow = JavaGradleWorkflow("source", "artifacts", "scratch_dir", "manifest")
+
+        resolvers = workflow.get_resolvers()
+        self.assertEqual(len(resolvers), 1)
+
+        self.assertIsInstance(resolvers[0], GradleResolver)
+
+    def test_workflow_sets_up_validators(self):
+        workflow = JavaGradleWorkflow("source", "artifacts", "scratch_dir", "manifest")
+
+        validators = workflow.get_validators()
+        self.assertEqual(len(validators), 1)
+
+        self.assertIsInstance(validators[0], GradleValidator)
+
+    def test_computes_correct_build_dir(self):
+        workflow = JavaGradleWorkflow("source", "artifacts", "scratch_dir", "manifest")
+
+        sha1 = hashlib.sha1()
+        sha1.update(os.path.abspath(workflow.source_dir).encode('utf8'))
+
+        expected_build_dir = os.path.join(workflow.scratch_dir, sha1.hexdigest())
+
+        self.assertEqual(expected_build_dir, workflow.build_output_dir)
diff --git a/tests/unit/workflows/nodejs_npm/test_actions.py b/tests/unit/workflows/nodejs_npm/test_actions.py
index f6d0a46fb..612ccb189 100644
--- a/tests/unit/workflows/nodejs_npm/test_actions.py
+++ b/tests/unit/workflows/nodejs_npm/test_actions.py
@@ -2,7 +2,8 @@
 from mock import patch
 
 from aws_lambda_builders.actions import ActionFailedError
-from aws_lambda_builders.workflows.nodejs_npm.actions import NodejsNpmPackAction, NodejsNpmInstallAction
+from aws_lambda_builders.workflows.nodejs_npm.actions import \
+    NodejsNpmPackAction, NodejsNpmInstallAction, NodejsNpmrcCopyAction, NodejsNpmrcCleanUpAction
 
 from aws_lambda_builders.workflows.nodejs_npm.npm import NpmExecutionError
 
@@ -78,3 +79,77 @@ def test_raises_action_failed_when_npm_fails(self, SubprocessNpmMock):
             action.execute()
 
         self.assertEqual(raised.exception.args[0], "NPM Failed: boom!")
+
+
+class TestNodejsNpmrcCopyAction(TestCase):
+
+    @patch("aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils")
+    def test_copies_npmrc_into_a_project(self, OSUtilMock):
+        osutils = OSUtilMock.return_value
+        osutils.joinpath.side_effect = lambda a, b: "{}/{}".format(a, b)
+
+        action = NodejsNpmrcCopyAction("artifacts",
+                                       "source",
+                                       osutils=osutils)
+        osutils.file_exists.side_effect = [True]
+        action.execute()
+
+        osutils.file_exists.assert_called_with("source/.npmrc")
+        osutils.copy_file.assert_called_with("source/.npmrc", "artifacts")
+
+    @patch("aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils")
+    def test_skips_copying_npmrc_into_a_project_if_npmrc_doesnt_exist(self, OSUtilMock):
+        osutils = OSUtilMock.return_value
+        osutils.joinpath.side_effect = lambda a, b: "{}/{}".format(a, b)
+
+        action = NodejsNpmrcCopyAction("artifacts",
+                                       "source",
+                                       osutils=osutils)
+        osutils.file_exists.side_effect = [False]
+        action.execute()
+
+        osutils.file_exists.assert_called_with("source/.npmrc")
+        osutils.copy_file.assert_not_called()
+
+    @patch("aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils")
+    def test_raises_action_failed_when_copying_fails(self, OSUtilMock):
+        osutils = OSUtilMock.return_value
+        osutils.joinpath.side_effect = lambda a, b: "{}/{}".format(a, b)
+
+        osutils.copy_file.side_effect = OSError()
+
+        action = NodejsNpmrcCopyAction("artifacts",
+                                       "source",
+                                       osutils=osutils)
+
+        with self.assertRaises(ActionFailedError):
+            action.execute()
+
+
+class TestNodejsNpmrcCleanUpAction(TestCase):
+
+    @patch("aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils")
+    def test_removes_npmrc_if_npmrc_exists(self, OSUtilMock):
+        osutils = OSUtilMock.return_value
+        osutils.joinpath.side_effect = lambda a, b: "{}/{}".format(a, b)
+
+        action = NodejsNpmrcCleanUpAction(
+            "artifacts",
+            osutils=osutils)
+        osutils.file_exists.side_effect = [True]
+        action.execute()
+
+        osutils.remove_file.assert_called_with("artifacts/.npmrc")
+
+    @patch("aws_lambda_builders.workflows.nodejs_npm.utils.OSUtils")
+    def test_skips_npmrc_removal_if_npmrc_doesnt_exist(self, OSUtilMock):
+        osutils = OSUtilMock.return_value
+        osutils.joinpath.side_effect = lambda a, b: "{}/{}".format(a, b)
+
+        action = NodejsNpmrcCleanUpAction(
+            "artifacts",
+            osutils=osutils)
+        osutils.file_exists.side_effect = [False]
+        action.execute()
+
+        osutils.remove_file.assert_not_called()
diff --git a/tests/unit/workflows/nodejs_npm/test_workflow.py b/tests/unit/workflows/nodejs_npm/test_workflow.py
index 1d8e0d63c..c2fe05be3 100644
--- a/tests/unit/workflows/nodejs_npm/test_workflow.py
+++ b/tests/unit/workflows/nodejs_npm/test_workflow.py
@@ -2,7 +2,8 @@
 
 from aws_lambda_builders.actions import CopySourceAction
 from aws_lambda_builders.workflows.nodejs_npm.workflow import NodejsNpmWorkflow
-from aws_lambda_builders.workflows.nodejs_npm.actions import NodejsNpmPackAction, NodejsNpmInstallAction
+from aws_lambda_builders.workflows.nodejs_npm.actions import \
+    NodejsNpmPackAction, NodejsNpmInstallAction, NodejsNpmrcCopyAction, NodejsNpmrcCleanUpAction
 
 
 class TestNodejsNpmWorkflow(TestCase):
@@ -16,10 +17,14 @@ def test_workflow_sets_up_npm_actions(self):
 
         workflow = NodejsNpmWorkflow("source", "artifacts", "scratch_dir", "manifest")
 
-        self.assertEqual(len(workflow.actions), 3)
+        self.assertEqual(len(workflow.actions), 5)
 
         self.assertIsInstance(workflow.actions[0], NodejsNpmPackAction)
 
-        self.assertIsInstance(workflow.actions[1], CopySourceAction)
+        self.assertIsInstance(workflow.actions[1], NodejsNpmrcCopyAction)
 
-        self.assertIsInstance(workflow.actions[2], NodejsNpmInstallAction)
+        self.assertIsInstance(workflow.actions[2], CopySourceAction)
+
+        self.assertIsInstance(workflow.actions[3], NodejsNpmInstallAction)
+
+        self.assertIsInstance(workflow.actions[4], NodejsNpmrcCleanUpAction)
diff --git a/tests/unit/workflows/python_pip/test_actions.py b/tests/unit/workflows/python_pip/test_actions.py
index 1f691efff..c2ed21c2b 100644
--- a/tests/unit/workflows/python_pip/test_actions.py
+++ b/tests/unit/workflows/python_pip/test_actions.py
@@ -1,8 +1,10 @@
+import sys
 from unittest import TestCase
 
-from mock import patch
+from mock import patch, Mock
 
 from aws_lambda_builders.actions import ActionFailedError
+from aws_lambda_builders.binary_path import BinaryPath
 from aws_lambda_builders.workflows.python_pip.actions import PythonPipBuildAction
 from aws_lambda_builders.workflows.python_pip.packager import PackagerError
 
@@ -15,7 +17,11 @@ def test_action_must_call_builder(self, PythonPipDependencyBuilderMock):
         builder_instance = PythonPipDependencyBuilderMock.return_value
 
         action = PythonPipBuildAction("artifacts", "scratch_dir",
-                                      "manifest", "runtime")
+                                      "manifest", "runtime",
+                                      {
+                                          "python": BinaryPath(resolver=Mock(), validator=Mock(),
+                                                               binary="python", binary_path=sys.executable)
+                                      })
         action.execute()
 
         builder_instance.build_dependencies.assert_called_with("artifacts",
@@ -28,7 +34,11 @@ def test_must_raise_exception_on_failure(self, PythonPipDependencyBuilderMock):
         builder_instance.build_dependencies.side_effect = PackagerError()
 
         action = PythonPipBuildAction("artifacts", "scratch_dir",
-                                      "manifest", "runtime")
+                                      "manifest", "runtime",
+                                      {
+                                          "python": BinaryPath(resolver=Mock(), validator=Mock(),
+                                                               binary="python", binary_path=sys.executable)
+                                      })
 
         with self.assertRaises(ActionFailedError):
             action.execute()
diff --git a/tests/unit/workflows/python_pip/test_packager.py b/tests/unit/workflows/python_pip/test_packager.py
index 228735e33..e1d58dd74 100644
--- a/tests/unit/workflows/python_pip/test_packager.py
+++ b/tests/unit/workflows/python_pip/test_packager.py
@@ -1,3 +1,4 @@
+import sys
 from collections import namedtuple
 
 import mock
@@ -47,7 +48,9 @@ def calls(self):
 def pip_factory():
     def create_pip_runner(osutils=None):
         pip = FakePip()
-        pip_runner = PipRunner(pip, osutils=osutils)
+        pip_runner = PipRunner(python_exe=sys.executable,
+                               pip=pip,
+                               osutils=osutils)
         return pip, pip_runner
     return create_pip_runner
 
diff --git a/tests/unit/test_runtime.py b/tests/unit/workflows/python_pip/test_validator.py
similarity index 50%
rename from tests/unit/test_runtime.py
rename to tests/unit/workflows/python_pip/test_validator.py
index 6f9e01168..6047ab299 100644
--- a/tests/unit/test_runtime.py
+++ b/tests/unit/workflows/python_pip/test_validator.py
@@ -1,10 +1,10 @@
 from unittest import TestCase
 
 import mock
+from parameterized import parameterized
 
 from aws_lambda_builders.exceptions import MisMatchRuntimeError
-from aws_lambda_builders.validate import validate_python_cmd
-from aws_lambda_builders.validate import RuntimeValidator
+from aws_lambda_builders.workflows.python_pip.validator import PythonRuntimeValidator
 
 
 class MockSubProcess(object):
@@ -13,38 +13,43 @@ def __init__(self, returncode):
         self.returncode = returncode
 
     def communicate(self):
-        return b'python3,6', None
+        pass
 
 
-class TestRuntime(TestCase):
+class TestPythonRuntimeValidator(TestCase):
 
-    def test_supported_runtimes(self):
-        self.assertTrue(RuntimeValidator.has_runtime("python2.7"))
-        self.assertTrue(RuntimeValidator.has_runtime("python3.6"))
-        self.assertFalse(RuntimeValidator.has_runtime("test_language"))
+    def setUp(self):
+        self.validator = PythonRuntimeValidator(runtime='python3.7')
 
-    def test_runtime_validate_unsupported_language_fail_open(self):
-        RuntimeValidator.validate_runtime("test_language", "test_language2.7")
+    @parameterized.expand([
+        "python2.7",
+        "python3.6",
+        "python3.7"
+    ])
+    def test_supported_runtimes(self, runtime):
+        validator = PythonRuntimeValidator(runtime=runtime)
+        self.assertTrue(validator.has_runtime())
 
-    def test_runtime_validate_unsupported_runtime_version_fail_open(self):
-        RuntimeValidator.validate_runtime("python", "python2.8")
+    def test_runtime_validate_unsupported_language_fail_open(self):
+        validator = PythonRuntimeValidator(runtime='python2.6')
+        validator.validate(runtime_path='/usr/bin/python2.6')
 
     def test_runtime_validate_supported_version_runtime(self):
         with mock.patch('subprocess.Popen') as mock_subprocess:
             mock_subprocess.return_value = MockSubProcess(0)
-            RuntimeValidator.validate_runtime("python", "python3.6")
+            self.validator.validate(runtime_path='/usr/bin/python3.7')
             self.assertTrue(mock_subprocess.call_count, 1)
 
     def test_runtime_validate_mismatch_version_runtime(self):
         with mock.patch('subprocess.Popen') as mock_subprocess:
             mock_subprocess.return_value = MockSubProcess(1)
             with self.assertRaises(MisMatchRuntimeError):
-                RuntimeValidator.validate_runtime("python", "python2.7")
+                self.validator.validate(runtime_path='/usr/bin/python3.6')
             self.assertTrue(mock_subprocess.call_count, 1)
 
     def test_python_command(self):
-        cmd = validate_python_cmd("python", "python2.7")
-        version_strings = ["sys.stdout.write", "sys.version_info.major == 2",
+        cmd = self.validator._validate_python_cmd(runtime_path='/usr/bin/python3.7')
+        version_strings = ["sys.version_info.major == 3",
                            "sys.version_info.minor == 7"]
         for version_string in version_strings:
-            self.assertTrue(any([part for part in cmd if version_string in part]))
+            self.assertTrue(any(version_string in part for part in cmd))
diff --git a/tests/unit/workflows/python_pip/test_workflow.py b/tests/unit/workflows/python_pip/test_workflow.py
new file mode 100644
index 000000000..aec99e28b
--- /dev/null
+++ b/tests/unit/workflows/python_pip/test_workflow.py
@@ -0,0 +1,20 @@
+from unittest import TestCase
+
+from aws_lambda_builders.actions import CopySourceAction
+from aws_lambda_builders.workflows.python_pip.validator import PythonRuntimeValidator
+from aws_lambda_builders.workflows.python_pip.workflow import PythonPipBuildAction, PythonPipWorkflow
+
+
+class TestPythonPipWorkflow(TestCase):
+
+    def setUp(self):
+        self.workflow = PythonPipWorkflow("source", "artifacts", "scratch_dir", "manifest", runtime="python3.7")
+
+    def test_workflow_sets_up_actions(self):
+        self.assertEqual(len(self.workflow.actions), 2)
+        self.assertIsInstance(self.workflow.actions[0], PythonPipBuildAction)
+        self.assertIsInstance(self.workflow.actions[1], CopySourceAction)
+
+    def test_workflow_validator(self):
+        for validator in self.workflow.get_validators():
+            self.assertTrue(isinstance(validator, PythonRuntimeValidator))