diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b2ad09df46..b253a6c206 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -104,8 +104,9 @@ jobs: 3.7 3.8 3.9 + 3.10 ${{ matrix.python }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: '1.19' - uses: ruby/setup-ruby@v1 diff --git a/appveyor-ubuntu.yml b/appveyor-ubuntu.yml index 3f31f11be7..67bc3fb189 100644 --- a/appveyor-ubuntu.yml +++ b/appveyor-ubuntu.yml @@ -35,6 +35,7 @@ environment: NOSE_PARAMETERIZED_NO_WARN: 1 INSTALL_PY_38_PIP: 1 INSTALL_PY_39_PIP: 1 + INSTALL_PY_310_PIP: 1 APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python38-x64" @@ -44,6 +45,7 @@ environment: NOSE_PARAMETERIZED_NO_WARN: 1 INSTALL_PY_37_PIP: 1 INSTALL_PY_39_PIP: 1 + INSTALL_PY_310_PIP: 1 APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python39-x64" @@ -53,6 +55,7 @@ environment: NOSE_PARAMETERIZED_NO_WARN: 1 INSTALL_PY_37_PIP: 1 INSTALL_PY_38_PIP: 1 + INSTALL_PY_310_PIP: 1 APPVEYOR_CONSOLE_DISABLE_PTY: true install: @@ -87,17 +90,20 @@ install: - sh: "sudo apt-get -y install python3.7" - sh: "sudo apt-get -y install python3.8" - sh: "sudo apt-get -y install python3.9 python3.9-dev python3.9-venv" + - sh: "sudo apt-get -y install python3.10 python3.10-dev python3.10-venv" - sh: "which python3.8" - sh: "which python3.7" - sh: "which python3.9" + - sh: "which python3.10" - - sh: "PATH=$PATH:/usr/bin/python3.9:/usr/bin/python3.8:/usr/bin/python3.7" + - sh: "PATH=$PATH:/usr/bin/python3.9:/usr/bin/python3.8:/usr/bin/python3.7:/usr/bin/python3.10" - sh: "curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py" - sh: "curl https://bootstrap.pypa.io/pip/3.6/get-pip.py -o get-pip-36.py" - sh: "sudo apt-get -y install python3-distutils" - sh: "sudo apt-get -y install python3.9-distutils" + - ps: "If ($env:INSTALL_PY_310_PIP) {python3.10 get-pip.py --user}" - ps: "If ($env:INSTALL_PY_39_PIP) {python3.9 get-pip.py --user}" - ps: "If ($env:INSTALL_PY_38_PIP) {python3.8 get-pip.py --user}" - ps: "If ($env:INSTALL_PY_37_PIP) {python3.7 get-pip.py --user}" diff --git a/appveyor-windows.yml b/appveyor-windows.yml index 87a0108493..44504c9593 100644 --- a/appveyor-windows.yml +++ b/appveyor-windows.yml @@ -69,7 +69,7 @@ install: # Make sure the temp directory exists for Python to use. - ps: "mkdir -Force C:\\tmp" - - 'set PATH=%PYTHON_HOME%;C:\Ruby27-x64\bin;%PATH%;C:\Python37-x64;C:\Python39-x64' + - 'set PATH=%PYTHON_HOME%;C:\Ruby27-x64\bin;%PATH%;C:\Python37-x64;C:\Python39-x64;C:\Python310-x64' - "echo %PYTHON_HOME%" - "echo %PATH%" - "python --version" @@ -84,6 +84,7 @@ install: # Install pip for the python versions which is used by the tests - "C:\\Python37-x64\\python.exe -m pip install --upgrade pip" - "C:\\Python39-x64\\python.exe -m pip install --upgrade pip" + - "C:\\Python310-x64\\python.exe -m pip install --upgrade pip" # Install AWS CLI Globally via pip3 - "pip install awscli" diff --git a/requirements/base.txt b/requirements/base.txt index bd168980d2..ef73ebfe86 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -10,10 +10,10 @@ cookiecutter~=2.1.1 aws-sam-translator==1.62.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
docker~=4.2.0 -dateparser~=1.0 +dateparser~=1.1 requests==2.28.2 serverlessrepo==0.1.10 -aws_lambda_builders==1.27.0 +aws_lambda_builders==1.28.0 tomlkit==0.11.6 watchdog==2.1.2 pyopenssl==23.0.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index c50f513dd1..5da6863837 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -23,7 +23,7 @@ pytest-forked==1.6.0 pytest-timeout==2.1.0 pytest-rerunfailures==11.1.2 pytest-json-report==1.5.0 -filelock==3.9.0 +filelock==3.10.0 # formatter black==22.6.0 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 5ce3a37311..4fcb903125 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -15,9 +15,9 @@ attrs==20.3.0 \ # jschema-to-python # jsonschema # sarif-om -aws-lambda-builders==1.27.0 \ - --hash=sha256:8019d5d5d7de32b159e33ff447672308fc20dd58bc7452f82af15f6667037e1f \ - --hash=sha256:ca4374209755af250ae5c9b6e7c5a315ec39f41e7b901af050d242e1eaeedf27 +aws-lambda-builders==1.28.0 \ + --hash=sha256:6ea2fb607057436f03e2a8a857b5c5cbd18f7b2b907c53c2b461e65f843a4f38 \ + --hash=sha256:bd6566772e7c5d887d05f32cf7e61a57293658388eef4fe8301f65bef432fe39 # via aws-sam-cli (setup.py) aws-sam-translator==1.62.0 \ --hash=sha256:2db24633fbc76b8e6eb76adaf0c1001a0d749288af91d85e7d9007e3b05479fa \ @@ -47,16 +47,16 @@ binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ --hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 # via cookiecutter -boto3==1.21.30 \ - --hash=sha256:ef210f8e85cdb6d26a38ebad1cfe9cefdef2ab269207e5987653555375a7ef6b \ - --hash=sha256:f0af8f4ef5fe6353c794cd3cce627d469a618b58ace7ca75a63cfd719df615ce +boto3==1.26.94 \ + --hash=sha256:619022059e255731f33cd9fe083b8fd62406efcbc07dc15660037bcaa1ba1255 \ + --hash=sha256:9f156f4da4b0a15924196e1a8e3439d1b99cd4a463588e4bb103d1cfaf5618fa # via # aws-sam-cli (setup.py) # aws-sam-translator # serverlessrepo -botocore==1.24.30 \ - --hash=sha256:af4bdc51eeecbe9fdcdadbed9ad58c5c91380ef30f3560022bbc2ee1d78f0ad6 \ - --hash=sha256:c622751093e3d0bf61343e66d6d06190ef30bf42b1557d5070ca84e9efa06d4b +botocore==1.29.97 \ + --hash=sha256:0df677eb2bef3ba18ac69e007633559b4426df310eee99df9882437b5faf498a \ + --hash=sha256:176740221714c0f031c2cd773879df096dbc0f977c63b3e2ed6a956205f02e82 # via # boto3 # s3transfer @@ -230,32 +230,34 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ 
- --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==39.0.2 \ + --hash=sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1 \ + --hash=sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7 \ + --hash=sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06 \ + --hash=sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84 \ + --hash=sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915 \ + --hash=sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074 \ + --hash=sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5 \ + --hash=sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3 \ + --hash=sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9 \ + --hash=sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3 \ + --hash=sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011 \ + --hash=sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536 \ + --hash=sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a \ + --hash=sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f \ + --hash=sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480 \ + --hash=sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac \ + --hash=sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0 \ + --hash=sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108 \ + --hash=sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828 \ + --hash=sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354 \ + --hash=sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612 \ + --hash=sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3 \ + --hash=sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97 # via pyopenssl -dateparser==1.0.0 \ - --hash=sha256:159cc4e01a593706a15cd4e269a0b3345edf3aef8bf9278a57dac8adf5bf1e4a \ - --hash=sha256:17202df32c7a36e773136ff353aa3767e987f8b3e27374c39fd21a30a803d6f8 +dateparser==1.1.7 \ + --hash=sha256:fbed8b738a24c9cd7f47c4f2089527926566fe539e1a06125eddba75917b1eef \ + --hash=sha256:ff047d9cffad4d3113ead8ec0faf8a7fc43bab7d853ac8715e071312b53c465a # via aws-sam-cli (setup.py) docker==4.2.2 \ --hash=sha256:03a46400c4080cb6f7aa997f881ddd84fef855499ece219d75fbdb53289c17ab \ @@ -426,9 +428,9 @@ pyopenssl==23.0.0 \ pyrsistent==0.17.3 \ --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e # via jsonschema -python-dateutil==2.8.1 \ - --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ - 
--hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via # arrow # botocore @@ -566,9 +568,9 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 # via ruamel-yaml -s3transfer==0.5.0 \ - --hash=sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c \ - --hash=sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803 +s3transfer==0.6.0 \ + --hash=sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd \ + --hash=sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947 # via boto3 sarif-om==1.0.4 \ --hash=sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911 \ diff --git a/samcli/__init__.py b/samcli/__init__.py index 237d5cd9ed..51256cc9c0 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.77.0" +__version__ = "1.78.0" diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index d5fdeace17..43d0a300c7 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -44,7 +44,7 @@ \b Supported Runtimes ------------------ -1. Python 3.7, 3.8, 3.9 using PIP\n +1. Python 3.7, 3.8, 3.9, 3.10 using PIP\n 2. Nodejs 18.x, 16.x, 14.x, 12.x using NPM\n 3. Ruby 2.7 using Bundler\n 4. Java 8, Java 11 using Gradle and Maven\n diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 3125f14fce..275d11dc70 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -271,6 +271,8 @@ def do_cli( s3_bucket=s3_bucket, image_repository=image_repository, image_repositories=image_repositories, + resolve_s3=resolve_s3, + resolve_image_repos=resolve_image_repos, s3_prefix=s3_prefix, region=region, profile=profile, @@ -320,8 +322,8 @@ def do_cli( ) as package_context: package_context.run() - # 500ms of sleep time between stack checks and describe stack events. - DEFAULT_POLL_DELAY = 0.5 + # 5s of sleep time between stack checks and describe stack events. 
+ DEFAULT_POLL_DELAY = 5 try: poll_delay = float(os.getenv("SAM_CLI_POLL_DELAY", str(DEFAULT_POLL_DELAY))) except ValueError: diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 4c04d611b7..4b65526d84 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -24,7 +24,7 @@ from samcli.commands.deploy.guided_config import GuidedConfig from samcli.commands.deploy.utils import sanitize_parameter_overrides from samcli.lib.bootstrap.bootstrap import manage_stack -from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager +from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager, sync_ecr_stack from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME, DEFAULT_ENV from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.package.ecr_utils import is_ecr_url @@ -49,6 +49,8 @@ def __init__( image_repository, image_repositories, s3_prefix, + resolve_s3=False, + resolve_image_repos=False, region=None, profile=None, confirm_changeset=None, @@ -83,6 +85,8 @@ def __init__( self.guided_s3_prefix = None self.guided_region = None self.guided_profile = None + self.resolve_s3 = resolve_s3 + self.resolve_image_repositories = resolve_image_repos self.signing_profiles = signing_profiles self._capabilities = None self._parameter_overrides = None @@ -177,17 +181,30 @@ def guided_prompts(self, parameter_override_keys): ) click.echo("\n\tLooking for resources needed for deployment:") - s3_bucket = manage_stack(profile=self.profile, region=region) - click.secho(f"\n\tManaged S3 bucket: {s3_bucket}", bold=True) - click.echo("\tA different default S3 bucket can be set in samconfig.toml") + managed_s3_bucket = manage_stack(profile=self.profile, region=region) + click.secho(f"\n\tManaged S3 bucket: {managed_s3_bucket}", bold=True) + click.echo( + "\tA different default S3 bucket can be set in samconfig.toml" + " and auto resolution of buckets turned off by setting resolve_s3=False" + ) - image_repositories = self.prompt_image_repository( - stack_name, stacks, self.image_repositories, region, s3_bucket, self.s3_prefix + image_repositories = ( + sync_ecr_stack( + self.template_file, stack_name, region, managed_s3_bucket, self.s3_prefix, self.image_repositories + ) + if self.resolve_image_repositories + else self.prompt_image_repository( + stack_name, stacks, self.image_repositories, region, managed_s3_bucket, self.s3_prefix + ) ) self.guided_stack_name = stack_name - self.guided_s3_bucket = s3_bucket + self.guided_s3_bucket = managed_s3_bucket self.guided_image_repositories = image_repositories + # NOTE(sriram-mv): The resultant s3 bucket is ALWAYS the managed_s3_bucket. There is no user flow to set it + # within guided. + self.resolve_s3 = True if self.guided_s3_bucket else False + self.guided_s3_prefix = stack_name self.guided_region = region self.guided_profile = self.profile @@ -403,7 +420,9 @@ def prompt_specify_repos( ) if not is_ecr_url(image_uri): raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") - + # NOTE(sriram-mv): If a prompt to accept an ECR URI succeeded, then one does not any longer + # resolve image repositories automatically. 
+ self.resolve_image_repositories = False updated_repositories[function_logical_id] = image_uri return updated_repositories @@ -510,7 +529,7 @@ def prompt_delete_unreferenced_repos( click.echo( "\t #The deployment was aborted to prevent " "unreferenced managed ECR repositories from being deleted.\n" - "\t #You may remove repositories from the SAMCLI " + "\t #You may remove repositories from the AWS SAM CLI " "managed stack to retain them and resolve this unreferenced check." ) raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") @@ -562,9 +581,10 @@ def run(self): self.config_env or DEFAULT_ENV, self.config_file or DEFAULT_CONFIG_FILE_NAME, stack_name=self.guided_stack_name, - s3_bucket=self.guided_s3_bucket, + resolve_s3=self.resolve_s3, s3_prefix=self.guided_s3_prefix, - image_repositories=self.guided_image_repositories, + image_repositories=self.guided_image_repositories if not self.resolve_image_repositories else None, + resolve_image_repos=self.resolve_image_repositories, region=self.guided_region, profile=self.guided_profile, confirm_changeset=self.confirm_changeset, diff --git a/samcli/commands/package/package_context.py b/samcli/commands/package/package_context.py index cfb2d01f28..c030bf23b2 100644 --- a/samcli/commands/package/package_context.py +++ b/samcli/commands/package/package_context.py @@ -17,7 +17,7 @@ import json import logging import os -from typing import Optional +from typing import List, Optional import boto3 import click @@ -30,9 +30,11 @@ from samcli.lib.package.ecr_uploader import ECRUploader from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Uploaders -from samcli.lib.providers.provider import ResourceIdentifier, get_resource_full_path_by_id +from samcli.lib.providers.provider import ResourceIdentifier, Stack, get_resource_full_path_by_id from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.boto_utils import get_boto_config_with_user_agent +from samcli.lib.utils.preview_runtimes import PREVIEW_RUNTIMES +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION, AWS_SERVERLESS_FUNCTION from samcli.yamlhelper import yaml_dump LOG = logging.getLogger(__name__) @@ -102,6 +104,7 @@ def run(self): self.template_file, global_parameter_overrides=self._global_parameter_overrides, ) + self._warn_preview_runtime(stacks) self.image_repositories = self.image_repositories if self.image_repositories is not None else {} updated_repo = {} for image_repo_func_id, image_repo_uri in self.image_repositories.items(): @@ -165,6 +168,22 @@ def _export(self, template_path, use_json): return exported_str + @staticmethod + def _warn_preview_runtime(stacks: List[Stack]) -> None: + for stack in stacks: + for _, resource_dict in stack.resources.items(): + if resource_dict.get("Type") not in [AWS_SERVERLESS_FUNCTION, AWS_LAMBDA_FUNCTION]: + continue + if resource_dict.get("Properties", {}).get("Runtime", "") in PREVIEW_RUNTIMES: + click.secho( + "Warning: This stack contains one or more Lambda functions using a runtime which is not " + "yet generally available. This runtime should not be used for production applications. 
" + "For more information on supported runtimes, see " + "https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html.", + fg="yellow", + ) + return + @staticmethod def write_output(output_file_name: Optional[str], data: str) -> None: if output_file_name is None: diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index dddf10244e..0032e8b5b0 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -145,6 +145,13 @@ is_flag=True, help="Separate dependencies of individual function into a Lambda layer for improved performance.", ) +@click.option( + "--skip-deploy-sync/--no-skip-deploy-sync", + default=True, + is_flag=True, + help="This option will skip the initial infrastructure deployment if it is not required" + " by comparing the local template with the template deployed in cloud.", +) @stack_name_option(required=True) # pylint: disable=E1120 @base_dir_option @use_container_build_option @@ -178,6 +185,7 @@ def cli( resource_id: Optional[List[str]], resource: Optional[List[str]], dependency_layer: bool, + skip_deploy_sync: bool, stack_name: str, base_dir: Optional[str], parameter_overrides: dict, @@ -208,6 +216,7 @@ def cli( resource_id, resource, dependency_layer, + skip_deploy_sync, stack_name, ctx.region, ctx.profile, @@ -238,6 +247,7 @@ def do_cli( resource_id: Optional[List[str]], resource: Optional[List[str]], dependency_layer: bool, + skip_deploy_sync: bool, stack_name: str, region: str, profile: str, @@ -365,17 +375,17 @@ def do_cli( on_failure=None, ) as deploy_context: with SyncContext( - dependency_layer, build_context.build_dir, build_context.cache_dir + dependency_layer, build_context.build_dir, build_context.cache_dir, skip_deploy_sync ) as sync_context: if watch: execute_watch( - template_file, - build_context, - package_context, - deploy_context, - sync_context, - dependency_layer, - code, + template=template_file, + build_context=build_context, + package_context=package_context, + deploy_context=deploy_context, + sync_context=sync_context, + auto_dependency_layer=dependency_layer, + disable_infra_syncs=code, ) elif code: execute_code_sync( @@ -493,7 +503,7 @@ def execute_watch( deploy_context: "DeployContext", sync_context: "SyncContext", auto_dependency_layer: bool, - skip_infra_syncs: bool, + disable_infra_syncs: bool, ): """Start sync watch execution @@ -511,11 +521,20 @@ def execute_watch( SyncContext object that obtains sync information. auto_dependency_layer: bool Boolean flag to whether enable certain sync flows for auto dependency layer feature. - skip_infra_syncs: bool - Boolean flag to determine if only ececute code syncs. + disable_infra_syncs: bool + Boolean flag to determine if sam sync only executes code syncs. """ + # Note: disable_infra_syncs is different from skip_deploy_sync, + # disable_infra_syncs completely disables infra syncs and + # skip_deploy_sync skips the initial infra sync if it's not required. 
watch_manager = WatchManager( - template, build_context, package_context, deploy_context, sync_context, auto_dependency_layer, skip_infra_syncs + template, + build_context, + package_context, + deploy_context, + sync_context, + auto_dependency_layer, + disable_infra_syncs, ) watch_manager.start() diff --git a/samcli/commands/sync/core/options.py b/samcli/commands/sync/core/options.py index 692c0ef035..d8e6f4b6cd 100644 --- a/samcli/commands/sync/core/options.py +++ b/samcli/commands/sync/core/options.py @@ -31,6 +31,7 @@ ADDITIONAL_OPTIONS: List[str] = [ "watch", "code", + "skip_deploy_sync", "dependency_layer", "use_container", "resource_id", diff --git a/samcli/commands/sync/sync_context.py b/samcli/commands/sync/sync_context.py index d3b80d545f..a9541d3fcf 100644 --- a/samcli/commands/sync/sync_context.py +++ b/samcli/commands/sync/sync_context.py @@ -155,10 +155,12 @@ class SyncContext: _build_dir: Path _cache_dir: Path _file_path: Path + skip_deploy_sync: bool - def __init__(self, dependency_layer: bool, build_dir: str, cache_dir: str): + def __init__(self, dependency_layer: bool, build_dir: str, cache_dir: str, skip_deploy_sync: bool): self._current_state = SyncState(dependency_layer, dict(), None) self._previous_state = None + self.skip_deploy_sync = skip_deploy_sync self._build_dir = Path(build_dir) self._cache_dir = Path(cache_dir) self._file_path = Path(build_dir).parent.joinpath(DEFAULT_SYNC_STATE_FILE_NAME) diff --git a/samcli/lib/build/workflow_config.py b/samcli/lib/build/workflow_config.py index bdfd42be36..d8f61da8f1 100644 --- a/samcli/lib/build/workflow_config.py +++ b/samcli/lib/build/workflow_config.py @@ -88,6 +88,7 @@ def get_layer_subfolder(build_workflow: str) -> str: "python3.7": "python", "python3.8": "python", "python3.9": "python", + "python3.10": "python", "nodejs4.3": "nodejs", "nodejs6.10": "nodejs", "nodejs8.10": "nodejs", @@ -151,6 +152,7 @@ def get_workflow_config( "python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.8": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.9": BasicWorkflowSelector(PYTHON_PIP_CONFIG), + "python3.10": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "nodejs12.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs14.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs16.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), diff --git a/samcli/lib/sync/infra_sync_executor.py b/samcli/lib/sync/infra_sync_executor.py index 1242a3ffe9..7b4a6ecde4 100644 --- a/samcli/lib/sync/infra_sync_executor.py +++ b/samcli/lib/sync/infra_sync_executor.py @@ -5,7 +5,8 @@ import logging import re from datetime import datetime -from typing import Dict, Optional, Set +from pathlib import Path +from typing import TYPE_CHECKING, Dict, Optional, Set from boto3 import Session from botocore.exceptions import ClientError @@ -14,9 +15,9 @@ from samcli.commands.build.build_context import BuildContext from samcli.commands.deploy.deploy_context import DeployContext from samcli.commands.package.package_context import PackageContext -from samcli.commands.sync.sync_context import SyncContext from samcli.lib.providers.provider import ResourceIdentifier from samcli.lib.providers.sam_stack_provider import is_local_path +from samcli.lib.telemetry.event import EventTracker from samcli.lib.utils.boto_utils import get_boto_client_provider_from_session_with_config from samcli.lib.utils.resources import ( AWS_APIGATEWAY_RESTAPI, @@ -36,6 +37,9 @@ ) from samcli.yamlhelper import yaml_parse +if TYPE_CHECKING: # pragma: no cover + from samcli.commands.sync.sync_context import 
SyncContext + LOG = logging.getLogger(__name__) GENERAL_REMOVAL_MAP = { @@ -106,7 +110,7 @@ def __init__( build_context: BuildContext, package_context: PackageContext, deploy_context: DeployContext, - sync_context: SyncContext, + sync_context: "SyncContext", ): """Constructs the sync for infra executor. @@ -170,40 +174,40 @@ def execute_infra_sync(self, first_sync: bool = False) -> InfraSyncResult: days_since_last_infra_sync = (current_time - last_infra_sync_time).days # Will not combine the comparisons in order to save operation cost - if first_sync and (days_since_last_infra_sync <= AUTO_INFRA_SYNC_DAYS): - # Reminder: Add back after sync infra skip ready for release - # try: - # if self._auto_skip_infra_sync( - # self._package_context.output_template_file, - # self._package_context.template_file, - # self._deploy_context.stack_name, - # ): - # We have a threshold on number of sync flows we initiate - # If higher than the threshold, we perform infra sync to improve performance - # if len(self.code_sync_resources) < SYNC_FLOW_THRESHOLD: - # pass - # LOG.info("Template haven't been changed since last deployment, skipping infra sync...") - # return InfraSyncResult(False, self.code_sync_resources) - # else: - # LOG.info( - # "The number of resources that needs an update exceeds %s, \ - # an infra sync will be executed for an CloudFormation deployment to improve performance", - # SYNC_FLOW_THRESHOLD) - # pass - # except Exception: - # LOG.debug( - # "Could not skip infra sync by comparing to a previously deployed template, starting infra sync" - # ) - pass - - # Will be added with the sync infra skip is ready for release - # if days_since_last_infra_sync > AUTO_INFRA_SYNC_DAYS: - # LOG.info( - # "Infrastructure Sync hasn't been run in the last %s days, sam sync will be queuing up the stack" - # " deployment to minimize the drift in CloudFormation.", - # AUTO_INFRA_SYNC_DAYS, - # ) + if self._sync_context.skip_deploy_sync and first_sync and (days_since_last_infra_sync <= AUTO_INFRA_SYNC_DAYS): + EventTracker.track_event("SyncFlowStart", "SkipInfraSyncExecute") + try: + if self._auto_skip_infra_sync( + self._package_context.output_template_file, + self._package_context.template_file, + self._deploy_context.stack_name, + ): + # We have a threshold on number of sync flows we initiate + # If higher than the threshold, we perform infra sync to improve performance + if len(self.code_sync_resources) < SYNC_FLOW_THRESHOLD: + LOG.info("Template haven't been changed since last deployment, skipping infra sync...") + EventTracker.track_event("SyncFlowEnd", "SkipInfraSyncExecute") + return InfraSyncResult(False, self.code_sync_resources) + else: + LOG.info( + "The number of resources that needs an update exceeds %s, \ +an infra sync will be executed for an CloudFormation deployment to improve performance", + SYNC_FLOW_THRESHOLD, + ) + except Exception: + LOG.debug( + "Could not skip infra sync by comparing to a previously deployed template, starting infra sync" + ) + + EventTracker.track_event("SyncFlowStart", "InfraSyncExecute") + if days_since_last_infra_sync > AUTO_INFRA_SYNC_DAYS: + LOG.info( + "Infrastructure Sync hasn't been run in the last %s days, sam sync will be queuing up the stack" + " deployment to minimize the drift in CloudFormation.", + AUTO_INFRA_SYNC_DAYS, + ) self._deploy_context.run() + EventTracker.track_event("SyncFlowEnd", "InfraSyncExecute") # Update latest infra sync time in sync state self._sync_context.update_infra_sync_time() @@ -309,12 +313,17 @@ def _auto_skip_infra_sync( if 
isinstance(template_location, dict): continue # For other scenarios, template location will be a string (local or s3 URL) - elif not self._auto_skip_infra_sync( - resource_dict.get("Properties", {}).get(template_field), + nested_template_location = ( current_built_template.get("Resources", {}) .get(resource_logical_id, {}) .get("Properties", {}) - .get(template_field), + .get(template_field) + ) + if is_local_path(nested_template_location): + nested_template_location = str(Path(built_template_path).parent.joinpath(nested_template_location)) + if not self._auto_skip_infra_sync( + resource_dict.get("Properties", {}).get(template_field), + nested_template_location, stack_resource_detail.get("StackResourceDetail", {}).get("PhysicalResourceId", ""), nested_prefix + resource_logical_id + "/" if nested_prefix else resource_logical_id + "/", ): @@ -324,7 +333,10 @@ def _auto_skip_infra_sync( return True def _sanitize_template( - self, template_dict: Dict, linked_resources: Set[str] = set(), built_template_dict: Optional[Dict] = None + self, + template_dict: Dict, + linked_resources: Optional[Set[str]] = None, + built_template_dict: Optional[Dict] = None, ) -> Set[str]: """ Fields skipped during template comparison because sync --code can handle the difference: @@ -360,9 +372,11 @@ def _sanitize_template( Set[str] The list of resource IDs that got changed during sanitization """ + linked_resources = linked_resources or set() resources = template_dict.get("Resources", {}) processed_resources: Set[str] = set() + built_resource_dict = None for resource_logical_id in resources: resource_dict = resources.get(resource_logical_id, {}) @@ -399,7 +413,7 @@ def _remove_resource_field( resource_logical_id: str, resource_type: str, resource_dict: Dict, - linked_resources: Set[str] = set(), + linked_resources: Optional[Set[str]] = None, built_resource_dict: Optional[Dict] = None, ) -> Optional[str]: """ @@ -413,7 +427,7 @@ def _remove_resource_field( Resource type resource_dict: Dict The resource level dict containing Properties field - linked_resources: Set[str] + linked_resources: Optional[Set[str]] The corresponding resources in the other template that got processed built_resource_dict: Optional[Dict] Only passed in for current template sanitization to determine if local @@ -423,6 +437,7 @@ def _remove_resource_field( Optional[str] The processed resource ID """ + linked_resources = linked_resources or set() processed_logical_id = None if resource_type == AWS_LAMBDA_FUNCTION: diff --git a/samcli/lib/sync/sync_flow.py b/samcli/lib/sync/sync_flow.py index d5011d59db..7bebeef48b 100644 --- a/samcli/lib/sync/sync_flow.py +++ b/samcli/lib/sync/sync_flow.py @@ -14,6 +14,7 @@ from samcli.lib.sync.exceptions import MissingLockException, MissingPhysicalResourceError from samcli.lib.utils.boto_utils import get_boto_client_provider_from_session_with_config from samcli.lib.utils.lock_distributor import LockChain, LockDistributor +from samcli.lib.utils.resources import RESOURCES_WITH_LOCAL_PATHS if TYPE_CHECKING: # pragma: no cover from samcli.commands.build.build_context import BuildContext @@ -419,8 +420,15 @@ def get_definition_path( Optional[Path] A resolved absolute path for the definition file """ + definition_field_names = RESOURCES_WITH_LOCAL_PATHS.get(resource.get("Type", "")) + if not definition_field_names: + LOG.error("Couldn't find definition field name for resource {}", identifier) + return None + definition_field_name = definition_field_names[0] + LOG.debug("Found definition field name as {}", 
definition_field_name) + properties = resource.get("Properties", {}) - definition_file = properties.get("DefinitionUri") + definition_file = properties.get(definition_field_name) definition_path = None if definition_file: definition_path = Path(base_dir).joinpath(definition_file) diff --git a/samcli/lib/sync/watch_manager.py b/samcli/lib/sync/watch_manager.py index ae048f48af..08fcb5346a 100644 --- a/samcli/lib/sync/watch_manager.py +++ b/samcli/lib/sync/watch_manager.py @@ -44,7 +44,7 @@ class WatchManager: _waiting_infra_sync: bool _color: Colored _auto_dependency_layer: bool - _skip_infra_syncs: bool + _disable_infra_syncs: bool def __init__( self, @@ -54,7 +54,7 @@ def __init__( deploy_context: "DeployContext", sync_context: "SyncContext", auto_dependency_layer: bool, - skip_infra_syncs: bool, + disable_infra_syncs: bool, ): """Manager for sync watch execution logic. This manager will observe template and its code resources. @@ -78,7 +78,7 @@ def __init__( self._deploy_context = deploy_context self._sync_context = sync_context self._auto_dependency_layer = auto_dependency_layer - self._skip_infra_syncs = skip_infra_syncs + self._disable_infra_syncs = disable_infra_syncs self._sync_flow_factory = None self._sync_flow_executor = ContinuousSyncFlowExecutor() @@ -94,7 +94,7 @@ def queue_infra_sync(self) -> None: """Queue up an infra structure sync. A simple bool flag is suffice """ - if self._skip_infra_syncs: + if self._disable_infra_syncs: LOG.info( self._color.yellow( "You have enabled the --code flag, which limits sam sync updates to code changes only. To do a " @@ -190,7 +190,7 @@ def start(self) -> None: # This is a wrapper for gracefully handling Ctrl+C or other termination cases. try: self.queue_infra_sync() - if self._skip_infra_syncs: + if self._disable_infra_syncs: self._start_sync() LOG.info(self._color.green("Sync watch started.")) self._start() @@ -220,7 +220,7 @@ def _start_sync(self) -> None: def _execute_infra_sync(self, first_sync: bool = False) -> None: """Logic to execute infra sync.""" - LOG.info(self._color.cyan("Queued infra sync. Wating for in progress code syncs to complete...")) + LOG.info(self._color.cyan("Queued infra sync. 
Waiting for in progress code syncs to complete...")) self._waiting_infra_sync = False self._stop_code_sync() try: @@ -244,7 +244,11 @@ def _execute_infra_sync(self, first_sync: bool = False) -> None: # This is for initiating code sync for all resources # To improve: only initiate code syncs for ones with template changes self._queue_up_code_syncs(infra_sync_result.code_sync_resources) - LOG.info(self._color.green("Skipped infra sync and queued up required code syncs.")) + LOG.info( + self._color.green("Skipped infra sync as the local template is in sync with the cloud template.") + ) + if len(infra_sync_result.code_sync_resources) != 0: + LOG.info("Required code syncs are queued up.") else: LOG.info(self._color.green("Infra sync completed.")) diff --git a/samcli/lib/telemetry/event.py b/samcli/lib/telemetry/event.py index 54a03add28..cff129adaf 100644 --- a/samcli/lib/telemetry/event.py +++ b/samcli/lib/telemetry/event.py @@ -52,6 +52,8 @@ class EventType: "RestApiSyncFlow", "StepFunctionsSyncFlow", "ZipFunctionSyncFlow", + "InfraSyncExecute", + "SkipInfraSyncExecute", ] _WORKFLOWS = [f"{config.language}-{config.dependency_manager}" for config in ALL_CONFIGS] diff --git a/samcli/lib/utils/architecture.py b/samcli/lib/utils/architecture.py index c5c4602b7c..6025becdcf 100644 --- a/samcli/lib/utils/architecture.py +++ b/samcli/lib/utils/architecture.py @@ -21,6 +21,7 @@ "python3.7": [X86_64], "python3.8": [ARM64, X86_64], "python3.9": [ARM64, X86_64], + "python3.10": [ARM64, X86_64], "ruby2.7": [ARM64, X86_64], "java8": [X86_64], "java8.al2": [ARM64, X86_64], diff --git a/samcli/lib/utils/preview_runtimes.py b/samcli/lib/utils/preview_runtimes.py new file mode 100644 index 0000000000..3d6a5b9662 --- /dev/null +++ b/samcli/lib/utils/preview_runtimes.py @@ -0,0 +1,7 @@ +""" +Keeps list of preview runtimes, which can be used with sam build or sam local commands. 
+But deployment of them would probably fail until their GA date +""" +from typing import Set + +PREVIEW_RUNTIMES: Set[str] = {"python3.10"} diff --git a/samcli/local/common/runtime_template.py b/samcli/local/common/runtime_template.py index e268e704f2..8be2ba00ee 100644 --- a/samcli/local/common/runtime_template.py +++ b/samcli/local/common/runtime_template.py @@ -16,7 +16,7 @@ RUNTIME_DEP_TEMPLATE_MAPPING = { "python": [ { - "runtimes": ["python3.9", "python3.8", "python3.7"], + "runtimes": ["python3.10", "python3.9", "python3.8", "python3.7"], "dependency_manager": "pip", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-python"), "build": True, @@ -114,6 +114,7 @@ def get_local_lambda_images_location(mapping, runtime): "provided.al2", "provided", # python runtimes in descending order + "python3.10", "python3.9", "python3.8", "python3.7", @@ -135,6 +136,7 @@ def get_local_lambda_images_location(mapping, runtime): "nodejs16.x": "amazon/nodejs16.x-base", "nodejs14.x": "amazon/nodejs14.x-base", "nodejs12.x": "amazon/nodejs12.x-base", + "python3.10": "amazon/python3.10-base", "python3.9": "amazon/python3.9-base", "python3.8": "amazon/python3.8-base", "python3.7": "amazon/python3.7-base", @@ -152,6 +154,7 @@ def get_local_lambda_images_location(mapping, runtime): "python3.7": "Python36", "python3.8": "Python36", "python3.9": "Python36", + "python3.10": "Python36", "dotnet6": "dotnetcore3.1", "go1.x": "Go1", } diff --git a/samcli/local/docker/lambda_debug_settings.py b/samcli/local/docker/lambda_debug_settings.py index d8ce80b8c4..b26a3f49df 100644 --- a/samcli/local/docker/lambda_debug_settings.py +++ b/samcli/local/docker/lambda_debug_settings.py @@ -169,6 +169,10 @@ def get_debug_settings(debug_port, debug_args_list, _container_env_vars, runtime entry + ["/var/lang/bin/python3.9"] + debug_args_list + ["/var/runtime/bootstrap.py"], container_env_vars=_container_env_vars, ), + Runtime.python310.value: lambda: DebugSettings( + entry + ["/var/lang/bin/python3.10"] + debug_args_list + ["/var/runtime/bootstrap.py"], + container_env_vars=_container_env_vars, + ), } try: return entrypoint_mapping[runtime]() diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py index 0086a6fd83..dfc7848953 100644 --- a/samcli/local/docker/lambda_image.py +++ b/samcli/local/docker/lambda_image.py @@ -34,6 +34,7 @@ class Runtime(Enum): python37 = "python3.7" python38 = "python3.8" python39 = "python3.9" + python310 = "python3.10" ruby27 = "ruby2.7" java8 = "java8" java8al2 = "java8.al2" diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json index a892298296..2145a84553 100644 --- a/samcli/runtime_config.json +++ b/samcli/runtime_config.json @@ -1,3 +1,3 @@ { - "app_template_repo_commit": "36739f2c2c845a8dd3b6b4c785c408addc2c0eb9" + "app_template_repo_commit": "0c2d560a3e372d273c093ad8e24fb8c1eb8260b0" } diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index f69f845685..5b98ec8950 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -398,11 +398,13 @@ def _validate_skipped_built_function( ("template.yaml", "Function", True, "python3.7", "Python", False, False, "CodeUri"), ("template.yaml", "Function", True, "python3.8", "Python", False, False, "CodeUri"), ("template.yaml", "Function", True, "python3.9", "Python", False, False, "CodeUri"), + ("template.yaml", "Function", True, "python3.10", "Python", False, False, "CodeUri"), 
("template.yaml", "Function", True, "python3.7", "PythonPEP600", False, False, "CodeUri"), ("template.yaml", "Function", True, "python3.8", "PythonPEP600", False, False, "CodeUri"), ("template.yaml", "Function", True, "python3.7", "Python", "use_container", False, "CodeUri"), ("template.yaml", "Function", True, "python3.8", "Python", "use_container", False, "CodeUri"), ("template.yaml", "Function", True, "python3.9", "Python", "use_container", False, "CodeUri"), + ("template.yaml", "Function", True, "python3.10", "Python", "use_container", False, "CodeUri"), ], ) class TestBuildCommand_PythonFunctions(BuildIntegPythonBase): diff --git a/tests/integration/sync/test_sync_infra.py b/tests/integration/sync/test_sync_infra.py index 93efb29f50..e9cd985203 100644 --- a/tests/integration/sync/test_sync_infra.py +++ b/tests/integration/sync/test_sync_infra.py @@ -36,6 +36,32 @@ @skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") @parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}]) class TestSyncInfra(SyncIntegBase): + def setUp(self): + self.test_dir = Path(tempfile.mkdtemp()) + shutil.rmtree(self.test_dir) + shutil.copytree(self.test_data_path, self.test_dir) + super().setUp() + + def tearDown(self): + shutil.rmtree(self.test_dir) + super().tearDown() + + def _verify_infra_changes(self, resources): + # Lambda + lambda_functions = resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "9") + + # APIGW + rest_api = resources.get(AWS_APIGATEWAY_RESTAPI)[0] + self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 2"}') + + # SFN + state_machine = resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0] + self.assertEqual(self._get_sfn_response(state_machine), '"World 2"') + @skipIf( IS_WINDOWS, "Skip sync ruby tests in windows", @@ -44,7 +70,7 @@ class TestSyncInfra(SyncIntegBase): @parameterized.expand([["ruby", False], ["python", False], ["python", True]]) def test_sync_infra(self, runtime, use_container): template_before = f"infra/template-{runtime}-before.yaml" - template_path = str(self.test_data_path.joinpath(template_before)) + template_path = str(self.test_dir.joinpath(template_before)) stack_name = self._method_to_stack_name(self.id()) self.stacks.append({"name": stack_name}) @@ -63,7 +89,7 @@ def test_sync_infra(self, runtime, use_container): use_container=use_container, ) - sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 0) self.assertIn("Stack creation succeeded. 
Sync infra completed.", str(sync_process_execute.stderr)) @@ -84,7 +110,7 @@ def test_sync_infra(self, runtime, use_container): self.assertEqual(self._get_sfn_response(state_machine), '"World 1"') template_after = f"infra/template-{runtime}-after.yaml" - template_path = str(self.test_data_path.joinpath(template_after)) + template_path = str(self.test_dir.joinpath(template_after)) # Run infra sync sync_command_list = self.get_sync_command_list( @@ -101,7 +127,7 @@ def test_sync_infra(self, runtime, use_container): use_container=use_container, ) - sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 0) self.assertIn("Stack update succeeded. Sync infra completed.", str(sync_process_execute.stderr)) self.assertNotIn("Commands you can use next", str(sync_process_execute.stderr)) @@ -109,108 +135,128 @@ def test_sync_infra(self, runtime, use_container): # CFN Api call here to collect all the stack resources self.stack_resources = self._get_stacks(stack_name) # Lambda Api call here, which tests both the python function and the layer - lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) - for lambda_function in lambda_functions: - lambda_response = json.loads(self._get_lambda_response(lambda_function)) - self.assertIn("extra_message", lambda_response) - self.assertEqual(lambda_response.get("message"), "9") - if runtime == "python": - # ApiGateway Api call here, which tests the RestApi - rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0] - self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 2"}') - # SFN Api call here, which tests the StateMachine - state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0] - self.assertEqual(self._get_sfn_response(state_machine), '"World 2"') - - # Reminder: Add back after sync infra skip ready for release - # @skipIf( - # IS_WINDOWS, - # "Skip sync ruby tests in windows", - # ) - # @pytest.mark.flaky(reruns=3) - # @parameterized.expand([["python", False], ["python", True]]) - # def test_sync_infra_auto_skip(self, runtime, use_container): - # template_before = f"infra/template-{runtime}-before.yaml" - # template_path = str(self.test_data_path.joinpath(template_before)) - # stack_name = self._method_to_stack_name(self.id()) - # self.stacks.append({"name": stack_name}) - - # # Run infra sync - # sync_command_list = self.get_sync_command_list( - # template_file=template_path, - # code=False, - # watch=False, - # dependency_layer=self.dependency_layer, - # stack_name=stack_name, - # parameter_overrides="Parameter=Clarity", - # image_repository=self.ecr_repo_name, - # s3_prefix=self.s3_prefix, - # kms_key_id=self.kms_key, - # tags="integ=true clarity=yes foo_bar=baz", - # use_container=use_container, - # ) - - # sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) - # self.assertEqual(sync_process_execute.process.returncode, 0) - # self.assertIn("Stack creation succeeded. 
Sync infra completed.", str(sync_process_execute.stderr)) - - # # CFN Api call here to collect all the stack resources - # self.stack_resources = self._get_stacks(stack_name) - # # Lambda Api call here, which tests both the python function and the layer - # lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) - # for lambda_function in lambda_functions: - # lambda_response = json.loads(self._get_lambda_response(lambda_function)) - # self.assertIn("extra_message", lambda_response) - # self.assertEqual(lambda_response.get("message"), "7") - # if runtime == "python": - # # ApiGateway Api call here, which tests the RestApi - # rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0] - # self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 1"}') - # # SFN Api call here, which tests the StateMachine - # state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0] - # self.assertEqual(self._get_sfn_response(state_machine), '"World 1"') - - # template_after = f"infra/template-{runtime}-auto-skip.yaml" - # template_path = str(self.test_data_path.joinpath(template_after)) - - # # Run infra sync - # sync_command_list = self.get_sync_command_list( - # template_file=template_path, - # code=False, - # watch=False, - # dependency_layer=self.dependency_layer, - # stack_name=stack_name, - # parameter_overrides="Parameter=Clarity", - # image_repository=self.ecr_repo_name, - # s3_prefix=self.s3_prefix, - # kms_key_id=self.kms_key, - # tags="integ=true clarity=yes foo_bar=baz", - # use_container=use_container, - # ) - - # sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) - # self.assertEqual(sync_process_execute.process.returncode, 0) - # self.assertIn( - # "Template haven't been changed since last deployment, skipping infra sync...", - # str(sync_process_execute.stderr), - # ) - # self.assertIn( - # "The following resources will be code synced for an update: ", - # str(sync_process_execute.stderr), - # ) - - # # CFN Api call here to collect all the stack resources - # self.stack_resources = self._get_stacks(stack_name) - # # Lambda Api call here, which tests both the python function and the layer - # lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) - # for lambda_function in lambda_functions: - # lambda_response = json.loads(self._get_lambda_response(lambda_function)) - # self.assertIn("extra_message", lambda_response) - # self.assertEqual(lambda_response.get("message"), "8") + if not runtime == "python": + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "9") + else: + self._verify_infra_changes(self.stack_resources) + + @pytest.mark.flaky(reruns=3) + @parameterized.expand([["python", False], ["python", True]]) + def test_sync_infra_auto_skip(self, runtime, use_container): + template_before = f"infra/template-{runtime}-before.yaml" + template_path = str(self.test_dir.joinpath(template_before)) + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # Run infra sync + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + 
s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + use_container=use_container, + ) + + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn("Stack creation succeeded. Sync infra completed.", str(sync_process_execute.stderr)) + + template_after = f"infra/template-{runtime}-auto-skip.yaml" + template_path = str(self.test_dir.joinpath(template_after)) + + # Run infra sync + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + use_container=use_container, + ) + + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn( + "Template haven't been changed since last deployment, skipping infra sync...", + str(sync_process_execute.stderr), + ) + self.assertIn( + "Queuing up code sync for the resources that require an update", + str(sync_process_execute.stderr), + ) + + # CFN Api call here to collect all the stack resources + self.stack_resources = self._get_stacks(stack_name) + # Lambda Api call here, which tests both the python function and the layer + self._verify_infra_changes(self.stack_resources) + + @pytest.mark.flaky(reruns=3) + @parameterized.expand([["python", False], ["python", True]]) + def test_sync_infra_auto_skip_nested(self, runtime, use_container): + template_before = str(Path("infra", "parent-stack.yaml")) + template_path = str(self.test_dir.joinpath(template_before)) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # Run infra sync + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + use_container=use_container, + ) + + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn("Stack creation succeeded. 
Sync infra completed.", str(sync_process_execute.stderr)) + + self.update_file( + self.test_dir.joinpath("infra", f"template-{runtime}-auto-skip.yaml"), + self.test_dir.joinpath("infra", f"template-{runtime}-before.yaml"), + ) + + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn( + "Template haven't been changed since last deployment, skipping infra sync...", + str(sync_process_execute.stderr), + ) + self.assertIn( + "Queuing up code sync for the resources that require an update", + str(sync_process_execute.stderr), + ) + + # CFN Api call here to collect all the stack resources + self.stack_resources = self._get_stacks(stack_name) + # Lambda Api call here, which tests both the python function and the layer + self._verify_infra_changes(self.stack_resources) @parameterized.expand(["infra/template-python-before.yaml"]) def test_sync_infra_no_confirm(self, template_file): - template_path = str(self.test_data_path.joinpath(template_file)) + template_path = str(self.test_dir.joinpath(template_file)) stack_name = self._method_to_stack_name(self.id()) # Run infra sync @@ -226,14 +272,14 @@ def test_sync_infra_no_confirm(self, template_file): kms_key_id=self.kms_key, tags="integ=true clarity=yes foo_bar=baz", ) - sync_process_execute = run_command_with_input(sync_command_list, "n\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "n\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 0) self.assertNotIn("Build Succeeded", str(sync_process_execute.stderr)) @parameterized.expand(["infra/template-python-before.yaml"]) def test_sync_infra_no_stack_name(self, template_file): - template_path = str(self.test_data_path.joinpath(template_file)) + template_path = str(self.test_dir.joinpath(template_file)) # Run infra sync sync_command_list = self.get_sync_command_list( @@ -248,13 +294,13 @@ def test_sync_infra_no_stack_name(self, template_file): tags="integ=true clarity=yes foo_bar=baz", ) - sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 2) self.assertIn("Error: Missing option '--stack-name'.", str(sync_process_execute.stderr)) @parameterized.expand(["infra/template-python-before.yaml"]) def test_sync_infra_no_capabilities(self, template_file): - template_path = str(self.test_data_path.joinpath(template_file)) + template_path = str(self.test_dir.joinpath(template_file)) stack_name = self._method_to_stack_name(self.id()) self.stacks.append({"name": stack_name}) @@ -273,7 +319,7 @@ def test_sync_infra_no_capabilities(self, template_file): tags="integ=true clarity=yes foo_bar=baz", ) - sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 1) self.assertIn( "An error occurred (InsufficientCapabilitiesException) when calling the CreateStack operation: \ @@ -283,7 +329,7 @@ def test_sync_infra_no_capabilities(self, template_file): @parameterized.expand(["infra/template-python-before.yaml"]) def test_sync_infra_s3_bucket_option(self, template_file): - template_path = str(self.test_data_path.joinpath(template_file)) + template_path = 
str(self.test_dir.joinpath(template_file)) stack_name = self._method_to_stack_name(self.id()) sync_command_list = self.get_sync_command_list( @@ -301,7 +347,7 @@ def test_sync_infra_s3_bucket_option(self, template_file): tags="integ=true clarity=yes foo_bar=baz", ) - sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) self.assertEqual(sync_process_execute.process.returncode, 0) self.assertIn("Stack creation succeeded. Sync infra completed.", str(sync_process_execute.stderr)) diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index 6408376841..e64f084465 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -568,91 +568,201 @@ def test_sync_watch_code(self): ) -# Reminder: Add back after sync infra skip ready for release -# @skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") -# @parameterized_class( -# [{"runtime": "python", "dependency_layer": True}, {"runtime": "python", "dependency_layer": False}] -# ) -# class TestSyncWatchAutoSkipInfra(SyncIntegBase): -# def setUp(self): -# super().setUp() -# self.test_dir = Path(tempfile.mkdtemp()) -# shutil.rmtree(self.test_dir) -# shutil.copytree(self.test_data_path, self.test_dir) - -# def tearDown(self): -# kill_process(self.watch_process) -# shutil.rmtree(self.test_dir) -# super().tearDown() - -# @pytest.mark.flaky(reruns=3) -# def test_sync_watch_auto_skip_infra(self): -# template_before = f"infra/template-{self.runtime}-before.yaml" -# template_path = str(self.test_dir.joinpath(template_before)) -# stack_name = self._method_to_stack_name(self.id()) -# self.stacks.append({"name": stack_name}) - -# # Run infra sync -# sync_command_list = self.get_sync_command_list( -# template_file=str(template_path), -# code=False, -# watch=False, -# dependency_layer=self.dependency_layer, -# stack_name=stack_name, -# parameter_overrides="Parameter=Clarity", -# image_repository=self.ecr_repo_name, -# s3_prefix=self.s3_prefix, -# kms_key_id=self.kms_key, -# tags="integ=true clarity=yes foo_bar=baz", -# use_container=False, -# ) - -# sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) -# self.assertEqual(sync_process_execute.process.returncode, 0) -# self.assertIn("Stack creation succeeded. 
Sync infra completed.", str(sync_process_execute.stderr)) - -# # CFN Api call here to collect all the stack resources -# self.stack_resources = self._get_stacks(stack_name) -# # Lambda Api call here, which tests both the python function and the layer -# lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) -# for lambda_function in lambda_functions: -# lambda_response = json.loads(self._get_lambda_response(lambda_function)) -# self.assertIn("extra_message", lambda_response) -# self.assertEqual(lambda_response.get("message"), "7") - -# template_after = f"infra/template-{self.runtime}-auto-skip.yaml" -# template_path = str(self.test_dir.joinpath(template_after)) -# # Start watch -# sync_command_list = self.get_sync_command_list( -# template_file=template_path, -# code=False, -# watch=True, -# dependency_layer=self.dependency_layer, -# stack_name=stack_name, -# parameter_overrides="Parameter=Clarity", -# image_repository=self.ecr_repo_name, -# s3_prefix=self.s3_prefix, -# kms_key_id=self.kms_key, -# tags="integ=true clarity=yes foo_bar=baz", -# ) -# self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) - -# read_until_string(self.watch_process, "Enter Y to proceed with the command, or enter N to cancel:\n") -# self.watch_process.stdin.write("y\n") - -# read_until_string( -# self.watch_process, -# "Template haven't been changed since last deployment, skipping infra sync...\n", -# timeout=100, -# ) - -# read_until_string( -# self.watch_process, "\x1b[32mFinished syncing Lambda Function HelloWorldFunction.\x1b[0m\n", timeout=30 -# ) - -# self.stack_resources = self._get_stacks(stack_name) -# lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) -# for lambda_function in lambda_functions: -# lambda_response = json.loads(self._get_lambda_response(lambda_function)) -# self.assertIn("extra_message", lambda_response) -# self.assertEqual(lambda_response.get("message"), "8") +@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") +@parameterized_class( + [{"runtime": "python", "dependency_layer": True}, {"runtime": "python", "dependency_layer": False}] +) +class TestSyncWatchAutoSkipInfra(SyncIntegBase): + def setUp(self): + self.runtime = "python" + self.dependency_layer = True + super().setUp() + self.test_dir = Path(tempfile.mkdtemp()) + shutil.rmtree(self.test_dir) + shutil.copytree(self.test_data_path, self.test_dir) + + def tearDown(self): + kill_process(self.watch_process) + shutil.rmtree(self.test_dir) + super().tearDown() + + @pytest.mark.flaky(reruns=3) + def test_sync_watch_auto_skip_infra(self): + template_before = f"code/before/template-{self.runtime}.yaml" + template_path = str(self.test_dir.joinpath(template_before)) + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # Run infra sync + sync_command_list = self.get_sync_command_list( + template_file=str(template_path), + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + use_container=False, + ) + + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode(), cwd=self.test_dir) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn("Stack creation succeeded. 
Sync infra completed.", str(sync_process_execute.stderr)) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string( + self.watch_process, + "Template haven't been changed since last deployment, skipping infra sync...\n", + timeout=100, + ) + + kill_process(self.watch_process) + + # Test Lambda Function + self.update_file( + self.test_dir.joinpath("code", "after", "function", "app.py"), + self.test_dir.joinpath("code", "before", "function", "app.py"), + ) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string( + self.watch_process, "\x1b[32mFinished syncing Lambda Function HelloWorldFunction.\x1b[0m\n", timeout=100 + ) + + kill_process(self.watch_process) + + self.stack_resources = self._get_stacks(stack_name) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "8") + + # Test Lambda Layer + self.update_file( + self.test_dir.joinpath("code", "after", "layer", "layer_method.py"), + self.test_dir.joinpath("code", "before", "layer", "layer_method.py"), + ) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string( + self.watch_process, + "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n", + timeout=100, + ) + + kill_process(self.watch_process) + + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "9") + + # Test APIGW + self.update_file( + self.test_dir.joinpath("code", "after", "apigateway", "definition.json"), + self.test_dir.joinpath("code", "before", "apigateway", "definition.json"), + ) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + 
tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string( + self.watch_process, + "\x1b[32mFinished syncing RestApi HelloWorldApi.\x1b[0m\n", + timeout=100, + ) + time.sleep(API_SLEEP) + kill_process(self.watch_process) + + rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0] + self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 2"}') + + # Test SFN + self.update_file( + self.test_dir.joinpath("code", "after", "statemachine", "function.asl.json"), + self.test_dir.joinpath("code", "before", "statemachine", "function.asl.json"), + ) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string( + self.watch_process, + "\x1b[32mFinished syncing StepFunctions HelloStepFunction.\x1b[0m\n", + timeout=100, + ) + time.sleep(SFN_SLEEP) + + state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0] + self.assertEqual(self._get_sfn_response(state_machine), '"World 2"') diff --git a/tests/integration/testdata/buildcmd/Python/requirements.txt b/tests/integration/testdata/buildcmd/Python/requirements.txt index ce4af48039..cfc5fd779a 100644 --- a/tests/integration/testdata/buildcmd/Python/requirements.txt +++ b/tests/integration/testdata/buildcmd/Python/requirements.txt @@ -1,7 +1,8 @@ # These are some hard packages to build. Using them here helps us verify that building works on various platforms # NOTE: Fixing to <1.20.3 as numpy1.20.3 started to use a new wheel naming convention (PEP 600) -numpy<1.20.3 +numpy<1.20.3; python_version <= '3.9' +numpy==1.23.5; python_version >= '3.10' # `cryptography` has a dependency on `pycparser` which, for some reason doesn't build inside a Docker container. 
# Turning this off until we resolve this issue: https://github.com/awslabs/aws-lambda-builders/issues/29 # cryptography~=2.4 diff --git a/tests/integration/testdata/sync/infra/template-python-auto-skip.yaml b/tests/integration/testdata/sync/infra/template-python-auto-skip.yaml index cbd3ad3a4f..5275a0bf6f 100644 --- a/tests/integration/testdata/sync/infra/template-python-auto-skip.yaml +++ b/tests/integration/testdata/sync/infra/template-python-auto-skip.yaml @@ -20,19 +20,19 @@ Resources: Type: AWS::Serverless::Api Properties: StageName: prod - DefinitionUri: before/Python/apigateway/definition.json + DefinitionUri: after/Python/apigateway/definition.json HelloWorldApiV2: Type: AWS::Serverless::HttpApi Properties: StageName: beta - DefinitionUri: before/Python/apigateway2/definition.yaml + DefinitionUri: after/Python/apigateway2/definition.yaml HelloWorldLayer: Type: AWS::Serverless::LayerVersion Properties: Description: Hello World Layer - ContentUri: before/Python/layer/ + ContentUri: after/Python/layer/ CompatibleRuntimes: - python3.7 Metadata: @@ -41,7 +41,7 @@ Resources: HelloStepFunction: Type: AWS::Serverless::StateMachine Properties: - DefinitionUri: before/Python/statemachine/function.asl.json + DefinitionUri: after/Python/statemachine/function.asl.json Policies: - LambdaInvokePolicy: - FunctionName: !Ref HelloWorldFunction + FunctionName: !Ref HelloWorldFunction \ No newline at end of file diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 4376114362..bb96c16079 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -221,6 +221,157 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( on_failure=self.on_failure, ) + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.guided_context.manage_stack") + @patch("samcli.commands.deploy.guided_context.auth_per_resource") + @patch("samcli.commands.deploy.guided_context.get_template_parameters") + @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) + @patch("samcli.commands.deploy.guided_context.prompt") + @patch("samcli.commands.deploy.guided_context.confirm") + @patch("samcli.commands.deploy.guided_context.tag_translation") + @patch("samcli.commands.deploy.guided_context.sync_ecr_stack") + def test_all_args_guided_use_defaults( + self, + mock_sync_ecr_stack, + mock_tag_translation, + mock_confirm, + mock_prompt, + mock_signer_config_per_function, + mock_sam_function_provider, + mock_get_buildable_stacks, + mock_get_template_parameters, + mockauth_per_resource, + mock_managed_stack, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + mock_get_buildable_stacks.return_value = (Mock(), []) + mock_tag_translation.return_value = "helloworld-123456-v1" + + context_mock = Mock() + function_mock = MagicMock() + function_mock.packagetype = IMAGE + function_mock.imageuri = "helloworld:v1" + function_mock.full_path = "HelloWorldFunction" + mock_sam_function_provider.return_value.get_all.return_value = 
[function_mock] + mockauth_per_resource.return_value = [("HelloWorldResource", False)] + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_confirm.side_effect = [True, False, True, True, True, True, True] + mock_prompt.side_effect = [ + "sam-app", + "us-east-1", + "guidedParameter", + "secure", + ("CAPABILITY_IAM",), + "testconfig.toml", + "test-env", + ] + + mock_get_template_parameters.return_value = { + "Myparameter": {"Type": "String"}, + "MyNoEchoParameter": {"Type": "String", "NoEcho": True}, + } + + mock_managed_stack.return_value = "managed-s3-bucket" + mock_sync_ecr_stack.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/managed-ecr" + } + + mock_signer_config_per_function.return_value = ({}, {}) + + self.resolve_s3 = True + self.resolve_image_repos = True + with patch.object(GuidedConfig, "save_config", MagicMock(return_value=True)) as mock_save_config: + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + image_repository=None, + image_repositories=None, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + signing_profiles=self.signing_profiles, + resolve_s3=self.resolve_s3, + config_env=self.config_env, + config_file=self.config_file, + resolve_image_repos=self.resolve_image_repos, + disable_rollback=self.disable_rollback, + on_failure=self.on_failure, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + image_repository=None, + image_repositories={"HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/managed-ecr"}, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + signing_profiles=self.signing_profiles, + use_changeset=self.use_changeset, + disable_rollback=True, + poll_delay=5, + on_failure=self.on_failure, + ) + + context_mock.run.assert_called_with() + mock_save_config.assert_called_with( + { + "Myparameter": {"Value": "guidedParameter", "Hidden": False}, + "MyNoEchoParameter": {"Value": "secure", "Hidden": True}, + }, + "test-env", + "testconfig.toml", + capabilities=("CAPABILITY_IAM",), + confirm_changeset=True, + profile=self.profile, + region="us-east-1", + resolve_s3=True, + resolve_image_repos=True, + image_repositories=None, + stack_name="sam-app", + s3_prefix="sam-app", + signing_profiles=self.signing_profiles, + disable_rollback=True, + ) + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + @patch("samcli.commands.package.command.click") 
@patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") @@ -338,7 +489,7 @@ def test_all_args_guided( signing_profiles=self.signing_profiles, use_changeset=self.use_changeset, disable_rollback=True, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) @@ -354,7 +505,8 @@ def test_all_args_guided( confirm_changeset=True, profile=self.profile, region="us-east-1", - s3_bucket="managed-s3-bucket", + resolve_s3=True, + resolve_image_repos=False, image_repositories={"HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1"}, stack_name="sam-app", s3_prefix="sam-app", @@ -488,7 +640,7 @@ def test_all_args_guided_no_save_echo_param_to_config( signing_profiles=self.signing_profiles, use_changeset=self.use_changeset, disable_rollback=True, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) @@ -501,7 +653,7 @@ def test_all_args_guided_no_save_echo_param_to_config( MOCK_SAM_CONFIG.put.call_args_list, [ call(["deploy"], "parameters", "stack_name", "sam-app", env="test-env"), - call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket", env="test-env"), + call(["deploy"], "parameters", "resolve_s3", True, env="test-env"), call(["deploy"], "parameters", "s3_prefix", "sam-app", env="test-env"), call(["deploy"], "parameters", "region", "us-east-1", env="test-env"), call(["deploy"], "parameters", "confirm_changeset", True, env="test-env"), @@ -642,7 +794,7 @@ def test_all_args_guided_no_params_save_config( signing_profiles=self.signing_profiles, use_changeset=self.use_changeset, disable_rollback=True, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) @@ -655,7 +807,7 @@ def test_all_args_guided_no_params_save_config( MOCK_SAM_CONFIG.put.call_args_list, [ call(["deploy"], "parameters", "stack_name", "sam-app", env="test-env"), - call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket", env="test-env"), + call(["deploy"], "parameters", "resolve_s3", True, env="test-env"), call(["deploy"], "parameters", "s3_prefix", "sam-app", env="test-env"), call(["deploy"], "parameters", "region", "us-east-1", env="test-env"), call(["deploy"], "parameters", "confirm_changeset", True, env="test-env"), @@ -780,7 +932,7 @@ def test_all_args_guided_no_params_no_save_config( signing_profiles=self.signing_profiles, use_changeset=self.use_changeset, disable_rollback=self.disable_rollback, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) @@ -856,7 +1008,7 @@ def test_all_args_resolve_s3( signing_profiles=self.signing_profiles, use_changeset=self.use_changeset, disable_rollback=self.disable_rollback, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) @@ -971,7 +1123,7 @@ def test_all_args_resolve_image_repos( signing_profiles=self.signing_profiles, use_changeset=True, disable_rollback=self.disable_rollback, - poll_delay=0.5, + poll_delay=5, on_failure=self.on_failure, ) diff --git a/tests/unit/commands/package/test_package_context.py b/tests/unit/commands/package/test_package_context.py index 542187a6f9..682dfba17c 100644 --- a/tests/unit/commands/package/test_package_context.py +++ b/tests/unit/commands/package/test_package_context.py @@ -1,6 +1,7 @@ """Test sam package command""" from unittest import TestCase from unittest.mock import patch, MagicMock, Mock, call, ANY +from parameterized import parameterized import tempfile @@ -9,6 +10,7 @@ from samcli.lib.package.artifact_exporter import Template from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from 
samcli.lib.samlib.resource_metadata_normalizer import ResourceMetadataNormalizer +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION, AWS_SERVERLESS_FUNCTION class TestPackageCommand(TestCase): @@ -35,7 +37,8 @@ def setUp(self): def test_template_permissions_error(self, patched_boto, patched_get_stacks): patched_get_stacks.return_value = Mock(), Mock() with self.assertRaises(PackageFailedError): - self.package_command_context.run() + with patch.object(self.package_command_context, "_warn_preview_runtime") as patched_warn_preview_runtime: + self.package_command_context.run() @patch.object(ResourceMetadataNormalizer, "normalize", MagicMock()) @patch.object(Template, "export", MagicMock(return_value={})) @@ -104,6 +107,7 @@ def test_template_path_valid_no_json(self, patched_boto): ) package_command_context.run() + @patch("samcli.commands.package.package_context.PackageContext._warn_preview_runtime") @patch("samcli.commands.package.package_context.get_resource_full_path_by_id") @patch.object(SamLocalStackProvider, "get_stacks") @patch.object(Template, "export", MagicMock(return_value={})) @@ -117,6 +121,7 @@ def test_boto_clients_created_with_config( patched_boto_session, patched_get_stacks, patched_get_resource_full_path_by_id, + patched_warn_preview_runtime, ): patched_get_stacks.return_value = Mock(), Mock() patched_get_resource_full_path_by_id.return_value = None @@ -141,9 +146,44 @@ def test_boto_clients_created_with_config( patched_boto_client.assert_has_calls([call("s3", config=ANY)]) patched_boto_client.assert_has_calls([call("ecr", config=ANY)]) patched_boto_client.assert_has_calls([call("signer", config=ANY)]) + patched_warn_preview_runtime.assert_called_with(patched_get_stacks()[0]) patched_get_config.assert_has_calls( [call(region_name=ANY, signature_version=ANY), call(region_name=ANY), call(region_name=ANY)] ) - print("hello") + @parameterized.expand( + [ + ( + "preview_runtime", + True, + AWS_SERVERLESS_FUNCTION, + ), + ( + "ga_runtime", + False, + AWS_SERVERLESS_FUNCTION, + ), + ( + "preview_runtime", + True, + AWS_LAMBDA_FUNCTION, + ), + ( + "ga_runtime", + False, + AWS_LAMBDA_FUNCTION, + ), + ] + ) + @patch("samcli.commands.package.package_context.PREVIEW_RUNTIMES", {"preview_runtime"}) + @patch("samcli.commands.package.package_context.click") + def test_warn_preview_runtime(self, runtime, should_warn, function_type, patched_click): + resources = {"MyFunction": {"Type": function_type, "Properties": {"Runtime": runtime}}} + + self.package_command_context._warn_preview_runtime([Mock(resources=resources)]) + + if should_warn: + patched_click.secho.assert_called_once() + else: + patched_click.secho.assert_not_called() diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index dbe5a99580..1ff1217138 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -988,6 +988,7 @@ def test_sync( (), (), True, + True, "mystack", "myregion", None, diff --git a/tests/unit/commands/sync/test_command.py b/tests/unit/commands/sync/test_command.py index 34041e8016..94f0dd118f 100644 --- a/tests/unit/commands/sync/test_command.py +++ b/tests/unit/commands/sync/test_command.py @@ -61,12 +61,11 @@ def setUp(self): @parameterized.expand( [ - # Reminder: Add back after sync infra skip ready for release - # (False, False, True, False, InfraSyncResult(False, {ResourceIdentifier("Function")})), - (False, False, True, False, InfraSyncResult(True)), - (False, False, False, False, 
InfraSyncResult(True)), - (False, False, True, True, InfraSyncResult(True)), - (False, False, False, True, InfraSyncResult(True)), + (False, False, True, True, False, InfraSyncResult(False, {ResourceIdentifier("Function")})), + (False, False, True, True, False, InfraSyncResult(True)), + (False, False, False, False, False, InfraSyncResult(True)), + (False, False, True, True, True, InfraSyncResult(True)), + (False, False, False, False, True, InfraSyncResult(True)), ] ) @patch("os.environ", {**os.environ, "SAM_CLI_POLL_DELAY": 10}) @@ -88,6 +87,7 @@ def test_infra_must_succeed_sync( code, watch, auto_dependency_layer, + skip_deploy_sync, use_container, infra_sync_result, check_enable_adl_mock, @@ -123,6 +123,7 @@ def test_infra_must_succeed_sync( self.resource_id, self.resource, auto_dependency_layer, + skip_deploy_sync, self.stack_name, self.region, self.profile, @@ -217,18 +218,21 @@ def test_infra_must_succeed_sync( if not infra_sync_result.infra_sync_executed: execute_code_sync_mock.assert_called_with( - ANY, - build_context_mock, - deploy_context_mock, - sync_context_mock, - ("Function",), - None, - auto_dependency_layer, + template=self.template_file, + build_context=build_context_mock, + deploy_context=deploy_context_mock, + sync_context=sync_context_mock, + resource_ids=["Function"], + resource_types=None, + auto_dependency_layer=auto_dependency_layer, + use_built_resources=True, ) else: execute_code_sync_mock.assert_not_called() - @parameterized.expand([(False, True, False, False), (False, True, False, True), (False, False, False, True)]) + @parameterized.expand( + [(False, True, False, True, False), (False, True, False, False, True), (False, False, False, False, True)] + ) @patch("samcli.commands.sync.command.click") @patch("samcli.commands.sync.command.execute_watch") @patch("samcli.commands.build.command.click") @@ -245,6 +249,7 @@ def test_watch_must_succeed_sync( code, watch, auto_dependency_layer, + skip_deploy_sync, use_container, SyncContextMock, manage_stack_mock, @@ -258,7 +263,7 @@ def test_watch_must_succeed_sync( execute_watch_mock, click_mock, ): - skip_infra_syncs = watch and code + disable_infra_syncs = watch and code build_context_mock = Mock() BuildContextMock.return_value.__enter__.return_value = build_context_mock package_context_mock = Mock() @@ -275,6 +280,7 @@ def test_watch_must_succeed_sync( self.resource_id, self.resource, auto_dependency_layer, + skip_deploy_sync, self.stack_name, self.region, self.profile, @@ -359,16 +365,16 @@ def test_watch_must_succeed_sync( on_failure=None, ) execute_watch_mock.assert_called_once_with( - self.template_file, - build_context_mock, - package_context_mock, - deploy_context_mock, - sync_context_mock, - auto_dependency_layer, - skip_infra_syncs, + template=self.template_file, + build_context=build_context_mock, + package_context=package_context_mock, + deploy_context=deploy_context_mock, + sync_context=sync_context_mock, + auto_dependency_layer=auto_dependency_layer, + disable_infra_syncs=disable_infra_syncs, ) - @parameterized.expand([(True, False, True, False), (True, False, False, True)]) + @parameterized.expand([(True, False, True, True, False), (True, False, False, False, True)]) @patch("samcli.commands.sync.command.click") @patch("samcli.commands.sync.command.execute_code_sync") @patch("samcli.commands.build.command.click") @@ -386,6 +392,7 @@ def test_code_must_succeed_sync( code, watch, auto_dependency_layer, + skip_deploy_sync, use_container, check_enable_adl_mock, SyncContextMock, @@ -418,6 +425,7 @@ def 
test_code_must_succeed_sync( self.resource_id, self.resource, auto_dependency_layer, + skip_deploy_sync, self.stack_name, self.region, self.profile, @@ -764,7 +772,7 @@ def test_execute_watch( watch_manager_mock, click_mock, ): - skip_infra_syncs = code + disable_infra_syncs = code execute_watch( self.template_file, self.build_context, @@ -772,7 +780,7 @@ def test_execute_watch( self.deploy_context, self.sync_context, auto_dependency_layer, - skip_infra_syncs, + disable_infra_syncs, ) watch_manager_mock.assert_called_once_with( @@ -782,7 +790,7 @@ def test_execute_watch( self.deploy_context, self.sync_context, auto_dependency_layer, - skip_infra_syncs, + disable_infra_syncs, ) watch_manager_mock.return_value.start.assert_called_once_with() diff --git a/tests/unit/commands/sync/test_sync_context.py b/tests/unit/commands/sync/test_sync_context.py index 2611377d37..9093228fa5 100644 --- a/tests/unit/commands/sync/test_sync_context.py +++ b/tests/unit/commands/sync/test_sync_context.py @@ -199,14 +199,17 @@ def test_none_toml_table_should_return_none(self): self.assertIsNone(_toml_document_to_sync_state(tomlkit.document())) -@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}]) +@parameterized_class( + [{"dependency_layer": True, "skip_deploy_sync": True}, {"dependency_layer": False, "skip_deploy_sync": False}] +) class TestSyncContext(TestCase): dependency_layer: bool + skip_deploy_sync: bool def setUp(self) -> None: self.build_dir = "build_dir" self.cache_dir = "cache_dir" - self.sync_context = SyncContext(self.dependency_layer, self.build_dir, self.cache_dir) + self.sync_context = SyncContext(self.dependency_layer, self.build_dir, self.cache_dir, self.skip_deploy_sync) @parameterized.expand([(True,), (False,)]) @patch("samcli.commands.sync.sync_context.rmtree_if_exists") diff --git a/tests/unit/lib/sync/test_infra_sync_executor.py b/tests/unit/lib/sync/test_infra_sync_executor.py index 438c152ea5..9142df482a 100644 --- a/tests/unit/lib/sync/test_infra_sync_executor.py +++ b/tests/unit/lib/sync/test_infra_sync_executor.py @@ -1,9 +1,11 @@ +from pathlib import Path from unittest import TestCase -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, patch, call from samcli.lib.providers.provider import ResourceIdentifier from samcli.lib.sync.infra_sync_executor import datetime, InfraSyncExecutor from botocore.exceptions import ClientError from parameterized import parameterized +from samcli.lib.telemetry.event import Event, EventTracker class TestInfraSyncExecutor(TestCase): @@ -12,6 +14,7 @@ def setUp(self): self.package_context = MagicMock() self.deploy_context = MagicMock() self.sync_context = MagicMock() + EventTracker.clear_trackers() @parameterized.expand([(True,), (False,)]) @patch("samcli.lib.sync.infra_sync_executor.InfraSyncExecutor._auto_skip_infra_sync") @@ -20,12 +23,12 @@ def setUp(self): def test_execute_infra_sync(self, auto_skip_infra_sync, datetime_mock, session_mock, auto_skip_infra_sync_mock): datetime_mock.utcnow.return_value = datetime(2023, 2, 8, 12, 12, 12) last_infra_sync_time = datetime(2023, 2, 4, 12, 12, 12) + self.sync_context.skip_deploy_sync = True self.sync_context.get_latest_infra_sync_time.return_value = last_infra_sync_time infra_sync_executor = InfraSyncExecutor( self.build_context, self.package_context, self.deploy_context, self.sync_context ) auto_skip_infra_sync_mock.return_value = auto_skip_infra_sync - self.sync_context.get_latest_infra_sync_time.return_value = datetime.utcnow() infra_sync_result = 
infra_sync_executor.execute_infra_sync(True) @@ -40,9 +43,16 @@ def test_execute_infra_sync(self, auto_skip_infra_sync, datetime_mock, session_m self.deploy_context.run.assert_called_once() self.sync_context.update_infra_sync_time.assert_called_once() self.assertEqual(code_sync_resources, set()) + self.assertEqual(len(EventTracker.get_tracked_events()), 3) + self.assertIn(Event("SyncFlowStart", "SkipInfraSyncExecute"), EventTracker.get_tracked_events()) + self.assertIn(Event("SyncFlowStart", "InfraSyncExecute"), EventTracker.get_tracked_events()) + self.assertIn(Event("SyncFlowEnd", "InfraSyncExecute"), EventTracker.get_tracked_events()) + else: + self.assertEqual(len(EventTracker.get_tracked_events()), 2) + self.assertIn(Event("SyncFlowStart", "SkipInfraSyncExecute"), EventTracker.get_tracked_events()) + self.assertIn(Event("SyncFlowEnd", "SkipInfraSyncExecute"), EventTracker.get_tracked_events()) - # Reminder: Add back after sync infra skip ready for release - # self.assertEqual(executed, not auto_skip_infra_sync) + self.assertEqual(executed, not auto_skip_infra_sync) @patch("samcli.lib.sync.infra_sync_executor.InfraSyncExecutor._auto_skip_infra_sync") @patch("samcli.lib.sync.infra_sync_executor.Session") @@ -50,12 +60,11 @@ def test_execute_infra_sync(self, auto_skip_infra_sync, datetime_mock, session_m def test_7_days_auto_execute_infra_sync(self, datetime_mock, session_mock, auto_skip_infra_sync_mock): datetime_mock.utcnow.return_value = datetime(2023, 2, 8, 12, 12, 12) last_infra_sync_time = datetime(2023, 1, 31, 12, 12, 12) + self.sync_context.skip_deploy_sync = True self.sync_context.get_latest_infra_sync_time.return_value = last_infra_sync_time infra_sync_executor = InfraSyncExecutor( self.build_context, self.package_context, self.deploy_context, self.sync_context ) - auto_skip_infra_sync_mock.return_value = False - self.sync_context.get_latest_infra_sync_time.return_value = datetime.utcnow() infra_sync_result = infra_sync_executor.execute_infra_sync(True) executed = infra_sync_result.infra_sync_executed @@ -70,6 +79,10 @@ def test_7_days_auto_execute_infra_sync(self, datetime_mock, session_mock, auto_ self.sync_context.update_infra_sync_time.assert_called_once() self.assertEqual(code_sync_resources, set()) + self.assertEqual(len(EventTracker.get_tracked_events()), 2) + self.assertIn(Event("SyncFlowStart", "InfraSyncExecute"), EventTracker.get_tracked_events()) + self.assertIn(Event("SyncFlowEnd", "InfraSyncExecute"), EventTracker.get_tracked_events()) + @patch("samcli.lib.sync.infra_sync_executor.SYNC_FLOW_THRESHOLD", 1) @patch("samcli.lib.sync.infra_sync_executor.InfraSyncExecutor._auto_skip_infra_sync") @patch("samcli.lib.sync.infra_sync_executor.Session") @@ -77,6 +90,7 @@ def test_7_days_auto_execute_infra_sync(self, datetime_mock, session_mock, auto_ def test_execute_infra_sync_exceed_threshold(self, datetime_mock, session_mock, auto_skip_infra_sync_mock): datetime_mock.utcnow.return_value = datetime(2023, 2, 8, 12, 12, 12) last_infra_sync_time = datetime(2023, 2, 4, 12, 12, 12) + self.sync_context.skip_deploy_sync = True self.sync_context.get_latest_infra_sync_time.return_value = last_infra_sync_time infra_sync_executor = InfraSyncExecutor( self.build_context, self.package_context, self.deploy_context, self.sync_context @@ -287,7 +301,10 @@ def test_auto_skip_infra_sync_all_resources(self, session_mock, get_template_moc def test_auto_skip_infra_sync_nested_stack(self, session_mock, get_template_mock, local_path_mock): built_template_dict = { "Resources": { - 
"ServerlessApplication": {"Type": "AWS::Serverless::Application", "Properties": {"Location": "local/"}}, + "ServerlessApplication": { + "Type": "AWS::Serverless::Application", + "Properties": {"Location": str(Path("local") / "template.yaml")}, + }, } } @@ -302,7 +319,7 @@ def test_auto_skip_infra_sync_nested_stack(self, session_mock, get_template_mock built_nested_dict = { "Resources": { - "ServerlessFunction": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "local/"}} + "ServerlessFunction": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "function/"}} } } @@ -320,16 +337,20 @@ def test_auto_skip_infra_sync_nested_stack(self, session_mock, get_template_mock ) infra_sync_executor._cfn_client.get_template.side_effect = [ { - "TemplateBody": """{ - "Resources": { - "ServerlessApplication": {"Type": "AWS::Serverless::Application", "Properties": {"Location": "local/"}} - } - }""" + "TemplateBody": f"""{{ + "Resources": {{ + "ServerlessApplication": {{ + "Type": "AWS::Serverless::Application", + "Properties": {{"Location": "{str(Path("local") / "template.yaml")}"}} }} + }} + }}""" }, { "TemplateBody": """{ "Resources": { - "ServerlessFunction": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "local/"}} + "ServerlessFunction": { + "Type": "AWS::Serverless::Function", + "Properties": {"CodeUri": "function/"}} } }""" }, @@ -342,7 +363,20 @@ def test_auto_skip_infra_sync_nested_stack(self, session_mock, get_template_mock with patch("botocore.response.StreamingBody") as stream_mock: stream_mock.read.return_value = packaged_nested_dict.encode("utf-8") infra_sync_executor._s3_client.get_object.return_value = {"Body": stream_mock} - self.assertTrue(infra_sync_executor._auto_skip_infra_sync("path", "path", "stack_name")) + self.assertTrue( + infra_sync_executor._auto_skip_infra_sync( + str(Path("path") / "packaged-template.yaml"), + str(Path("path") / "built-template.yaml"), + "stack_name", + ) + ) + get_template_mock.assert_has_calls( + [ + call(str(Path("path") / "packaged-template.yaml")), + call(str(Path("path") / "built-template.yaml")), + call(str(Path("path") / "local/template.yaml")), + ] + ) self.assertEqual( infra_sync_executor.code_sync_resources, {ResourceIdentifier("ServerlessApplication/ServerlessFunction")}, @@ -399,10 +433,9 @@ def test_auto_skip_infra_sync_nested_stack_with_sar( self.assertEqual(infra_sync_executor._auto_skip_infra_sync("path", "path2", "stack_name"), expected_result) self.assertEqual(infra_sync_executor.code_sync_resources, set()) - @patch("samcli.lib.sync.infra_sync_executor.is_local_path") @patch("samcli.lib.sync.infra_sync_executor.get_template_data") @patch("samcli.lib.sync.infra_sync_executor.Session") - def test_auto_skip_infra_sync_http_template_location(self, session_mock, get_template_mock, local_path_mock): + def test_auto_skip_infra_sync_http_template_location(self, session_mock, get_template_mock): built_template_dict = { "Resources": { "NestedStack": { @@ -428,7 +461,6 @@ def test_auto_skip_infra_sync_http_template_location(self, session_mock, get_tem }""" get_template_mock.side_effect = [packaged_template_dict, built_template_dict] - local_path_mock.return_value = True infra_sync_executor = InfraSyncExecutor( self.build_context, self.package_context, self.deploy_context, self.sync_context diff --git a/tests/unit/lib/sync/test_sync_flow.py b/tests/unit/lib/sync/test_sync_flow.py index 3d14344004..290957f6c8 100644 --- a/tests/unit/lib/sync/test_sync_flow.py +++ b/tests/unit/lib/sync/test_sync_flow.py @@ -13,6 
+13,15 @@ ) from parameterized import parameterized +from samcli.lib.utils.resources import ( + AWS_SERVERLESS_HTTPAPI, + AWS_SERVERLESS_API, + AWS_SERVERLESS_STATEMACHINE, + AWS_APIGATEWAY_RESTAPI, + AWS_APIGATEWAY_V2_API, + AWS_STEPFUNCTIONS_STATEMACHINE, +) + class TestSyncFlow(TestCase): def create_sync_flow(self, mock_update_local_hash=True): @@ -220,49 +229,69 @@ def test_hash(self): sync_flow._equality_keys.return_value = "A" self.assertEqual(hash(sync_flow), hash((type(sync_flow), "A"))) + @parameterized.expand( + [ + (AWS_SERVERLESS_HTTPAPI, "DefinitionUri"), + (AWS_SERVERLESS_API, "DefinitionUri"), + (AWS_SERVERLESS_STATEMACHINE, "DefinitionUri"), + (AWS_APIGATEWAY_V2_API, "BodyS3Location"), + (AWS_APIGATEWAY_RESTAPI, "BodyS3Location"), + (AWS_STEPFUNCTIONS_STATEMACHINE, "DefinitionS3Location"), + ] + ) @patch("samcli.lib.sync.sync_flow.Stack.get_stack_by_full_path") - def test_get_definition_path(self, get_stack_mock): - resource = {"Properties": {"DefinitionUri": "test_uri"}} + def test_get_definition_path(self, resource_type, definition_field, get_stack_mock): + resource = {"Properties": {definition_field: "test_uri"}, "Type": resource_type} get_stack_mock.return_value = Stack("parent_path", "stack_name", "location/template.yaml", None, {}) definition_path = get_definition_path(resource, "identifier", False, "base_dir", []) self.assertEqual(definition_path, Path("location").joinpath("test_uri")) - resource = {"Properties": {"DefinitionUri": ""}} + resource = {"Properties": {definition_field: ""}, "Type": resource_type} definition_path = get_definition_path(resource, "identifier", False, "base_dir", []) self.assertEqual(definition_path, None) - def test_get_definition_file_with_base_dir(self): - resource = {"Properties": {"DefinitionUri": "test_uri"}} + @parameterized.expand( + [ + (AWS_SERVERLESS_HTTPAPI, "DefinitionUri"), + (AWS_SERVERLESS_API, "DefinitionUri"), + (AWS_SERVERLESS_STATEMACHINE, "DefinitionUri"), + (AWS_APIGATEWAY_V2_API, "BodyS3Location"), + (AWS_APIGATEWAY_RESTAPI, "BodyS3Location"), + (AWS_STEPFUNCTIONS_STATEMACHINE, "DefinitionS3Location"), + ] + ) + def test_get_definition_file_with_base_dir(self, resource_type, definition_field): + resource = {"Properties": {definition_field: "test_uri"}, "Type": resource_type} definition_path = get_definition_path(resource, "identifier", True, "base_dir", []) self.assertEqual(definition_path, Path("base_dir").joinpath("test_uri")) - # Reminder: Add back after sync infra skip ready for release - # @patch("samcli.lib.sync.sync_flow.Session") - # @patch.multiple(SyncFlow, __abstractmethods__=set()) - # def test_compare_local(self, patched_session): - # sync_flow = SyncFlow( - # build_context=MagicMock(), - # deploy_context=MagicMock(), - # sync_context=MagicMock(), - # physical_id_mapping={}, - # log_name="log-name", - # stacks=[MagicMock()], - # ) - # sync_flow.gather_resources = MagicMock() - # sync_flow.compare_remote = MagicMock() - # sync_flow.sync = MagicMock() - # sync_flow.gather_dependencies = MagicMock() - # sync_flow._get_resource_api_calls = MagicMock() - - # sync_flow._local_sha = None - # self.assertEqual(sync_flow.compare_local(), False) - - # sync_flow._local_sha = "hash" - - # sync_flow._sync_context.get_resource_latest_sync_hash.return_value = None - # self.assertEqual(sync_flow.compare_local(), False) - - # sync_flow._sync_context.get_resource_latest_sync_hash.return_value = "hash" - # self.assertEqual(sync_flow.compare_local(), True) + @patch("samcli.lib.sync.sync_flow.Session") + 
@patch("samcli.lib.sync.sync_flow.SyncFlow.sync_state_identifier", new_callable=PropertyMock) + @patch.multiple(SyncFlow, __abstractmethods__=set()) + def test_compare_local(self, patched_session, patched_sync_state_identifier): + sync_flow = SyncFlow( + build_context=MagicMock(), + deploy_context=MagicMock(), + sync_context=MagicMock(), + physical_id_mapping={}, + log_name="log-name", + stacks=[MagicMock()], + ) + sync_flow.gather_resources = MagicMock() + sync_flow.compare_remote = MagicMock() + sync_flow.sync = MagicMock() + sync_flow.gather_dependencies = MagicMock() + sync_flow._get_resource_api_calls = MagicMock() + + sync_flow._local_sha = None + self.assertEqual(sync_flow.compare_local(), False) + + sync_flow._local_sha = "hash" + + sync_flow._sync_context.get_resource_latest_sync_hash.return_value = None + self.assertEqual(sync_flow.compare_local(), False) + + sync_flow._sync_context.get_resource_latest_sync_hash.return_value = "hash" + self.assertEqual(sync_flow.compare_local(), True) diff --git a/tests/unit/lib/sync/test_watch_manager.py b/tests/unit/lib/sync/test_watch_manager.py index e3e72520a4..5c46eceab4 100644 --- a/tests/unit/lib/sync/test_watch_manager.py +++ b/tests/unit/lib/sync/test_watch_manager.py @@ -276,7 +276,7 @@ def test_start_code_only(self, sleep_mock): self.watch_manager._add_code_triggers = add_code_trigger_mock self.watch_manager._start_code_sync = start_code_sync_mock - self.watch_manager._skip_infra_syncs = True + self.watch_manager._disable_infra_syncs = True with self.assertRaises(KeyboardInterrupt): self.watch_manager._start() @@ -295,7 +295,7 @@ def test_start_code_only(self, sleep_mock): self.path_observer.start.assert_called_once_with() def test_start_code_only_infra_sync_not_set(self): - self.watch_manager._skip_infra_syncs = True + self.watch_manager._disable_infra_syncs = True self.watch_manager.queue_infra_sync() self.assertFalse(self.watch_manager._waiting_infra_sync) diff --git a/tests/unit/local/docker/test_lambda_container.py b/tests/unit/local/docker/test_lambda_container.py index a721df44e3..fff20870e6 100644 --- a/tests/unit/local/docker/test_lambda_container.py +++ b/tests/unit/local/docker/test_lambda_container.py @@ -22,6 +22,7 @@ Runtime.python37.value, Runtime.python38.value, Runtime.python39.value, + Runtime.python310.value, Runtime.dotnet6.value, ] diff --git a/tests/unit/local/docker/test_lambda_debug_settings.py b/tests/unit/local/docker/test_lambda_debug_settings.py index 7c408a175a..ef58585fee 100644 --- a/tests/unit/local/docker/test_lambda_debug_settings.py +++ b/tests/unit/local/docker/test_lambda_debug_settings.py @@ -19,6 +19,7 @@ Runtime.python37, Runtime.python38, Runtime.python39, + Runtime.python310, ]