From 825003e32d3dceeb40bacdb4d92d15a5a5573fe8 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Sun, 14 Oct 2018 15:36:53 -0700 Subject: [PATCH 01/74] Release 1.5.0 (#668) --- CHANGELOG.md | 14 ++++++++++++++ setup.py | 2 +- stacker/__init__.py | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f7ea2f23f..380466a1d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ ## Upcoming release +## 1.5.0 (2018-10-14) + +The big feature in this release is the introduction of "targets" which act as +sort of "virtual nodes" in the graph. It provides a nice way to logically group +stacks. + +- Add support for "targets" [GH-572] +- Fix non-interactive changeset updates w/ stack policies [GH-657] +- Fix interactive_update_stack calls with empty string parameters [GH-658] +- Fix KMS unicode lookup in python 2 [GH-659] +- Locked stacks have no dependencies [GH-661] +- Set default profile earlier [GH-662] +- Get rid of recursion for tail retries and extend retry/timeout [GH-663] + ## 1.4.1 (2018-08-28) This is a minor bugfix release for 1.4.0, no major feature updates. 
diff --git a/setup.py b/setup.py index 2be72566d..1ed5d834b 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -VERSION = "1.4.1" +VERSION = "1.5.0" src_dir = os.path.dirname(__file__) diff --git a/stacker/__init__.py b/stacker/__init__.py index c93dd60fe..aa813611b 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -2,4 +2,4 @@ from __future__ import division from __future__ import absolute_import -__version__ = "1.4.1" +__version__ = "1.5.0" From 749054c19c1dec5fd329f7c93f093185d074d8b8 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Thu, 18 Oct 2018 17:00:28 -0700 Subject: [PATCH 02/74] Ensure output of ssmstore is the appropriate type of string (#670) --- stacker/lookups/handlers/ssmstore.py | 4 +++- stacker/tests/lookups/handlers/test_ssmstore.py | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/stacker/lookups/handlers/ssmstore.py b/stacker/lookups/handlers/ssmstore.py index 213168a55..0490eb592 100644 --- a/stacker/lookups/handlers/ssmstore.py +++ b/stacker/lookups/handlers/ssmstore.py @@ -1,6 +1,8 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +from builtins import str + from stacker.session_cache import get_session from ...util import read_value_from_path @@ -51,7 +53,7 @@ def handler(value, **kwargs): WithDecryption=True ) if 'Parameters' in response: - return response['Parameters'][0]['Value'] + return str(response['Parameters'][0]['Value']) raise ValueError('SSMKey "{}" does not exist in region {}'.format(value, region)) diff --git a/stacker/tests/lookups/handlers/test_ssmstore.py b/stacker/tests/lookups/handlers/test_ssmstore.py index 7622bc25a..020f79772 100644 --- a/stacker/tests/lookups/handlers/test_ssmstore.py +++ b/stacker/tests/lookups/handlers/test_ssmstore.py @@ -1,6 +1,7 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +from builtins 
import str import unittest import mock from botocore.stub import Stubber @@ -47,6 +48,7 @@ def test_ssmstore_handler(self, mock_client): with self.stubber: value = handler(self.ssmkey) self.assertEqual(value, self.ssmvalue) + self.assertIsInstance(value, str) @mock.patch('stacker.lookups.handlers.ssmstore.get_session', return_value=SessionStub(client)) From ba54836482be5992426ca6263d604e6c2d99f3fb Mon Sep 17 00:00:00 2001 From: "Eric J. Holmes" Date: Thu, 1 Nov 2018 18:18:53 -0700 Subject: [PATCH 03/74] Ignore some rediculous rules in newer version of flake8 --- .circleci/config.yml | 1 + Makefile | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index baf6e0744..a5ce441d6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -40,6 +40,7 @@ jobs: - checkout - run: sudo pip install flake8 codecov pep8-naming flake8-future-import - run: sudo python setup.py install + - run: flake8 --version - run: sudo make lint unit-test-27: diff --git a/Makefile b/Makefile index ace8e28ed..ba04a3a02 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,8 @@ build: docker build -t remind101/stacker . 
lint: - flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402 --exclude stacker/tests/ stacker - flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802 stacker/tests # ignore setUp naming + flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,W503,W504,W605 --exclude stacker/tests/ stacker + flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean AWS_DEFAULT_REGION=us-east-1 python setup.py nosetests From 36558a77a887413da2df0285710549db511c159e Mon Sep 17 00:00:00 2001 From: Craig Davis Date: Sun, 4 Nov 2018 09:39:00 -0600 Subject: [PATCH 04/74] By making it all lowercase the useable options become Y/y, v/V and anything else skips (#674) --- stacker/providers/aws/default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index f6824e14c..dd4d16a22 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -162,7 +162,7 @@ def ask_for_approval(full_changeset=None, params_diff=None, approval_options.append('v') approve = ui.ask("Execute the above changes? [{}] ".format( - '/'.join(approval_options))) + '/'.join(approval_options))).lower() if include_verbose and approve == "v": if params_diff: From 16921c9bfc29076f9f3aae29ee975bfd7aef7547 Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Sun, 4 Nov 2018 07:39:17 -0800 Subject: [PATCH 05/74] add '?' 
to lookup regex (#676) Filters passed into the ami lookup may include question marks --- stacker/lookups/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacker/lookups/__init__.py b/stacker/lookups/__init__.py index fddaeb89c..4db0bb04f 100644 --- a/stacker/lookups/__init__.py +++ b/stacker/lookups/__init__.py @@ -19,7 +19,7 @@ # space ?\s* # any number of spaces separating the # type from the input -(?P[@\+\/,\._\-a-zA-Z0-9\:\s=\[\]\*]+) # the input value to the lookup +(?P[@\+\/,\.\?_\-a-zA-Z0-9\:\s=\[\]\*]+) # the input value to the lookup )\} # closing brace of the lookup """, re.VERBOSE) From ce1d88b15450daa6799a4891793b382bcac04455 Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Sun, 4 Nov 2018 07:40:19 -0800 Subject: [PATCH 06/74] add local package sources (#677) --- docs/config.rst | 10 ++++++++-- stacker/config/__init__.py | 9 +++++++++ stacker/util.py | 27 +++++++++++++++++++++++---- 3 files changed, 40 insertions(+), 6 deletions(-) diff --git a/docs/config.rst b/docs/config.rst index a4120c2be..3e57af61e 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -156,8 +156,14 @@ For ``.tar.gz`` & ``zip`` archives on s3, specify a ``bucket`` & ``key``:: # last modified date on S3 changes use_latest: false -Use the ``paths`` option when subdirectories of the repo/archive should be -added to Stacker's ``sys.path``. +Local directories can also be specified:: + + package_sources: + local: + - source: ../vpc + +Use the ``paths`` option when subdirectories of the repo/archive/directory +should be added to Stacker's ``sys.path``. 
Cloned repos/archives will be cached between builds; the cache location defaults to ~/.stacker but can be manually specified via the **stacker_cache_dir** top diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index becb97d44..be53e2d97 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -234,6 +234,13 @@ def not_empty_list(value): class AnyType(BaseType): pass +class LocalPackageSource(Model): + source = StringType(required=True) + + paths = ListType(StringType, serialize_when_none=False) + + configs = ListType(StringType, serialize_when_none=False) + class GitPackageSource(Model): uri = StringType(required=True) @@ -264,6 +271,8 @@ class S3PackageSource(Model): class PackageSources(Model): + local = ListType(ModelType(LocalPackageSource)) + git = ListType(ModelType(GitPackageSource)) s3 = ListType(ModelType(S3PackageSource)) diff --git a/stacker/util.py b/stacker/util.py index 92ccf562c..d14242eba 100644 --- a/stacker/util.py +++ b/stacker/util.py @@ -628,6 +628,9 @@ def create_cache_directories(self): def get_package_sources(self): """Make remote python packages available for local use.""" + # Checkout local modules + for config in self.sources.get('local', []): + self.fetch_local_package(config=config) # Checkout S3 repositories specified in config for config in self.sources.get('s3', []): self.fetch_s3_package(config=config) @@ -635,6 +638,18 @@ def get_package_sources(self): for config in self.sources.get('git', []): self.fetch_git_package(config=config) + def fetch_local_package(self, config): + """Make a local path available to current stacker config. + + Args: + config (dict): 'local' path config dictionary + + """ + # Update sys.path & merge in remote configs (if necessary) + self.update_paths_and_config(config=config, + pkg_dir_name=config['source'], + pkg_cache_dir=os.getcwd()) + def fetch_s3_package(self, config): """Make a remote S3 archive available for local use. 
@@ -773,21 +788,25 @@ def fetch_git_package(self, config): self.update_paths_and_config(config=config, pkg_dir_name=dir_name) - def update_paths_and_config(self, config, pkg_dir_name): + def update_paths_and_config(self, config, pkg_dir_name, + pkg_cache_dir=None): """Handle remote source defined sys.paths & configs. Args: config (dict): git config dictionary pkg_dir_name (string): directory name of the stacker archive + pkg_cache_dir (string): fully qualified path to stacker cache + cache directory """ - cached_dir_path = os.path.join(self.package_cache_dir, pkg_dir_name) + if pkg_cache_dir is None: + pkg_cache_dir = self.package_cache_dir + cached_dir_path = os.path.join(pkg_cache_dir, pkg_dir_name) # Add the appropriate directory (or directories) to sys.path if config.get('paths'): for path in config['paths']: - path_to_append = os.path.join(self.package_cache_dir, - pkg_dir_name, + path_to_append = os.path.join(cached_dir_path, path) logger.debug("Appending \"%s\" to python sys.path", path_to_append) From a82836fe93924ac2c09b8809fdbc38b4bc1b6ffd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kai=20Xia=28=E5=A4=8F=E6=81=BA=29?= Date: Mon, 19 Nov 2018 10:36:07 +1100 Subject: [PATCH 07/74] unlock versions as moto/issues/1793 is resolved. 
(#680) Signed-off-by: Kai Xia --- setup.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/setup.py b/setup.py index 1ed5d834b..a706e8ba5 100644 --- a/setup.py +++ b/setup.py @@ -9,11 +9,8 @@ install_requires = [ "future", "troposphere>=1.9.0", - # pinning needed till https://github.com/spulec/moto/issues/1793 is - # resolved - "botocore<1.11.0", - "boto3>=1.7.0,<1.8.0", - ## + "botocore>=1.6.0", + "boto3>=1.3.1", "PyYAML>=3.12", "awacs>=0.6.0", "gitpython>=2.0,<3.0", @@ -23,10 +20,6 @@ ] tests_require = [ - # pinning needed till https://github.com/spulec/moto/issues/1793 is - # resolved - "aws-xray-sdk==1.1.2", - ## "mock~=2.0.0", "moto~=1.1.24", "testfixtures~=4.10.0", From bcf488026aa8eb0a89af139971f0513bbb7630b9 Mon Sep 17 00:00:00 2001 From: Craig Davis Date: Sun, 18 Nov 2018 17:37:26 -0600 Subject: [PATCH 08/74] Bugfix for instances where the uri is not a number. (#681) --- stacker/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacker/util.py b/stacker/util.py index d14242eba..4f95a52f6 100644 --- a/stacker/util.py +++ b/stacker/util.py @@ -842,7 +842,7 @@ def git_ls_remote(self, uri, ref): logger.debug("Matching commit id found: %s", commit_id) return commit_id else: - raise ValueError("Ref \"%s\" not found for repo %d." % (ref, uri)) + raise ValueError("Ref \"%s\" not found for repo %s." % (ref, uri)) def determine_git_ls_remote_ref(self, config): """Determine the ref to be used with the "git ls-remote" command. From a2e386686ec771165be16c951029875939eb7918 Mon Sep 17 00:00:00 2001 From: Niels Laukens Date: Mon, 19 Nov 2018 00:42:47 +0100 Subject: [PATCH 09/74] Make lookup-logic more generic (#665) * Rewrote Lookup-parser The new parser will build the entire AST to support nested lookups. 
* Move dependency injection of ${output} to the lookup itself * Addressed comments * Removed dead code * Fix lint warnings * Fix lint errors after master merge * Fix lint error (unused exception) * Add warning when using old style lookups * Convert lookups to new style * Reformat code to fix linting errors --- stacker/config/__init__.py | 1 + stacker/config/translators/kms.py | 4 +- stacker/exceptions.py | 42 +- stacker/lookups/handlers/__init__.py | 34 ++ stacker/lookups/handlers/ami.py | 151 +++--- stacker/lookups/handlers/default.py | 48 +- stacker/lookups/handlers/dynamodb.py | 124 ++--- stacker/lookups/handlers/envvar.py | 41 +- stacker/lookups/handlers/file.py | 126 ++--- stacker/lookups/handlers/hook_data.py | 28 +- stacker/lookups/handlers/kms.py | 72 +-- stacker/lookups/handlers/output.py | 56 ++- stacker/lookups/handlers/rxref.py | 41 +- stacker/lookups/handlers/split.py | 44 +- stacker/lookups/handlers/ssmstore.py | 69 +-- stacker/lookups/handlers/xref.py | 35 +- stacker/lookups/registry.py | 41 +- stacker/stack.py | 28 +- stacker/tests/blueprints/test_base.py | 54 +- stacker/tests/lookups/handlers/test_ami.py | 14 +- .../tests/lookups/handlers/test_default.py | 16 +- .../tests/lookups/handlers/test_dynamodb.py | 18 +- stacker/tests/lookups/handlers/test_envvar.py | 6 +- stacker/tests/lookups/handlers/test_file.py | 22 +- .../tests/lookups/handlers/test_hook_data.py | 8 +- stacker/tests/lookups/handlers/test_kms.py | 6 +- stacker/tests/lookups/handlers/test_output.py | 5 +- stacker/tests/lookups/handlers/test_rxref.py | 7 +- stacker/tests/lookups/handlers/test_split.py | 8 +- .../tests/lookups/handlers/test_ssmstore.py | 8 +- stacker/tests/lookups/handlers/test_xref.py | 7 +- stacker/tests/lookups/test_registry.py | 38 +- stacker/tests/test_stack.py | 2 + stacker/tests/test_variables.py | 83 +--- stacker/variables.py | 463 ++++++++++++++---- 35 files changed, 1063 insertions(+), 687 deletions(-) diff --git a/stacker/config/__init__.py 
b/stacker/config/__init__.py index be53e2d97..96c09c359 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -234,6 +234,7 @@ def not_empty_list(value): class AnyType(BaseType): pass + class LocalPackageSource(Model): source = StringType(required=True) diff --git a/stacker/config/translators/kms.py b/stacker/config/translators/kms.py index ebe5083b1..9c2e1fe4d 100644 --- a/stacker/config/translators/kms.py +++ b/stacker/config/translators/kms.py @@ -2,9 +2,9 @@ from __future__ import division from __future__ import absolute_import # NOTE: The translator is going to be deprecated in favor of the lookup -from ...lookups.handlers.kms import handler +from ...lookups.handlers.kms import KmsLookup def kms_simple_constructor(loader, node): value = loader.construct_scalar(node) - return handler(value) + return KmsLookup.handler(value) diff --git a/stacker/exceptions.py b/stacker/exceptions.py index 1602528b6..e1ae8339f 100644 --- a/stacker/exceptions.py +++ b/stacker/exceptions.py @@ -15,17 +15,27 @@ def __init__(self, lookup, lookups, value, *args, **kwargs): message = ( "Lookup: \"{}\" has non-string return value, must be only lookup " "present (not {}) in \"{}\"" - ).format(lookup.raw, len(lookups), value) + ).format(str(lookup), len(lookups), value) super(InvalidLookupCombination, self).__init__(message, *args, **kwargs) +class InvalidLookupConcatenation(Exception): + """ + Intermediary Exception to be converted to InvalidLookupCombination once it + bubbles up there + """ + def __init__(self, lookup, lookups, *args, **kwargs): + self.lookup = lookup + self.lookups = lookups + super(InvalidLookupConcatenation, self).__init__("", *args, **kwargs) + + class UnknownLookupType(Exception): - def __init__(self, lookup, *args, **kwargs): - self.lookup = lookup - message = "Unknown lookup type: \"{}\"".format(lookup.type) + def __init__(self, lookup_type, *args, **kwargs): + message = "Unknown lookup type: \"{}\"".format(lookup_type) 
super(UnknownLookupType, self).__init__(message, *args, **kwargs) @@ -35,11 +45,22 @@ def __init__(self, variable_name, lookup, error, *args, **kwargs): self.lookup = lookup self.error = error message = "Couldn't resolve lookup in variable `%s`, " % variable_name - message += "lookup: ${%s}: " % lookup.raw + message += "lookup: ${%s}: " % repr(lookup) message += "(%s) %s" % (error.__class__, error) super(FailedVariableLookup, self).__init__(message, *args, **kwargs) +class FailedLookup(Exception): + """ + Intermediary Exception to be converted to FailedVariableLookup once it + bubbles up there + """ + def __init__(self, lookup, error, *args, **kwargs): + self.lookup = lookup + self.error = error + super(FailedLookup, self).__init__("Failed lookup", *args, **kwargs) + + class InvalidUserdataPlaceholder(Exception): def __init__(self, blueprint_name, exception_message, *args, **kwargs): @@ -70,6 +91,17 @@ def __init__(self, blueprint_name, variable, *args, **kwargs): super(UnresolvedVariable, self).__init__(message, *args, **kwargs) +class UnresolvedVariableValue(Exception): + """ + Intermediary Exception to be converted to UnresolvedVariable once it + bubbles up there + """ + def __init__(self, lookup, *args, **kwargs): + self.lookup = lookup + super(UnresolvedVariableValue, self).__init__( + "Unresolved lookup", *args, **kwargs) + + class MissingVariable(Exception): def __init__(self, blueprint_name, variable_name, *args, **kwargs): diff --git a/stacker/lookups/handlers/__init__.py b/stacker/lookups/handlers/__init__.py index e69de29bb..6b18bed59 100644 --- a/stacker/lookups/handlers/__init__.py +++ b/stacker/lookups/handlers/__init__.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + + +class LookupHandler(object): + @classmethod + def handle(cls, value, context, provider): + """ + Perform the actual lookup + + :param value: Parameter(s) given to this lookup + :type value: str + 
:param context: + :param provider: + :return: Looked-up value + :rtype: str + """ + raise NotImplementedError() + + @classmethod + def dependencies(cls, lookup_data): + """ + Calculate any dependencies required to perform this lookup. + + Note that lookup_data may not be (completely) resolved at this time. + + :param lookup_data: Parameter(s) given to this lookup + :type lookup_data VariableValue + :return: Set of stack names (str) this lookup depends on + :rtype: set + """ + del lookup_data # unused in this implementation + return set() diff --git a/stacker/lookups/handlers/ami.py b/stacker/lookups/handlers/ami.py index 1f6a0c58b..8d51c0619 100644 --- a/stacker/lookups/handlers/ami.py +++ b/stacker/lookups/handlers/ami.py @@ -5,6 +5,7 @@ import re import operator +from . import LookupHandler from ...util import read_value_from_path TYPE_NAME = "ami" @@ -19,76 +20,80 @@ def __init__(self, search_string): super(ImageNotFound, self).__init__(message) -def handler(value, provider, **kwargs): - """Fetch the most recent AMI Id using a filter - - For example: - - ${ami [@]owners:self,account,amazon name_regex:serverX-[0-9]+ architecture:x64,i386} - - The above fetches the most recent AMI where owner is self - account or amazon and the ami name matches the regex described, - the architecture will be either x64 or i386 - - You can also optionally specify the region in which to perform the AMI lookup. - - Valid arguments: - - owners (comma delimited) REQUIRED ONCE: - aws_account_id | amazon | self - - name_regex (a regex) REQUIRED ONCE: - e.g. 
my-ubuntu-server-[0-9]+ - - executable_users (comma delimited) OPTIONAL ONCE: - aws_account_id | amazon | self - - Any other arguments specified are sent as filters to the aws api - For example, "architecture:x86_64" will add a filter - """ # noqa - value = read_value_from_path(value) - - if "@" in value: - region, value = value.split("@", 1) - else: - region = provider.region - - ec2 = get_session(region).client('ec2') - - values = {} - describe_args = {} - - # now find any other arguments that can be filters - matches = re.findall('([0-9a-zA-z_-]+:[^\s$]+)', value) - for match in matches: - k, v = match.split(':', 1) - values[k] = v - - if not values.get('owners'): - raise Exception("'owners' value required when using ami") - owners = values.pop('owners').split(',') - describe_args["Owners"] = owners - - if not values.get('name_regex'): - raise Exception("'name_regex' value required when using ami") - name_regex = values.pop('name_regex') - - executable_users = None - if values.get('executable_users'): - executable_users = values.pop('executable_users').split(',') - describe_args["ExecutableUsers"] = executable_users - - filters = [] - for k, v in values.items(): - filters.append({"Name": k, "Values": v.split(',')}) - describe_args["Filters"] = filters - - result = ec2.describe_images(**describe_args) - - images = sorted(result['Images'], key=operator.itemgetter('CreationDate'), - reverse=True) - for image in images: - if re.match("^%s$" % name_regex, image['Name']): - return image['ImageId'] - - raise ImageNotFound(value) +class AmiLookup(LookupHandler): + @classmethod + def handle(cls, value, provider, **kwargs): + """Fetch the most recent AMI Id using a filter + + For example: + + ${ami [@]owners:self,account,amazon name_regex:serverX-[0-9]+ architecture:x64,i386} + + The above fetches the most recent AMI where owner is self + account or amazon and the ami name matches the regex described, + the architecture will be either x64 or i386 + + You can also 
optionally specify the region in which to perform the + AMI lookup. + + Valid arguments: + + owners (comma delimited) REQUIRED ONCE: + aws_account_id | amazon | self + + name_regex (a regex) REQUIRED ONCE: + e.g. my-ubuntu-server-[0-9]+ + + executable_users (comma delimited) OPTIONAL ONCE: + aws_account_id | amazon | self + + Any other arguments specified are sent as filters to the aws api + For example, "architecture:x86_64" will add a filter + """ # noqa + value = read_value_from_path(value) + + if "@" in value: + region, value = value.split("@", 1) + else: + region = provider.region + + ec2 = get_session(region).client('ec2') + + values = {} + describe_args = {} + + # now find any other arguments that can be filters + matches = re.findall('([0-9a-zA-z_-]+:[^\s$]+)', value) + for match in matches: + k, v = match.split(':', 1) + values[k] = v + + if not values.get('owners'): + raise Exception("'owners' value required when using ami") + owners = values.pop('owners').split(',') + describe_args["Owners"] = owners + + if not values.get('name_regex'): + raise Exception("'name_regex' value required when using ami") + name_regex = values.pop('name_regex') + + executable_users = None + if values.get('executable_users'): + executable_users = values.pop('executable_users').split(',') + describe_args["ExecutableUsers"] = executable_users + + filters = [] + for k, v in values.items(): + filters.append({"Name": k, "Values": v.split(',')}) + describe_args["Filters"] = filters + + result = ec2.describe_images(**describe_args) + + images = sorted(result['Images'], + key=operator.itemgetter('CreationDate'), + reverse=True) + for image in images: + if re.match("^%s$" % name_regex, image['Name']): + return image['ImageId'] + + raise ImageNotFound(value) diff --git a/stacker/lookups/handlers/default.py b/stacker/lookups/handlers/default.py index 860a33cc4..fc2b5c845 100644 --- a/stacker/lookups/handlers/default.py +++ b/stacker/lookups/handlers/default.py @@ -1,35 +1,41 @@ from 
__future__ import print_function from __future__ import division from __future__ import absolute_import + +from . import LookupHandler + + TYPE_NAME = "default" -def handler(value, **kwargs): - """Use a value from the environment or fall back to a default if the - environment doesn't contain the variable. +class DefaultLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Use a value from the environment or fall back to a default if the + environment doesn't contain the variable. - Format of value: + Format of value: - :: + :: - For example: + For example: - Groups: ${default app_security_groups::sg-12345,sg-67890} + Groups: ${default app_security_groups::sg-12345,sg-67890} - If `app_security_groups` is defined in the environment, its defined value - will be returned. Otherwise, `sg-12345,sg-67890` will be the returned - value. + If `app_security_groups` is defined in the environment, its defined + value will be returned. Otherwise, `sg-12345,sg-67890` will be the + returned value. - This allows defaults to be set at the config file level. - """ + This allows defaults to be set at the config file level. + """ - try: - env_var_name, default_val = value.split("::", 1) - except ValueError: - raise ValueError("Invalid value for default: %s. Must be in " - ":: format." % value) + try: + env_var_name, default_val = value.split("::", 1) + except ValueError: + raise ValueError("Invalid value for default: %s. Must be in " + ":: format." 
% value) - if env_var_name in kwargs['context'].environment: - return kwargs['context'].environment[env_var_name] - else: - return default_val + if env_var_name in kwargs['context'].environment: + return kwargs['context'].environment[env_var_name] + else: + return default_val diff --git a/stacker/lookups/handlers/dynamodb.py b/stacker/lookups/handlers/dynamodb.py index 1789d30c6..9dcd97ce8 100644 --- a/stacker/lookups/handlers/dynamodb.py +++ b/stacker/lookups/handlers/dynamodb.py @@ -6,76 +6,80 @@ import re from stacker.session_cache import get_session +from . import LookupHandler from ...util import read_value_from_path TYPE_NAME = 'dynamodb' -def handler(value, **kwargs): - """Get a value from a dynamodb table +class DynamodbLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Get a value from a dynamodb table - dynamodb field types should be in the following format: + dynamodb field types should be in the following format: - [:]@:.... + [:]@:.... - Note: The region is optional, and defaults to the environment's - `AWS_DEFAULT_REGION` if not specified. - """ - value = read_value_from_path(value) - table_info = None - table_keys = None - region = None - table_name = None - if '@' in value: - table_info, table_keys = value.split('@', 1) - if ':' in table_info: - region, table_name = table_info.split(':', 1) + Note: The region is optional, and defaults to the environment's + `AWS_DEFAULT_REGION` if not specified. 
+ """ + value = read_value_from_path(value) + table_info = None + table_keys = None + region = None + table_name = None + if '@' in value: + table_info, table_keys = value.split('@', 1) + if ':' in table_info: + region, table_name = table_info.split(':', 1) + else: + table_name = table_info else: - table_name = table_info - else: - raise ValueError('Please make sure to include a tablename') - - if not table_name: - raise ValueError('Please make sure to include a dynamodb table name') - - table_lookup, table_keys = table_keys.split(':', 1) - - table_keys = table_keys.split('.') - - key_dict = _lookup_key_parse(table_keys) - new_keys = key_dict['new_keys'] - clean_table_keys = key_dict['clean_table_keys'] - - projection_expression = _build_projection_expression(clean_table_keys) - - # lookup the data from dynamodb - dynamodb = get_session(region).client('dynamodb') - try: - response = dynamodb.get_item( - TableName=table_name, - Key={ - table_lookup: new_keys[0] - }, - ProjectionExpression=projection_expression - ) - except ClientError as e: - if e.response['Error']['Code'] == 'ResourceNotFoundException': - raise ValueError( - 'Cannot find the dynamodb table: {}'.format(table_name)) - elif e.response['Error']['Code'] == 'ValidationException': - raise ValueError( - 'No dynamodb record matched the partition key: ' - '{}'.format(table_lookup)) + raise ValueError('Please make sure to include a tablename') + + if not table_name: + raise ValueError('Please make sure to include a dynamodb table ' + 'name') + + table_lookup, table_keys = table_keys.split(':', 1) + + table_keys = table_keys.split('.') + + key_dict = _lookup_key_parse(table_keys) + new_keys = key_dict['new_keys'] + clean_table_keys = key_dict['clean_table_keys'] + + projection_expression = _build_projection_expression(clean_table_keys) + + # lookup the data from dynamodb + dynamodb = get_session(region).client('dynamodb') + try: + response = dynamodb.get_item( + TableName=table_name, + Key={ + table_lookup: 
new_keys[0] + }, + ProjectionExpression=projection_expression + ) + except ClientError as e: + if e.response['Error']['Code'] == 'ResourceNotFoundException': + raise ValueError( + 'Cannot find the dynamodb table: {}'.format(table_name)) + elif e.response['Error']['Code'] == 'ValidationException': + raise ValueError( + 'No dynamodb record matched the partition key: ' + '{}'.format(table_lookup)) + else: + raise ValueError('The dynamodb lookup {} had an error: ' + '{}'.format(value, e)) + # find and return the key from the dynamo data returned + if 'Item' in response: + return (_get_val_from_ddb_data(response['Item'], new_keys[1:])) else: - raise ValueError('The dynamodb lookup {} had an error: ' - '{}'.format(value, e)) - # find and return the key from the dynamo data returned - if 'Item' in response: - return (_get_val_from_ddb_data(response['Item'], new_keys[1:])) - else: - raise ValueError( - 'The dynamodb record could not be found using the following ' - 'key: {}'.format(new_keys[0])) + raise ValueError( + 'The dynamodb record could not be found using the following ' + 'key: {}'.format(new_keys[0])) def _lookup_key_parse(table_keys): diff --git a/stacker/lookups/handlers/envvar.py b/stacker/lookups/handlers/envvar.py index 6551a63c2..a1d9ed5fd 100644 --- a/stacker/lookups/handlers/envvar.py +++ b/stacker/lookups/handlers/envvar.py @@ -3,35 +3,38 @@ from __future__ import absolute_import import os +from . import LookupHandler from ...util import read_value_from_path TYPE_NAME = "envvar" -def handler(value, **kwargs): - """Retrieve an environment variable. +class EnvvarLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Retrieve an environment variable. 
- For example: + For example: - # In stacker we would reference the environment variable like this: - conf_key: ${envvar ENV_VAR_NAME} + # In stacker we would reference the environment variable like this: + conf_key: ${envvar ENV_VAR_NAME} - You can optionally store the value in a file, ie: + You can optionally store the value in a file, ie: - $ cat envvar_value.txt - ENV_VAR_NAME + $ cat envvar_value.txt + ENV_VAR_NAME - and reference it within stacker (NOTE: the path should be relative to - the stacker config file): + and reference it within stacker (NOTE: the path should be relative + to the stacker config file): - conf_key: ${envvar file://envvar_value.txt} + conf_key: ${envvar file://envvar_value.txt} - # Both of the above would resolve to - conf_key: ENV_VALUE - """ - value = read_value_from_path(value) + # Both of the above would resolve to + conf_key: ENV_VALUE + """ + value = read_value_from_path(value) - try: - return os.environ[value] - except KeyError: - raise ValueError('EnvVar "{}" does not exist'.format(value)) + try: + return os.environ[value] + except KeyError: + raise ValueError('EnvVar "{}" does not exist'.format(value)) diff --git a/stacker/lookups/handlers/file.py b/stacker/lookups/handlers/file.py index a57af6607..0eb87e74c 100644 --- a/stacker/lookups/handlers/file.py +++ b/stacker/lookups/handlers/file.py @@ -15,6 +15,7 @@ from troposphere import GenericHelperFn, Base64 +from . import LookupHandler from ...util import read_value_from_path @@ -23,93 +24,96 @@ _PARAMETER_PATTERN = re.compile(r'{{([::|\w]+)}}') -def handler(value, **kwargs): - """Translate a filename into the file contents. +class FileLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Translate a filename into the file contents. 
- Fields should use the following format:: + Fields should use the following format:: - : + : - For example:: + For example:: - # We've written a file to /some/path: - $ echo "hello there" > /some/path + # We've written a file to /some/path: + $ echo "hello there" > /some/path - # In stacker we would reference the contents of this file with the - # following - conf_key: ${file plain:file://some/path} + # In stacker we would reference the contents of this file with the + # following + conf_key: ${file plain:file://some/path} - # The above would resolve to - conf_key: hello there + # The above would resolve to + conf_key: hello there - # Or, if we used wanted a base64 encoded copy of the file data - conf_key: ${file base64:file://some/path} + # Or, if we used wanted a base64 encoded copy of the file data + conf_key: ${file base64:file://some/path} - # The above would resolve to - conf_key: aGVsbG8gdGhlcmUK + # The above would resolve to + conf_key: aGVsbG8gdGhlcmUK - Supported codecs: + Supported codecs: - - plain + - plain - - base64 - encode the plain text file at the given path with base64 - prior to returning it + - base64 - encode the plain text file at the given path with base64 + prior to returning it - - parameterized - the same as plain, but additionally supports - referencing template parameters to create userdata that's - supplemented with information from the template, as is commonly - needed in EC2 UserData. For example, given a template parameter of - BucketName, the file could contain the following text:: + - parameterized - the same as plain, but additionally supports + referencing template parameters to create userdata that's + supplemented with information from the template, as is commonly + needed in EC2 UserData. 
For example, given a template parameter + of BucketName, the file could contain the following text:: - #!/bin/sh - aws s3 sync s3://{{BucketName}}/somepath /somepath + #!/bin/sh + aws s3 sync s3://{{BucketName}}/somepath /somepath - and then you could use something like this in the YAML config file:: + and then you could use something like this in the YAML config + file:: - UserData: ${file parameterized:/path/to/file} + UserData: ${file parameterized:/path/to/file} - resulting in the UserData parameter being defined as:: + resulting in the UserData parameter being defined as:: - { "Fn::Join" : ["", [ - "#!/bin/sh\\naws s3 sync s3://", - {"Ref" : "BucketName"}, - "/somepath /somepath" - ]] } + { "Fn::Join" : ["", [ + "#!/bin/sh\\naws s3 sync s3://", + {"Ref" : "BucketName"}, + "/somepath /somepath" + ]] } - - parameterized-b64 - the same as parameterized, with the results - additionally wrapped in *{ "Fn::Base64": ... }* , which is what you - actually need for EC2 UserData + - parameterized-b64 - the same as parameterized, with the results + additionally wrapped in *{ "Fn::Base64": ... }* , which is what + you actually need for EC2 UserData - When using parameterized-b64 for UserData, you should use a variable - defined as such: + When using parameterized-b64 for UserData, you should use a variable + defined as such: - .. code-block:: python + .. code-block:: python - from troposphere import AWSHelperFn + from troposphere import AWSHelperFn - "UserData": { - "type": AWSHelperFn, - "description": "Instance user data", - "default": Ref("AWS::NoValue") - } + "UserData": { + "type": AWSHelperFn, + "description": "Instance user data", + "default": Ref("AWS::NoValue") + } - and then assign UserData in a LaunchConfiguration or Instance to - *self.get_variables()["UserData"]*. 
Note that we use AWSHelperFn as the - type because the parameterized-b64 codec returns either a Base64 or a - GenericHelperFn troposphere object - """ + and then assign UserData in a LaunchConfiguration or Instance to + *self.get_variables()["UserData"]*. Note that we use AWSHelperFn as the + type because the parameterized-b64 codec returns either a Base64 or a + GenericHelperFn troposphere object + """ - try: - codec, path = value.split(":", 1) - except ValueError: - raise TypeError( - "File value must be of the format" - " \":\" (got %s)" % (value) - ) + try: + codec, path = value.split(":", 1) + except ValueError: + raise TypeError( + "File value must be of the format" + " \":\" (got %s)" % (value) + ) - value = read_value_from_path(path) + value = read_value_from_path(path) - return CODECS[codec](value) + return CODECS[codec](value) def _parameterize_string(raw): diff --git a/stacker/lookups/handlers/hook_data.py b/stacker/lookups/handlers/hook_data.py index 8aca70915..c27f65b93 100644 --- a/stacker/lookups/handlers/hook_data.py +++ b/stacker/lookups/handlers/hook_data.py @@ -1,20 +1,26 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import + +from . import LookupHandler + + TYPE_NAME = "hook_data" -def handler(value, context, **kwargs): - """Returns the value of a key for a given hook in hook_data. +class HookDataLookup(LookupHandler): + @classmethod + def handle(cls, value, context, **kwargs): + """Returns the value of a key for a given hook in hook_data. - Format of value: + Format of value: - :: - """ - try: - hook_name, key = value.split("::") - except ValueError: - raise ValueError("Invalid value for hook_data: %s. Must be in " - ":: format." % value) + :: + """ + try: + hook_name, key = value.split("::") + except ValueError: + raise ValueError("Invalid value for hook_data: %s. Must be in " + ":: format." 
% value) - return context.hook_data[hook_name][key] + return context.hook_data[hook_name][key] diff --git a/stacker/lookups/handlers/kms.py b/stacker/lookups/handlers/kms.py index b5f654d65..ba80d2779 100644 --- a/stacker/lookups/handlers/kms.py +++ b/stacker/lookups/handlers/kms.py @@ -4,60 +4,64 @@ import codecs from stacker.session_cache import get_session +from . import LookupHandler from ...util import read_value_from_path TYPE_NAME = "kms" -def handler(value, **kwargs): - """Decrypt the specified value with a master key in KMS. +class KmsLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Decrypt the specified value with a master key in KMS. - kmssimple field types should be in the following format: + kmssimple field types should be in the following format: - [@] + [@] - Note: The region is optional, and defaults to the environment's - `AWS_DEFAULT_REGION` if not specified. + Note: The region is optional, and defaults to the environment's + `AWS_DEFAULT_REGION` if not specified. 
- For example: + For example: - # We use the aws cli to get the encrypted value for the string - # "PASSWORD" using the master key called "myStackerKey" in us-east-1 - $ aws --region us-east-1 kms encrypt --key-id alias/myStackerKey \ - --plaintext "PASSWORD" --output text --query CiphertextBlob + # We use the aws cli to get the encrypted value for the string + # "PASSWORD" using the master key called "myStackerKey" in + # us-east-1 + $ aws --region us-east-1 kms encrypt --key-id alias/myStackerKey \ + --plaintext "PASSWORD" --output text --query CiphertextBlob - CiD6bC8t2Y<...encrypted blob...> + CiD6bC8t2Y<...encrypted blob...> - # In stacker we would reference the encrypted value like: - conf_key: ${kms us-east-1@CiD6bC8t2Y<...encrypted blob...>} + # In stacker we would reference the encrypted value like: + conf_key: ${kms us-east-1@CiD6bC8t2Y<...encrypted blob...>} - You can optionally store the encrypted value in a file, ie: + You can optionally store the encrypted value in a file, ie: - kms_value.txt - us-east-1@CiD6bC8t2Y<...encrypted blob...> + kms_value.txt + us-east-1@CiD6bC8t2Y<...encrypted blob...> - and reference it within stacker (NOTE: the path should be relative to - the stacker config file): + and reference it within stacker (NOTE: the path should be relative + to the stacker config file): - conf_key: ${kms file://kms_value.txt} + conf_key: ${kms file://kms_value.txt} - # Both of the above would resolve to - conf_key: PASSWORD + # Both of the above would resolve to + conf_key: PASSWORD - """ - value = read_value_from_path(value) + """ + value = read_value_from_path(value) - region = None - if "@" in value: - region, value = value.split("@", 1) + region = None + if "@" in value: + region, value = value.split("@", 1) - kms = get_session(region).client('kms') + kms = get_session(region).client('kms') - # encode str value as an utf-8 bytestring for use with codecs.decode. 
- value = value.encode('utf-8') + # encode str value as an utf-8 bytestring for use with codecs.decode. + value = value.encode('utf-8') - # get raw but still encrypted value from base64 version. - decoded = codecs.decode(value, 'base64') + # get raw but still encrypted value from base64 version. + decoded = codecs.decode(value, 'base64') - # decrypt and return the plain text raw value. - return kms.decrypt(CiphertextBlob=decoded)["Plaintext"] + # decrypt and return the plain text raw value. + return kms.decrypt(CiphertextBlob=decoded)["Plaintext"] diff --git a/stacker/lookups/handlers/output.py b/stacker/lookups/handlers/output.py index e2fd9cea5..a40ba0fb3 100644 --- a/stacker/lookups/handlers/output.py +++ b/stacker/lookups/handlers/output.py @@ -1,32 +1,60 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import + +import re from collections import namedtuple +from . import LookupHandler + TYPE_NAME = "output" Output = namedtuple("Output", ("stack_name", "output_name")) -def handler(value, context=None, **kwargs): - """Fetch an output from the designated stack. +class OutputLookup(LookupHandler): + @classmethod + def handle(cls, value, context=None, **kwargs): + """Fetch an output from the designated stack. + + Args: + value (str): string with the following format: + ::, ie. some-stack::SomeOutput + context (:class:`stacker.context.Context`): stacker context + + Returns: + str: output from the specified stack - Args: - value (str): string with the following format: - ::, ie. 
some-stack::SomeOutput - context (:class:`stacker.context.Context`): stacker context + """ - Returns: - str: output from the specified stack + if context is None: + raise ValueError('Context is required') - """ + d = deconstruct(value) + stack = context.get_stack(d.stack_name) + return stack.outputs[d.output_name] - if context is None: - raise ValueError('Context is required') + @classmethod + def dependencies(cls, lookup_data): + # try to get the stack name + stack_name = '' + for data_item in lookup_data: + if not data_item.resolved(): + # We encountered an unresolved substitution. + # StackName is calculated dynamically based on context: + # e.g. ${output ${default var::source}::name} + # Stop here + return set() + stack_name = stack_name + data_item.value() + match = re.search(r'::', stack_name) + if match: + stack_name = stack_name[0:match.start()] + return {stack_name} + # else: try to append the next item - d = deconstruct(value) - stack = context.get_stack(d.stack_name) - return stack.outputs[d.output_name] + # We added all lookup_data, and still couldn't find a `::`... + # Probably an error... + return set() def deconstruct(value): diff --git a/stacker/lookups/handlers/rxref.py b/stacker/lookups/handlers/rxref.py index 6d0ecd61e..858a13a3d 100644 --- a/stacker/lookups/handlers/rxref.py +++ b/stacker/lookups/handlers/rxref.py @@ -14,31 +14,34 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +from . import LookupHandler from .output import deconstruct TYPE_NAME = "rxref" -def handler(value, provider=None, context=None, **kwargs): - """Fetch an output from the designated stack. +class RxrefLookup(LookupHandler): + @classmethod + def handle(cls, value, provider=None, context=None, **kwargs): + """Fetch an output from the designated stack. - Args: - value (str): string with the following format: - ::, ie. 
some-stack::SomeOutput - provider (:class:`stacker.provider.base.BaseProvider`): subclass of the - base provider - context (:class:`stacker.context.Context`): stacker context + Args: + value (str): string with the following format: + ::, ie. some-stack::SomeOutput + provider (:class:`stacker.provider.base.BaseProvider`): subclass of + the base provider + context (:class:`stacker.context.Context`): stacker context - Returns: - str: output from the specified stack - """ + Returns: + str: output from the specified stack + """ - if provider is None: - raise ValueError('Provider is required') - if context is None: - raise ValueError('Context is required') + if provider is None: + raise ValueError('Provider is required') + if context is None: + raise ValueError('Context is required') - d = deconstruct(value) - stack_fqn = context.get_fqn(d.stack_name) - output = provider.get_output(stack_fqn, d.output_name) - return output + d = deconstruct(value) + stack_fqn = context.get_fqn(d.stack_name) + output = provider.get_output(stack_fqn, d.output_name) + return output diff --git a/stacker/lookups/handlers/split.py b/stacker/lookups/handlers/split.py index f178d1187..8908c7002 100644 --- a/stacker/lookups/handlers/split.py +++ b/stacker/lookups/handlers/split.py @@ -1,36 +1,40 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +from . import LookupHandler TYPE_NAME = "split" -def handler(value, **kwargs): - """Split the supplied string on the given delimiter, providing a list. +class SplitLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Split the supplied string on the given delimiter, providing a list. 
- Format of value: + Format of value: - :: + :: - For example: + For example: - Subnets: ${split ,::subnet-1,subnet-2,subnet-3} + Subnets: ${split ,::subnet-1,subnet-2,subnet-3} - Would result in the variable `Subnets` getting a list consisting of: + Would result in the variable `Subnets` getting a list consisting of: - ["subnet-1", "subnet-2", "subnet-3"] + ["subnet-1", "subnet-2", "subnet-3"] - This is particularly useful when getting an output from another stack that - contains a list. For example, the standard vpc blueprint outputs the list - of Subnets it creates as a pair of Outputs (PublicSubnets, PrivateSubnets) - that are comma separated, so you could use this in your config: + This is particularly useful when getting an output from another stack + that contains a list. For example, the standard vpc blueprint outputs + the list of Subnets it creates as a pair of Outputs (PublicSubnets, + PrivateSubnets) that are comma separated, so you could use this in your + config: - Subnets: ${split ,::${output vpc::PrivateSubnets}} - """ + Subnets: ${split ,::${output vpc::PrivateSubnets}} + """ - try: - delimiter, text = value.split("::", 1) - except ValueError: - raise ValueError("Invalid value for split: %s. Must be in " - ":: format." % value) + try: + delimiter, text = value.split("::", 1) + except ValueError: + raise ValueError("Invalid value for split: %s. Must be in " + ":: format." % value) - return text.split(delimiter) + return text.split(delimiter) diff --git a/stacker/lookups/handlers/ssmstore.py b/stacker/lookups/handlers/ssmstore.py index 0490eb592..2da724d30 100644 --- a/stacker/lookups/handlers/ssmstore.py +++ b/stacker/lookups/handlers/ssmstore.py @@ -5,55 +5,58 @@ from stacker.session_cache import get_session +from . import LookupHandler from ...util import read_value_from_path TYPE_NAME = "ssmstore" -def handler(value, **kwargs): - """Retrieve (and decrypt if applicable) a parameter from - AWS SSM Parameter Store. 
+class SsmstoreLookup(LookupHandler): + @classmethod + def handle(cls, value, **kwargs): + """Retrieve (and decrypt if applicable) a parameter from + AWS SSM Parameter Store. - ssmstore field types should be in the following format: + ssmstore field types should be in the following format: - [@]ssmkey + [@]ssmkey - Note: The region is optional, and defaults to us-east-1 if not given. + Note: The region is optional, and defaults to us-east-1 if not given. - For example: + For example: - # In stacker we would reference the encrypted value like: - conf_key: ${ssmstore us-east-1@ssmkey} + # In stacker we would reference the encrypted value like: + conf_key: ${ssmstore us-east-1@ssmkey} - You can optionally store the value in a file, ie: + You can optionally store the value in a file, ie: - ssmstore_value.txt - us-east-1@ssmkey + ssmstore_value.txt + us-east-1@ssmkey - and reference it within stacker (NOTE: the path should be relative to - the stacker config file): + and reference it within stacker (NOTE: the path should be relative + to the stacker config file): - conf_key: ${ssmstore file://ssmstore_value.txt} + conf_key: ${ssmstore file://ssmstore_value.txt} - # Both of the above would resolve to - conf_key: PASSWORD + # Both of the above would resolve to + conf_key: PASSWORD - """ - value = read_value_from_path(value) + """ + value = read_value_from_path(value) - region = "us-east-1" - if "@" in value: - region, value = value.split("@", 1) + region = "us-east-1" + if "@" in value: + region, value = value.split("@", 1) - client = get_session(region).client("ssm") - response = client.get_parameters( - Names=[ - value, - ], - WithDecryption=True - ) - if 'Parameters' in response: - return str(response['Parameters'][0]['Value']) + client = get_session(region).client("ssm") + response = client.get_parameters( + Names=[ + value, + ], + WithDecryption=True + ) + if 'Parameters' in response: + return str(response['Parameters'][0]['Value']) - raise ValueError('SSMKey "{}" 
does not exist in region {}'.format(value, - region)) + raise ValueError('SSMKey "{}" does not exist in region {}'.format( + value, region)) diff --git a/stacker/lookups/handlers/xref.py b/stacker/lookups/handlers/xref.py index 44c9bd30b..a318d252b 100644 --- a/stacker/lookups/handlers/xref.py +++ b/stacker/lookups/handlers/xref.py @@ -13,28 +13,31 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +from . import LookupHandler from .output import deconstruct TYPE_NAME = "xref" -def handler(value, provider=None, **kwargs): - """Fetch an output from the designated stack. +class XrefLookup(LookupHandler): + @classmethod + def handle(cls, value, provider=None, **kwargs): + """Fetch an output from the designated stack. - Args: - value (str): string with the following format: - ::, ie. some-stack::SomeOutput - provider (:class:`stacker.provider.base.BaseProvider`): subclass of the - base provider + Args: + value (str): string with the following format: + ::, ie. 
some-stack::SomeOutput + provider (:class:`stacker.provider.base.BaseProvider`): subclass of + the base provider - Returns: - str: output from the specified stack - """ + Returns: + str: output from the specified stack + """ - if provider is None: - raise ValueError('Provider is required') + if provider is None: + raise ValueError('Provider is required') - d = deconstruct(value) - stack_fqn = d.stack_name - output = provider.get_output(stack_fqn, d.output_name) - return output + d = deconstruct(value) + stack_fqn = d.stack_name + output = provider.get_output(stack_fqn, d.output_name) + return output diff --git a/stacker/lookups/registry.py b/stacker/lookups/registry.py index 988c2363d..7d0fab46d 100644 --- a/stacker/lookups/registry.py +++ b/stacker/lookups/registry.py @@ -1,6 +1,10 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import + +import logging +import warnings + from past.builtins import basestring from ..exceptions import UnknownLookupType, FailedVariableLookup @@ -34,6 +38,19 @@ def register_lookup_handler(lookup_type, handler_or_path): if isinstance(handler_or_path, basestring): handler = load_object_from_string(handler_or_path) LOOKUP_HANDLERS[lookup_type] = handler + if type(handler) != type: + # Hander is a not a new-style handler + logger = logging.getLogger(__name__) + logger.warning("Registering lookup `%s`: Please upgrade to use the " + "new style of Lookups." % lookup_type) + warnings.warn( + # For some reason, this does not show up... + # Leaving it in anyway + "Lookup `%s`: Please upgrade to use the new style of Lookups" + "." 
% lookup_type, + DeprecationWarning, + stacklevel=2, + ) def unregister_lookup_handler(lookup_type): @@ -80,15 +97,15 @@ def resolve_lookups(variable, context, provider): return resolved_lookups -register_lookup_handler(output.TYPE_NAME, output.handler) -register_lookup_handler(kms.TYPE_NAME, kms.handler) -register_lookup_handler(ssmstore.TYPE_NAME, ssmstore.handler) -register_lookup_handler(envvar.TYPE_NAME, envvar.handler) -register_lookup_handler(xref.TYPE_NAME, xref.handler) -register_lookup_handler(rxref.TYPE_NAME, rxref.handler) -register_lookup_handler(ami.TYPE_NAME, ami.handler) -register_lookup_handler(file_handler.TYPE_NAME, file_handler.handler) -register_lookup_handler(split.TYPE_NAME, split.handler) -register_lookup_handler(default.TYPE_NAME, default.handler) -register_lookup_handler(hook_data.TYPE_NAME, hook_data.handler) -register_lookup_handler(dynamodb.TYPE_NAME, dynamodb.handler) +register_lookup_handler(output.TYPE_NAME, output.OutputLookup) +register_lookup_handler(kms.TYPE_NAME, kms.KmsLookup) +register_lookup_handler(ssmstore.TYPE_NAME, ssmstore.SsmstoreLookup) +register_lookup_handler(envvar.TYPE_NAME, envvar.EnvvarLookup) +register_lookup_handler(xref.TYPE_NAME, xref.XrefLookup) +register_lookup_handler(rxref.TYPE_NAME, rxref.RxrefLookup) +register_lookup_handler(ami.TYPE_NAME, ami.AmiLookup) +register_lookup_handler(file_handler.TYPE_NAME, file_handler.FileLookup) +register_lookup_handler(split.TYPE_NAME, split.SplitLookup) +register_lookup_handler(default.TYPE_NAME, default.DefaultLookup) +register_lookup_handler(hook_data.TYPE_NAME, hook_data.HookDataLookup) +register_lookup_handler(dynamodb.TYPE_NAME, dynamodb.DynamodbLookup) diff --git a/stacker/stack.py b/stacker/stack.py index ffdeeba1f..c0c877419 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -9,13 +9,8 @@ Variable, resolve_variables, ) -from .lookups.handlers.output import ( - TYPE_NAME as OUTPUT_LOOKUP_TYPE_NAME, - deconstruct, -) from .blueprints.raw import 
RawTemplateBlueprint -from .exceptions import FailedVariableLookup def _gather_variables(stack_def): @@ -99,22 +94,13 @@ def requires(self): # Add any dependencies based on output lookups for variable in self.variables: - for lookup in variable.lookups: - if lookup.type == OUTPUT_LOOKUP_TYPE_NAME: - - try: - d = deconstruct(lookup.input) - except ValueError as e: - raise FailedVariableLookup(self.name, lookup, e) - - if d.stack_name == self.name: - message = ( - "Variable %s in stack %s has a ciruclar reference " - "within lookup: %s" - ) % (variable.name, self.name, lookup.raw) - raise ValueError(message) - requires.add(d.stack_name) - + deps = variable.dependencies() + if self.name in deps: + message = ( + "Variable %s in stack %s has a ciruclar reference" + ) % (variable.name, self.name) + raise ValueError(message) + requires.update(deps) return requires @property diff --git a/stacker/tests/blueprints/test_base.py b/stacker/tests/blueprints/test_base.py index 141be10c7..52187aaa6 100644 --- a/stacker/tests/blueprints/test_base.py +++ b/stacker/tests/blueprints/test_base.py @@ -41,7 +41,7 @@ from stacker.variables import Variable from stacker.lookups import register_lookup_handler -from ..factories import mock_lookup, mock_context +from ..factories import mock_context def mock_lookup_handler(value, provider=None, context=None, fqn=False, @@ -424,11 +424,8 @@ class TestBlueprint(Blueprint): Variable("Param2", "${output other-stack::Output}"), Variable("Param3", 3), ] - resolved_lookups = { - mock_lookup("other-stack::Output", "output"): "Test Output", - } - for var in variables: - var.replace(resolved_lookups) + + variables[1]._value._resolve("Test Output") blueprint.resolve_variables(variables) self.assertEqual(blueprint.resolved_variables["Param1"], 1) @@ -441,15 +438,14 @@ class TestBlueprint(Blueprint): "Param1": {"type": list}, } + def return_list_something(*_args, **_kwargs): + return ["something"] + + register_lookup_handler("custom", return_list_something) 
blueprint = TestBlueprint(name="test", context=MagicMock()) variables = [Variable("Param1", "${custom non-string-return-val}")] - lookup = mock_lookup("non-string-return-val", "custom", - "custom non-string-return-val") - resolved_lookups = { - lookup: ["something"], - } for var in variables: - var.replace(resolved_lookups) + var._value.resolve({}, {}) blueprint.resolve_variables(variables) self.assertEqual(blueprint.resolved_variables["Param1"], ["something"]) @@ -460,15 +456,14 @@ class TestBlueprint(Blueprint): "Param1": {"type": Base64}, } + def return_obj(*_args, **_kwargs): + return Base64("test") + + register_lookup_handler("custom", return_obj) blueprint = TestBlueprint(name="test", context=MagicMock()) variables = [Variable("Param1", "${custom non-string-return-val}")] - lookup = mock_lookup("non-string-return-val", "custom", - "custom non-string-return-val") - resolved_lookups = { - lookup: Base64("test"), - } for var in variables: - var.replace(resolved_lookups) + var._value.resolve({}, {}) blueprint.resolve_variables(variables) self.assertEqual(blueprint.resolved_variables["Param1"].data, @@ -480,20 +475,17 @@ class TestBlueprint(Blueprint): "Param1": {"type": list}, } - variables = [ - Variable( - "Param1", - "${custom non-string-return-val},${output some-stack::Output}", - ) - ] - lookup = mock_lookup("non-string-return-val", "custom", - "custom non-string-return-val") - resolved_lookups = { - lookup: ["something"], - } + def return_list_something(*_args, **_kwargs): + return ["something"] + + register_lookup_handler("custom", return_list_something) + variable = Variable( + "Param1", + "${custom non-string-return-val},${output some-stack::Output}", + ) + variable._value[0].resolve({}, {}) with self.assertRaises(InvalidLookupCombination): - for var in variables: - var.replace(resolved_lookups) + variable.value() def test_get_variables(self): class TestBlueprint(Blueprint): diff --git a/stacker/tests/lookups/handlers/test_ami.py 
b/stacker/tests/lookups/handlers/test_ami.py index f0f8e770e..0e34b7b47 100644 --- a/stacker/tests/lookups/handlers/test_ami.py +++ b/stacker/tests/lookups/handlers/test_ami.py @@ -4,7 +4,7 @@ import unittest import mock from botocore.stub import Stubber -from stacker.lookups.handlers.ami import handler, ImageNotFound +from stacker.lookups.handlers.ami import AmiLookup, ImageNotFound import boto3 from stacker.tests.factories import SessionStub, mock_provider @@ -40,7 +40,7 @@ def test_basic_lookup_single_image(self, mock_client): ) with self.stubber: - value = handler( + value = AmiLookup.handle( value="owners:self name_regex:Fake\sImage\s\d", provider=self.provider ) @@ -68,7 +68,7 @@ def test_basic_lookup_with_region(self, mock_client): ) with self.stubber: - value = handler( + value = AmiLookup.handle( value="us-west-1@owners:self name_regex:Fake\sImage\s\d", provider=self.provider ) @@ -105,7 +105,7 @@ def test_basic_lookup_multiple_images(self, mock_client): ) with self.stubber: - value = handler( + value = AmiLookup.handle( value="owners:self name_regex:Fake\sImage\s\d", provider=self.provider ) @@ -142,7 +142,7 @@ def test_basic_lookup_multiple_images_name_match(self, mock_client): ) with self.stubber: - value = handler( + value = AmiLookup.handle( value="owners:self name_regex:Fake\sImage\s\d", provider=self.provider ) @@ -160,7 +160,7 @@ def test_basic_lookup_no_matching_images(self, mock_client): with self.stubber: with self.assertRaises(ImageNotFound): - handler( + AmiLookup.handle( value="owners:self name_regex:Fake\sImage\s\d", provider=self.provider ) @@ -188,7 +188,7 @@ def test_basic_lookup_no_matching_images_from_name(self, mock_client): with self.stubber: with self.assertRaises(ImageNotFound): - handler( + AmiLookup.handle( value="owners:self name_regex:MyImage\s\d", provider=self.provider ) diff --git a/stacker/tests/lookups/handlers/test_default.py b/stacker/tests/lookups/handlers/test_default.py index 5bca2df5b..a59ccd6d8 100644 --- 
a/stacker/tests/lookups/handlers/test_default.py +++ b/stacker/tests/lookups/handlers/test_default.py @@ -5,7 +5,7 @@ import unittest from stacker.context import Context -from stacker.lookups.handlers.default import handler +from stacker.lookups.handlers.default import DefaultLookup class TestDefaultLookup(unittest.TestCase): @@ -20,19 +20,19 @@ def setUp(self): def test_env_var_present(self): lookup_val = "env_var::fallback" - value = handler(lookup_val, - provider=self.provider, - context=self.context) + value = DefaultLookup.handle(lookup_val, + provider=self.provider, + context=self.context) assert value == 'val_in_env' def test_env_var_missing(self): lookup_val = "bad_env_var::fallback" - value = handler(lookup_val, - provider=self.provider, - context=self.context) + value = DefaultLookup.handle(lookup_val, + provider=self.provider, + context=self.context) assert value == 'fallback' def test_invalid_value(self): value = "env_var:fallback" with self.assertRaises(ValueError): - handler(value) + DefaultLookup.handle(value) diff --git a/stacker/tests/lookups/handlers/test_dynamodb.py b/stacker/tests/lookups/handlers/test_dynamodb.py index 6cda188ce..44b6cc693 100644 --- a/stacker/tests/lookups/handlers/test_dynamodb.py +++ b/stacker/tests/lookups/handlers/test_dynamodb.py @@ -4,7 +4,7 @@ import unittest import mock from botocore.stub import Stubber -from stacker.lookups.handlers.dynamodb import handler +from stacker.lookups.handlers.dynamodb import DynamodbLookup import boto3 from stacker.tests.factories import SessionStub @@ -37,7 +37,7 @@ def test_dynamodb_handler(self, mock_client): self.get_parameters_response, expected_params) with self.stubber: - value = handler(base_lookup_key) + value = DynamodbLookup.handle(base_lookup_key) self.assertEqual(value, base_lookup_key_valid) @mock.patch('stacker.lookups.handlers.dynamodb.get_session', @@ -57,7 +57,7 @@ def test_dynamodb_number_handler(self, mock_client): self.get_parameters_response, expected_params) with 
self.stubber: - value = handler(base_lookup_key) + value = DynamodbLookup.handle(base_lookup_key) self.assertEqual(value, base_lookup_key_valid) @mock.patch('stacker.lookups.handlers.dynamodb.get_session', @@ -77,7 +77,7 @@ def test_dynamodb_list_handler(self, mock_client): self.get_parameters_response, expected_params) with self.stubber: - value = handler(base_lookup_key) + value = DynamodbLookup.handle(base_lookup_key) self.assertEqual(value, base_lookup_key_valid) @mock.patch('stacker.lookups.handlers.dynamodb.get_session', @@ -96,7 +96,7 @@ def test_dynamodb_empty_table_handler(self, mock_client): expected_params) with self.stubber: try: - handler(base_lookup_key) + DynamodbLookup.handle(base_lookup_key) except ValueError as e: self.assertEqual( 'Please make sure to include a dynamodb table name', @@ -117,7 +117,7 @@ def test_dynamodb_missing_table_handler(self, mock_client): expected_params) with self.stubber: try: - handler(base_lookup_key) + DynamodbLookup.handle(base_lookup_key) except ValueError as e: self.assertEqual( 'Please make sure to include a tablename', @@ -140,7 +140,7 @@ def test_dynamodb_invalid_table_handler(self, mock_client): expected_params=expected_params) with self.stubber: try: - handler(base_lookup_key) + DynamodbLookup.handle(base_lookup_key) except ValueError as e: self.assertEqual( 'Cannot find the dynamodb table: FakeTable', @@ -164,7 +164,7 @@ def test_dynamodb_invalid_partition_key_handler(self, mock_client): with self.stubber: try: - handler(base_lookup_key) + DynamodbLookup.handle(base_lookup_key) except ValueError as e: self.assertEqual( 'No dynamodb record matched the partition key: FakeKey', @@ -187,7 +187,7 @@ def test_dynamodb_invalid_partition_val_handler(self, mock_client): expected_params) with self.stubber: try: - handler(base_lookup_key) + DynamodbLookup.handle(base_lookup_key) except ValueError as e: self.assertEqual( 'The dynamodb record could not be found using ' diff --git 
a/stacker/tests/lookups/handlers/test_envvar.py b/stacker/tests/lookups/handlers/test_envvar.py index c3aba7022..71c9bf8a5 100644 --- a/stacker/tests/lookups/handlers/test_envvar.py +++ b/stacker/tests/lookups/handlers/test_envvar.py @@ -2,7 +2,7 @@ from __future__ import division from __future__ import absolute_import import unittest -from stacker.lookups.handlers.envvar import handler +from stacker.lookups.handlers.envvar import EnvvarLookup import os @@ -15,9 +15,9 @@ def setUp(self): os.environ[self.testkey] = self.testval def test_valid_envvar(self): - value = handler(self.testkey) + value = EnvvarLookup.handle(self.testkey) self.assertEqual(value, self.testval) def test_invalid_envvar(self): with self.assertRaises(ValueError): - handler(self.invalidtestkey) + EnvvarLookup.handle(self.invalidtestkey) diff --git a/stacker/tests/lookups/handlers/test_file.py b/stacker/tests/lookups/handlers/test_file.py index 312f71ab8..5fb27b809 100644 --- a/stacker/tests/lookups/handlers/test_file.py +++ b/stacker/tests/lookups/handlers/test_file.py @@ -11,7 +11,7 @@ import json from troposphere import Base64, GenericHelperFn, Join -from stacker.lookups.handlers.file import (json_codec, handler, +from stacker.lookups.handlers.file import (json_codec, FileLookup, parameterized_codec, yaml_codec) @@ -113,13 +113,13 @@ def test_json_codec_parameterized(self): @mock.patch('stacker.lookups.handlers.file.read_value_from_path', return_value='') def test_file_loaded(self, content_mock): - handler(u'plain:file://tmp/test') + FileLookup.handle(u'plain:file://tmp/test') content_mock.assert_called_with(u'file://tmp/test') @mock.patch('stacker.lookups.handlers.file.read_value_from_path', return_value=u'Hello, world') def test_handler_plain(self, _): - out = handler(u'plain:file://tmp/test') + out = FileLookup.handle(u'plain:file://tmp/test') self.assertEqual(u'Hello, world', out) @mock.patch('stacker.lookups.handlers.file.read_value_from_path') @@ -128,7 +128,7 @@ def 
test_handler_b64(self, content_mock): encoded = base64.b64encode(plain.encode('utf8')) content_mock.return_value = plain - out = handler(u'base64:file://tmp/test') + out = FileLookup.handle(u'base64:file://tmp/test') self.assertEqual(encoded, out) @mock.patch('stacker.lookups.handlers.file.parameterized_codec') @@ -137,7 +137,7 @@ def test_handler_parameterized(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'parameterized:file://tmp/test') + out = FileLookup.handle(u'parameterized:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, False) self.assertEqual(result, out) @@ -148,7 +148,7 @@ def test_handler_parameterized_b64(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'parameterized-b64:file://tmp/test') + out = FileLookup.handle(u'parameterized-b64:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, True) self.assertEqual(result, out) @@ -159,7 +159,7 @@ def test_handler_yaml(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'yaml:file://tmp/test') + out = FileLookup.handle(u'yaml:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, parameterized=False) @@ -171,7 +171,7 @@ def test_handler_yaml_parameterized(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'yaml-parameterized:file://tmp/test') + out = FileLookup.handle(u'yaml-parameterized:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, parameterized=True) @@ -183,7 +183,7 @@ def test_handler_json(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'json:file://tmp/test') + out = FileLookup.handle(u'json:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, parameterized=False) @@ -195,7 +195,7 @@ def 
test_handler_json_parameterized(self, content_mock, codec_mock): result = mock.Mock() codec_mock.return_value = result - out = handler(u'json-parameterized:file://tmp/test') + out = FileLookup.handle(u'json-parameterized:file://tmp/test') codec_mock.assert_called_once_with(content_mock.return_value, parameterized=True) @@ -204,4 +204,4 @@ def test_handler_json_parameterized(self, content_mock, codec_mock): @mock.patch('stacker.lookups.handlers.file.read_value_from_path') def test_unknown_codec(self, _): with self.assertRaises(KeyError): - handler(u'bad:file://tmp/test') + FileLookup.handle(u'bad:file://tmp/test') diff --git a/stacker/tests/lookups/handlers/test_hook_data.py b/stacker/tests/lookups/handlers/test_hook_data.py index e9978fed5..6dc0014d1 100644 --- a/stacker/tests/lookups/handlers/test_hook_data.py +++ b/stacker/tests/lookups/handlers/test_hook_data.py @@ -5,7 +5,7 @@ from stacker.context import Context -from stacker.lookups.handlers.hook_data import handler +from stacker.lookups.handlers.hook_data import HookDataLookup class TestHookDataLookup(unittest.TestCase): @@ -15,13 +15,13 @@ def setUp(self): self.ctx.set_hook_data("fake_hook", {"result": "good"}) def test_valid_hook_data(self): - value = handler("fake_hook::result", context=self.ctx) + value = HookDataLookup.handle("fake_hook::result", context=self.ctx) self.assertEqual(value, "good") def test_invalid_hook_data(self): with self.assertRaises(KeyError): - handler("fake_hook::bad_key", context=self.ctx) + HookDataLookup.handle("fake_hook::bad_key", context=self.ctx) def test_bad_value_hook_data(self): with self.assertRaises(ValueError): - handler("fake_hook", context=self.ctx) + HookDataLookup.handle("fake_hook", context=self.ctx) diff --git a/stacker/tests/lookups/handlers/test_kms.py b/stacker/tests/lookups/handlers/test_kms.py index 955bff0e9..bb199a639 100644 --- a/stacker/tests/lookups/handlers/test_kms.py +++ b/stacker/tests/lookups/handlers/test_kms.py @@ -8,7 +8,7 @@ import boto3 -from 
stacker.lookups.handlers.kms import handler +from stacker.lookups.handlers.kms import KmsLookup class TestKMSHandler(unittest.TestCase): @@ -25,12 +25,12 @@ def setUp(self): def test_kms_handler(self): with mock_kms(): - decrypted = handler(self.secret) + decrypted = KmsLookup.handle(self.secret) self.assertEqual(decrypted, self.plain) def test_kms_handler_with_region(self): region = "us-east-1" value = "%s@%s" % (region, self.secret) with mock_kms(): - decrypted = handler(value) + decrypted = KmsLookup.handle(value) self.assertEqual(decrypted, self.plain) diff --git a/stacker/tests/lookups/handlers/test_output.py b/stacker/tests/lookups/handlers/test_output.py index cd6903eb4..3891dfe25 100644 --- a/stacker/tests/lookups/handlers/test_output.py +++ b/stacker/tests/lookups/handlers/test_output.py @@ -6,7 +6,7 @@ from stacker.stack import Stack from ...factories import generate_definition -from stacker.lookups.handlers.output import handler +from stacker.lookups.handlers.output import OutputLookup class TestOutputHandler(unittest.TestCase): @@ -21,7 +21,8 @@ def test_output_handler(self): stack.set_outputs({ "SomeOutput": "Test Output"}) self.context.get_stack.return_value = stack - value = handler("stack-name::SomeOutput", context=self.context) + value = OutputLookup.handle("stack-name::SomeOutput", + context=self.context) self.assertEqual(value, "Test Output") self.assertEqual(self.context.get_stack.call_count, 1) args = self.context.get_stack.call_args diff --git a/stacker/tests/lookups/handlers/test_rxref.py b/stacker/tests/lookups/handlers/test_rxref.py index 7d29f6526..b5e7cb828 100644 --- a/stacker/tests/lookups/handlers/test_rxref.py +++ b/stacker/tests/lookups/handlers/test_rxref.py @@ -4,7 +4,7 @@ from mock import MagicMock import unittest -from stacker.lookups.handlers.rxref import handler +from stacker.lookups.handlers.rxref import RxrefLookup from ....context import Context from ....config import Config @@ -20,8 +20,9 @@ def setUp(self): def 
test_rxref_handler(self): self.provider.get_output.return_value = "Test Output" - value = handler("fully-qualified-stack-name::SomeOutput", - provider=self.provider, context=self.context) + value = RxrefLookup.handle("fully-qualified-stack-name::SomeOutput", + provider=self.provider, + context=self.context) self.assertEqual(value, "Test Output") args = self.provider.get_output.call_args diff --git a/stacker/tests/lookups/handlers/test_split.py b/stacker/tests/lookups/handlers/test_split.py index cdc847ce9..990799bb2 100644 --- a/stacker/tests/lookups/handlers/test_split.py +++ b/stacker/tests/lookups/handlers/test_split.py @@ -3,21 +3,21 @@ from __future__ import absolute_import import unittest -from stacker.lookups.handlers.split import handler +from stacker.lookups.handlers.split import SplitLookup class TestSplitLookup(unittest.TestCase): def test_single_character_split(self): value = ",::a,b,c" expected = ["a", "b", "c"] - assert handler(value) == expected + assert SplitLookup.handle(value) == expected def test_multi_character_split(self): value = ",,::a,,b,c" expected = ["a", "b,c"] - assert handler(value) == expected + assert SplitLookup.handle(value) == expected def test_invalid_value_split(self): value = ",:a,b,c" with self.assertRaises(ValueError): - handler(value) + SplitLookup.handle(value) diff --git a/stacker/tests/lookups/handlers/test_ssmstore.py b/stacker/tests/lookups/handlers/test_ssmstore.py index 020f79772..daff2444d 100644 --- a/stacker/tests/lookups/handlers/test_ssmstore.py +++ b/stacker/tests/lookups/handlers/test_ssmstore.py @@ -5,7 +5,7 @@ import unittest import mock from botocore.stub import Stubber -from stacker.lookups.handlers.ssmstore import handler +from stacker.lookups.handlers.ssmstore import SsmstoreLookup import boto3 from stacker.tests.factories import SessionStub @@ -46,7 +46,7 @@ def test_ssmstore_handler(self, mock_client): self.get_parameters_response, self.expected_params) with self.stubber: - value = handler(self.ssmkey) + 
value = SsmstoreLookup.handle(self.ssmkey) self.assertEqual(value, self.ssmvalue) self.assertIsInstance(value, str) @@ -58,7 +58,7 @@ def test_ssmstore_invalid_value_handler(self, mock_client): self.expected_params) with self.stubber: try: - handler(self.ssmkey) + SsmstoreLookup.handle(self.ssmkey) except ValueError: assert True @@ -71,5 +71,5 @@ def test_ssmstore_handler_with_region(self, mock_client): region = "us-east-1" temp_value = "%s@%s" % (region, self.ssmkey) with self.stubber: - value = handler(temp_value) + value = SsmstoreLookup.handle(temp_value) self.assertEqual(value, self.ssmvalue) diff --git a/stacker/tests/lookups/handlers/test_xref.py b/stacker/tests/lookups/handlers/test_xref.py index 91cfadacf..cb611ed65 100644 --- a/stacker/tests/lookups/handlers/test_xref.py +++ b/stacker/tests/lookups/handlers/test_xref.py @@ -4,7 +4,7 @@ from mock import MagicMock import unittest -from stacker.lookups.handlers.xref import handler +from stacker.lookups.handlers.xref import XrefLookup class TestXrefHandler(unittest.TestCase): @@ -15,8 +15,9 @@ def setUp(self): def test_xref_handler(self): self.provider.get_output.return_value = "Test Output" - value = handler("fully-qualified-stack-name::SomeOutput", - provider=self.provider, context=self.context) + value = XrefLookup.handle("fully-qualified-stack-name::SomeOutput", + provider=self.provider, + context=self.context) self.assertEqual(value, "Test Output") self.assertEqual(self.context.get_fqn.call_count, 0) args = self.provider.get_output.call_args diff --git a/stacker/tests/lookups/test_registry.py b/stacker/tests/lookups/test_registry.py index 7ef338013..1dc0b41f1 100644 --- a/stacker/tests/lookups/test_registry.py +++ b/stacker/tests/lookups/test_registry.py @@ -3,19 +3,16 @@ from __future__ import absolute_import import unittest -from mock import patch, MagicMock +from mock import MagicMock from stacker.exceptions import ( UnknownLookupType, FailedVariableLookup, ) -from stacker.lookups.registry import ( - 
LOOKUP_HANDLERS, - resolve_lookups, -) +from stacker.lookups.registry import LOOKUP_HANDLERS -from stacker.variables import Variable +from stacker.variables import Variable, VariableValueLookup from ..factories import ( mock_context, @@ -43,31 +40,32 @@ def test_autoloaded_lookup_handlers(self): ) def test_resolve_lookups_string_unknown_lookup(self): - variable = Variable("MyVar", "${bad_lookup foo}") - with self.assertRaises(UnknownLookupType): - resolve_lookups(variable, self.ctx, self.provider) + Variable("MyVar", "${bad_lookup foo}") def test_resolve_lookups_list_unknown_lookup(self): - variable = Variable( - "MyVar", [ - "${bad_lookup foo}", "random string", - ] - ) - with self.assertRaises(UnknownLookupType): - resolve_lookups(variable, self.ctx, self.provider) + Variable( + "MyVar", [ + "${bad_lookup foo}", "random string", + ] + ) def resolve_lookups_with_output_handler_raise_valueerror(self, variable): """Mock output handler to throw ValueError, then run resolve_lookups on the given variable. 
""" mock_handler = MagicMock(side_effect=ValueError("Error")) - with patch.dict(LOOKUP_HANDLERS, {"output": mock_handler}): - with self.assertRaises(FailedVariableLookup) as cm: - resolve_lookups(variable, self.ctx, self.provider) - self.assertIsInstance(cm.exception.error, ValueError) + # find the only lookup in the variable + for value in variable._value: + if isinstance(value, VariableValueLookup): + value.handler = mock_handler + + with self.assertRaises(FailedVariableLookup) as cm: + variable.resolve(self.ctx, self.provider) + + self.assertIsInstance(cm.exception.error, ValueError) def test_resolve_lookups_string_failed_variable_lookup(self): variable = Variable("MyVar", "${output foo::bar}") diff --git a/stacker/tests/test_stack.py b/stacker/tests/test_stack.py index a51ccbc0b..c1bba0156 100644 --- a/stacker/tests/test_stack.py +++ b/stacker/tests/test_stack.py @@ -4,6 +4,7 @@ from mock import MagicMock import unittest +from stacker.lookups import register_lookup_handler from stacker.context import Context from stacker.config import Config from stacker.stack import Stack @@ -20,6 +21,7 @@ def setUp(self): definition=generate_definition("vpc", 1), context=self.context, ) + register_lookup_handler("noop", lambda **kwargs: "test") def test_stack_requires(self): definition = generate_definition( diff --git a/stacker/tests/test_variables.py b/stacker/tests/test_variables.py index 08daf0327..2b1acbc55 100644 --- a/stacker/tests/test_variables.py +++ b/stacker/tests/test_variables.py @@ -13,7 +13,7 @@ from stacker.stack import Stack -from .factories import mock_lookup, generate_definition +from .factories import generate_definition class TestVariables(unittest.TestCase): @@ -24,27 +24,11 @@ def setUp(self): def test_variable_replace_no_lookups(self): var = Variable("Param1", "2") - self.assertEqual(len(var.lookups), 0) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - } - var.replace(resolved_lookups) - 
self.assertEqual(var.value, "2") - - def test_variable_resolve_no_lookups(self): - var = Variable("Param1", "2") - self.assertEqual(len(var.lookups), 0) - var.resolve(self.context, self.provider) - self.assertTrue(var.resolved) self.assertEqual(var.value, "2") def test_variable_replace_simple_lookup(self): var = Variable("Param1", "${output fakeStack::FakeOutput}") - self.assertEqual(len(var.lookups), 1) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - } - var.replace(resolved_lookups) + var._value._resolve("resolved") self.assertEqual(var.value, "resolved") def test_variable_resolve_simple_lookup(self): @@ -59,32 +43,26 @@ def test_variable_resolve_simple_lookup(self): self.context.get_stack.return_value = stack var = Variable("Param1", "${output fakeStack::FakeOutput}") - self.assertEqual(len(var.lookups), 1) var.resolve(self.context, self.provider) self.assertTrue(var.resolved) self.assertEqual(var.value, "resolved") - self.assertEqual(len(var.lookups), 0) def test_variable_resolve_default_lookup_empty(self): var = Variable("Param1", "${default fakeStack::}") - self.assertEqual(len(var.lookups), 1) var.resolve(self.context, self.provider) self.assertTrue(var.resolved) self.assertEqual(var.value, "") - self.assertEqual(len(var.lookups), 0) def test_variable_replace_multiple_lookups_string(self): var = Variable( "Param1", - "url://${output fakeStack::FakeOutput}@" - "${output fakeStack::FakeOutput2}", + "url://" # 0 + "${output fakeStack::FakeOutput}" # 1 + "@" # 2 + "${output fakeStack::FakeOutput2}", # 3 ) - self.assertEqual(len(var.lookups), 2) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - mock_lookup("fakeStack::FakeOutput2", "output"): "resolved2", - } - var.replace(resolved_lookups) + var._value[1]._resolve("resolved") + var._value[3]._resolve("resolved2") self.assertEqual(var.value, "url://resolved@resolved2") def test_variable_resolve_multiple_lookups_string(self): @@ -93,7 
+71,6 @@ def test_variable_resolve_multiple_lookups_string(self): "url://${output fakeStack::FakeOutput}@" "${output fakeStack::FakeOutput2}", ) - self.assertEqual(len(var.lookups), 2) stack = Stack( definition=generate_definition("vpc", 1), @@ -110,23 +87,17 @@ def test_variable_resolve_multiple_lookups_string(self): def test_variable_replace_no_lookups_list(self): var = Variable("Param1", ["something", "here"]) - self.assertEqual(len(var.lookups), 0) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - } - var.replace(resolved_lookups) self.assertEqual(var.value, ["something", "here"]) def test_variable_replace_lookups_list(self): - value = ["something", "${output fakeStack::FakeOutput}", - "${output fakeStack::FakeOutput2}"] + value = ["something", # 0 + "${output fakeStack::FakeOutput}", # 1 + "${output fakeStack::FakeOutput2}" # 2 + ] var = Variable("Param1", value) - self.assertEqual(len(var.lookups), 2) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - mock_lookup("fakeStack::FakeOutput2", "output"): "resolved2", - } - var.replace(resolved_lookups) + + var._value[1]._resolve("resolved") + var._value[2]._resolve("resolved2") self.assertEqual(var.value, ["something", "resolved", "resolved2"]) def test_variable_replace_lookups_dict(self): @@ -135,12 +106,8 @@ def test_variable_replace_lookups_dict(self): "other": "${output fakeStack::FakeOutput2}", } var = Variable("Param1", value) - self.assertEqual(len(var.lookups), 2) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - mock_lookup("fakeStack::FakeOutput2", "output"): "resolved2", - } - var.replace(resolved_lookups) + var._value["something"]._resolve("resolved") + var._value["other"]._resolve("resolved2") self.assertEqual(var.value, {"something": "resolved", "other": "resolved2"}) @@ -157,13 +124,10 @@ def test_variable_replace_lookups_mixed(self): }, } var = Variable("Param1", value) - 
self.assertEqual(len(var.lookups), 3) - resolved_lookups = { - mock_lookup("fakeStack::FakeOutput", "output"): "resolved", - mock_lookup("fakeStack::FakeOutput2", "output"): "resolved2", - mock_lookup("fakeStack::FakeOutput3", "output"): "resolved3", - } - var.replace(resolved_lookups) + var._value["something"][0]._resolve("resolved") + var._value["here"]["other"]._resolve("resolved2") + var._value["here"]["same"]._resolve("resolved") + var._value["here"]["mixed"][1]._resolve("resolved3") self.assertEqual(var.value, { "something": [ "resolved", @@ -194,11 +158,6 @@ def mock_handler(value, context, provider, **kwargs): "Param1", "${lookup ${lookup ${output fakeStack::FakeOutput}}}", ) - self.assertEqual( - len(var.lookups), - 1, - "should only parse out the first complete lookup first", - ) var.resolve(self.context, self.provider) self.assertTrue(var.resolved) self.assertEqual(var.value, "looked up: looked up: resolved") diff --git a/stacker/variables.py b/stacker/variables.py index c27cdb899..5f49b2074 100644 --- a/stacker/variables.py +++ b/stacker/variables.py @@ -1,15 +1,17 @@ from __future__ import absolute_import from __future__ import print_function from __future__ import division + +import re + from past.builtins import basestring from builtins import object from string import Template -from .exceptions import InvalidLookupCombination -from .lookups import ( - extract_lookups, - resolve_lookups, -) +from .exceptions import InvalidLookupCombination, UnresolvedVariable, \ + UnknownLookupType, FailedVariableLookup, FailedLookup, \ + UnresolvedVariableValue, InvalidLookupConcatenation +from .lookups.registry import LOOKUP_HANDLERS class LookupTemplate(Template): @@ -18,53 +20,6 @@ class LookupTemplate(Template): idpattern = r'[_a-z][^\$\{\}]*' -def resolve_string(value, replacements): - """Resolve any lookups within a string. 
- - Args: - value (str): string value we're resolving lookups within - replacements (dict): resolved lookup values - - Returns: - str: value with any lookups resolved - - """ - lookups = extract_lookups(value) - for lookup in lookups: - lookup_value = replacements.get(lookup.raw) - if not isinstance(lookup_value, basestring): - if len(lookups) > 1: - raise InvalidLookupCombination(lookup, lookups, value) - return lookup_value - # we use safe_substitute to support resolving nested lookups - return LookupTemplate(value).safe_substitute(replacements) - - -def resolve(value, replacements): - """Recursively resolve any lookups within the data structure. - - Args: - value (Union[str, list, dict]): a structure that contains lookups - replacements: resolved lookup values - - Returns: - Union[str, list, dict]: value passed in with lookup values resolved - - """ - if isinstance(value, basestring): - return resolve_string(value, replacements) - elif isinstance(value, list): - resolved = [] - for v in value: - resolved.append(resolve(v, replacements)) - return resolved - elif isinstance(value, dict): - for key, v in value.items(): - value[key] = resolve(v, replacements) - return value - return value - - def resolve_variables(variables, context, provider): """Given a list of variables, resolve all of them. @@ -81,54 +36,37 @@ def resolve_variables(variables, context, provider): class Variable(object): - """Represents a variable passed to a stack. 
Args: name (str): Name of the variable - value (str): Initial value of the variable from the config - + value (any): Initial value of the variable from the config (str, list, + dict) """ def __init__(self, name, value): self.name = name - self._value = value - self._resolved_value = None - - @property - def lookups(self): - """Return any lookups within the value""" - return extract_lookups(self.value) - - @property - def needs_resolution(self): - """Return True if the value has any lookups that need resolving.""" - if self.lookups: - return True - return False + self._raw_value = value + self._value = VariableValue.parse(value) @property def value(self): """Return the current value of the Variable. - - `_resolved_value` takes precedence over `_value`. - """ - if self._resolved_value is not None: - return self._resolved_value - else: - return self._value + try: + return self._value.value() + except UnresolvedVariableValue: + raise UnresolvedVariable("", self) + except InvalidLookupConcatenation as e: + raise InvalidLookupCombination(e.lookup, e.lookups, self) @property def resolved(self): """Boolean for whether the Variable has been resolved. Variables only need to be resolved if they contain lookups. - """ - if self.needs_resolution: - return self._resolved_value is not None - return True + return self._value.resolved() def resolve(self, context, provider): """Recursively resolve any lookups with the Variable. 
@@ -140,21 +78,362 @@ def resolve(self, context, provider): the base provider """ + try: + self._value.resolve(context, provider) + except FailedLookup as e: + raise FailedVariableLookup(self.name, e.lookup, e.error) - while self.lookups: - resolved_lookups = resolve_lookups(self, context, provider) - self.replace(resolved_lookups) + def dependencies(self): + """ + Returns: + Set[str]: Stack names that this variable depends on + """ + return self._value.dependencies() - def replace(self, resolved_lookups): - """Replace lookups in the Variable with their resolved values. - Args: - resolved_lookups (dict): dict of :class:`stacker.lookups.Lookup` -> - resolved value. +class VariableValue(object): + """ + Abstract Syntax Tree base object to parse the value for a variable + """ + def value(self): + return NotImplementedError() + + def __iter__(self): + return NotImplementedError() + + def resolved(self): + """ + Returns: + bool: Whether value() will not raise an error + """ + return NotImplementedError() + + def resolve(self, context, provider): + pass + + def dependencies(self): + return set() + + def simplified(self): + """ + Return a simplified version of the Value. + This can be used to e.g. 
concatenate two literals in to one literal, or + to flatten nested Concatenations + + Returns: + VariableValue + """ + return self + + @classmethod + def parse(cls, input_object): + if isinstance(input_object, list): + return VariableValueList.parse(input_object) + elif isinstance(input_object, dict): + return VariableValueDict.parse(input_object) + elif not isinstance(input_object, basestring): + return VariableValueLiteral(input_object) + # else: # str + + tokens = VariableValueConcatenation([ + VariableValueLiteral(t) + for t in re.split(r'(\$\{|\}|\s+)', input_object) + ]) + + opener = '${' + closer = '}' + + while True: + last_open = None + next_close = None + for i, t in enumerate(tokens): + if not isinstance(t, VariableValueLiteral): + continue + + if t.value() == opener: + last_open = i + next_close = None + if last_open is not None and \ + t.value() == closer and \ + next_close is None: + next_close = i + + if next_close is not None: + lookup_data = VariableValueConcatenation( + tokens[(last_open + len(opener) + 1):next_close] + ) + lookup = VariableValueLookup( + lookup_name=tokens[last_open + 1], + lookup_data=lookup_data, + ) + tokens[last_open:(next_close + 1)] = [lookup] + else: + break + + tokens = tokens.simplified() + + return tokens + + +class VariableValueLiteral(VariableValue): + def __init__(self, value): + self._value = value + + def value(self): + return self._value + + def __iter__(self): + yield self + + def resolved(self): + return True + + def __repr__(self): + return "Literal<{}>".format(repr(self._value)) + + +class VariableValueList(VariableValue, list): + @classmethod + def parse(cls, input_object): + acc = [ + VariableValue.parse(obj) + for obj in input_object + ] + return cls(acc) + + def value(self): + return [ + item.value() + for item in self + ] + + def resolved(self): + accumulator = True + for item in self: + accumulator = accumulator and item.resolved() + return accumulator + + def __repr__(self): + return 
"List[{}]".format(', '.join([repr(value) for value in self])) + + def __iter__(self): + return list.__iter__(self) + + def resolve(self, context, provider): + for item in self: + item.resolve(context, provider) + + def dependencies(self): + deps = set() + for item in self: + deps.update(item.dependencies()) + return deps + + def simplified(self): + return [ + item.simplified() + for item in self + ] + + +class VariableValueDict(VariableValue, dict): + @classmethod + def parse(cls, input_object): + acc = { + k: VariableValue.parse(v) + for k, v in input_object.items() + } + return cls(acc) + + def value(self): + return { + k: v.value() + for k, v in self.items() + } + + def resolved(self): + accumulator = True + for item in self.values(): + accumulator = accumulator and item.resolved() + return accumulator + + def __repr__(self): + return "Dict[{}]".format(', '.join([ + "{}={}".format(k, repr(v)) for k, v in self.items() + ])) + + def __iter__(self): + return dict.__iter__(self) + + def resolve(self, context, provider): + for item in self.values(): + item.resolve(context, provider) + + def dependencies(self): + deps = set() + for item in self.values(): + deps.update(item.dependencies()) + return deps + + def simplified(self): + return { + k: v.simplified() + for k, v in self.items() + } + + +class VariableValueConcatenation(VariableValue, list): + def value(self): + if len(self) == 1: + return self[0].value() + values = [] + for value in self: + resolved_value = value.value() + if not isinstance(resolved_value, basestring): + raise InvalidLookupConcatenation(value, self) + values.append(resolved_value) + return ''.join(values) + + def __iter__(self): + return list.__iter__(self) + + def resolved(self): + accumulator = True + for item in self: + accumulator = accumulator and item.resolved() + return accumulator + + def __repr__(self): + return "Concat[{}]".format(', '.join([repr(value) for value in self])) + + def resolve(self, context, provider): + for value in 
self: + value.resolve(context, provider) + + def dependencies(self): + deps = set() + for item in self: + deps.update(item.dependencies()) + return deps + + def simplified(self): + concat = [] + for item in self: + if isinstance(item, VariableValueLiteral) and \ + item.value() == '': + pass + + elif isinstance(item, VariableValueLiteral) and \ + len(concat) > 0 and \ + isinstance(concat[-1], VariableValueLiteral): + # Join the literals together + concat[-1] = VariableValueLiteral( + concat[-1].value() + item.value() + ) + + elif isinstance(item, VariableValueConcatenation): + # Flatten concatenations + concat.extend(item.simplified()) + + else: + concat.append(item.simplified()) + + if len(concat) == 0: + return VariableValueLiteral('') + elif len(concat) == 1: + return concat[0] + else: + return VariableValueConcatenation(concat) + + +class VariableValueLookup(VariableValue): + def __init__(self, lookup_name, lookup_data, handler=None): + """ + Args: + lookup_name (basestring): Name of the invoked lookup + lookup_data (VariableValue): Data portion of the lookup """ - replacements = {} - for lookup, value in resolved_lookups.items(): - replacements[lookup.raw] = value + self._resolved = False + self._value = None + + self.lookup_name = lookup_name + + if isinstance(lookup_data, basestring): + lookup_data = VariableValueLiteral(lookup_data) + self.lookup_data = lookup_data - self._resolved_value = resolve(self.value, replacements) + if handler is None: + lookup_name_resolved = lookup_name.value() + try: + handler = LOOKUP_HANDLERS[lookup_name_resolved] + except KeyError: + raise UnknownLookupType(lookup_name_resolved) + self.handler = handler + + def resolve(self, context, provider): + self.lookup_data.resolve(context, provider) + try: + if type(self.handler) == type: + # Hander is a new-style handler + result = self.handler.handle( + value=self.lookup_data.value(), + context=context, + provider=provider + ) + else: + result = self.handler( + 
value=self.lookup_data.value(), + context=context, + provider=provider + ) + self._resolve(result) + except Exception as e: + raise FailedLookup(self, e) + + def _resolve(self, value): + self._value = value + self._resolved = True + + def dependencies(self): + if type(self.handler) == type: + return self.handler.dependencies(self.lookup_data) + else: + return set() + + def value(self): + if self._resolved: + return self._value + else: + raise UnresolvedVariableValue(self) + + def __iter__(self): + yield self + + def resolved(self): + return self._resolved + + def __repr__(self): + if self._resolved: + return "Lookup<{r} ({t} {d})>".format( + r=self._value, + t=self.lookup_name, + d=repr(self.lookup_data), + ) + else: + return "Lookup<{t} {d}>".format( + t=self.lookup_name, + d=repr(self.lookup_data), + ) + + def __str__(self): + return "${{{type} {data}}}".format( + type=self.lookup_name.value(), + data=self.lookup_data.value(), + ) + + def simplified(self): + return VariableValueLookup( + lookup_name=self.lookup_name, + lookup_data=self.lookup_data.simplified(), + ) From fe0086cff81aad891a62ac49ac28c8f5ff7570af Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Sun, 2 Dec 2018 22:40:42 -0800 Subject: [PATCH 10/74] Pinning PyYAML to 3.13 to deal with cfn-flip pin (#684) * Pinning PyYAML to 3.13 to deal with cfn-flip pin https://github.com/awslabs/aws-cfn-template-flip/pull/54 YAML was pinned in the cfn-flip package that troposphere depends on, and without this we have issues with building. * yay, they removed the pinning! https://github.com/awslabs/aws-cfn-template-flip/pull/58 * Ugh, have to go back to pinning moto. 
https://github.com/spulec/moto/issues/1924 https://github.com/spulec/moto/issues/1941 --- Makefile | 4 ++-- setup.py | 11 ++++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index ba04a3a02..c941acfba 100644 --- a/Makefile +++ b/Makefile @@ -8,10 +8,10 @@ lint: flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean - AWS_DEFAULT_REGION=us-east-1 python setup.py nosetests + AWS_ACCESS_KEY_ID=x AWS_SECRET_ACCESS_KEY=x AWS_DEFAULT_REGION=us-east-1 python setup.py nosetests test-unit3: clean - AWS_DEFAULT_REGION=us-east-1 python3 setup.py nosetests + AWS_ACCESS_KEY_ID=x AWS_SECRET_ACCESS_KEY=x AWS_DEFAULT_REGION=us-east-1 python3 setup.py nosetests clean: rm -rf .egg stacker.egg-info diff --git a/setup.py b/setup.py index a706e8ba5..0a12e6680 100644 --- a/setup.py +++ b/setup.py @@ -9,9 +9,11 @@ install_requires = [ "future", "troposphere>=1.9.0", - "botocore>=1.6.0", - "boto3>=1.3.1", - "PyYAML>=3.12", + # pinning needed till https://github.com/spulec/moto/issues/1924 is + # resolved + "botocore<1.11.0", + "boto3>=1.7.0,<1.8.0", + "PyYAML>=3.13b1", "awacs>=0.6.0", "gitpython>=2.0,<3.0", "schematics>=2.0.1,<2.1.0", @@ -20,6 +22,9 @@ ] tests_require = [ + # pinning needed till https://github.com/spulec/moto/issues/1924 is + # resolved + "aws-xray-sdk==1.1.2", "mock~=2.0.0", "moto~=1.1.24", "testfixtures~=4.10.0", From 37cd35143be57726ee355d37fe6c303213cb8366 Mon Sep 17 00:00:00 2001 From: Aaron Zollman Date: Sun, 2 Dec 2018 22:41:37 -0800 Subject: [PATCH 11/74] Use default ACL for uploaded lambda code (#682) * Use default ACL for uploaded lambda code The "Authenticated-Read" ACL, currently set on all uploads, allows your code to be read by all S3 users. Default behavior should be to use the permissions implied by the bucket policy, i.e. "private". 
Organizations that do not grant SetObjectAcl permissions (for fear of data loss) will block this call. * Add config option to make default uploads private Per PR#682, although the default can be changed to 'private', we should allow users to set 'authenticated-read' if they desire. Adds a new configuration option, payload_acl, to define this. --- stacker/hooks/aws_lambda.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/stacker/hooks/aws_lambda.py b/stacker/hooks/aws_lambda.py index 4d86007b3..4b388f40c 100644 --- a/stacker/hooks/aws_lambda.py +++ b/stacker/hooks/aws_lambda.py @@ -194,7 +194,8 @@ def _head_object(s3_conn, bucket, key): raise -def _upload_code(s3_conn, bucket, prefix, name, contents, content_hash): +def _upload_code(s3_conn, bucket, prefix, name, contents, content_hash, + payload_acl): """Upload a ZIP file to S3 for use by Lambda. The key used for the upload will be unique based on the checksum of the @@ -210,6 +211,8 @@ def _upload_code(s3_conn, bucket, prefix, name, contents, content_hash): construct a key name for the uploaded file. contents (str): byte string with the content of the file upload. content_hash (str): md5 hash of the contents to be uploaded. 
+ payload_acl (str): The canned S3 object ACL to be applied to the + uploaded payload Returns: troposphere.awslambda.Code: CloudFormation Lambda Code object, @@ -229,7 +232,7 @@ def _upload_code(s3_conn, bucket, prefix, name, contents, content_hash): logger.info('lambda: uploading object %s', key) s3_conn.put_object(Bucket=bucket, Key=key, Body=contents, ContentType='application/zip', - ACL='authenticated-read') + ACL=payload_acl) return Code(S3Bucket=bucket, S3Key=key) @@ -269,7 +272,8 @@ def _check_pattern_list(patterns, key, default=None): 'list of strings'.format(key)) -def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks): +def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks, + payload_acl): """Builds a Lambda payload from user configuration and uploads it to S3. Args: @@ -292,6 +296,8 @@ def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks): file patterns to exclude from the payload (optional). follow_symlinks (bool): If true, symlinks will be included in the resulting zip file + payload_acl (str): The canned S3 object ACL to be applied to the + uploaded payload Returns: troposphere.awslambda.Code: CloudFormation AWS Lambda Code object, @@ -326,7 +332,7 @@ def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks): follow_symlinks) return _upload_code(s3_conn, bucket, prefix, name, zip_contents, - content_hash) + content_hash, payload_acl) def select_bucket_region(custom_bucket, hook_region, stacker_bucket_region, @@ -385,6 +391,8 @@ def upload_lambda_functions(context, provider, **kwargs): zip name. follow_symlinks (bool, optional): Will determine if symlinks should be followed and included with the zip artifact. Default: False + payload_acl (str, optional): The canned S3 object ACL to be applied to + the uploaded payload. Default: private functions (dict): Configurations of desired payloads to build. 
Keys correspond to function names, used to derive key names for the payload. Each @@ -438,6 +446,7 @@ def upload_lambda_functions(context, provider, **kwargs): bucket: custom-bucket follow_symlinks: true prefix: cloudformation-custom-resources/ + payload_acl: authenticated-read functions: MyFunction: path: ./lambda_functions @@ -494,6 +503,10 @@ def create_template(self): if not isinstance(follow_symlinks, bool): raise ValueError('follow_symlinks option must be a boolean') + # Check for S3 object acl. Valid values from: + # https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl + payload_acl = kwargs.get('payload_acl', 'private') + # Always use the global client for s3 session = get_session(bucket_region) s3_client = session.client('s3') @@ -505,6 +518,6 @@ def create_template(self): results = {} for name, options in kwargs['functions'].items(): results[name] = _upload_function(s3_client, bucket_name, prefix, name, - options, follow_symlinks) + options, follow_symlinks, payload_acl) return results From f05b79fdb599bd2799e6daff9e561316d42c9d1e Mon Sep 17 00:00:00 2001 From: "Eric J. Holmes" Date: Thu, 1 Nov 2018 15:53:17 -0700 Subject: [PATCH 12/74] Add `in_progress` option to stack config. --- docs/config.rst | 6 ++++ stacker/actions/build.py | 3 +- stacker/config/__init__.py | 2 ++ stacker/providers/aws/default.py | 22 ++++++++++++- stacker/stack.py | 1 + stacker/tests/providers/aws/test_default.py | 36 +++++++++++++++++++++ 6 files changed, 68 insertions(+), 2 deletions(-) diff --git a/docs/config.rst b/docs/config.rst index 3e57af61e..905fc970a 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -404,6 +404,12 @@ A stack has the following keys: that will be applied when the CloudFormation stack is created and updated. You can use stack policies to prevent CloudFormation from making updates to protected resources (e.g. databases). 
See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/protect-stack-resources.html +**in_progress**: + (optional): If provided, specifies the behavior for when a stack is in + `CREATE_IN_PROGRESS` or `UPDATE_IN_PROGRESS`. By default, stacker will raise + an exception if the stack is in an `IN_PROGRESS` state. You can set this + option to `wait` and stacker will wait for the previous update to complete + before attempting to update the stack. Stacks Example ~~~~~~~~~~~~~~ diff --git a/stacker/actions/build.py b/stacker/actions/build.py index a81f4c618..97a6d28e5 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -340,7 +340,8 @@ def _launch_stack(self, stack, **kwargs): return SubmittedStatus("creating new stack") try: - if provider.prepare_stack_for_update(provider_stack, tags): + wait = stack.in_progress == "wait" + if provider.prepare_stack_for_update(provider_stack, tags, wait=wait): existing_params = provider_stack.get('Parameters', []) provider.update_stack( stack.fqn, diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index 96c09c359..b0a98d10c 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -332,6 +332,8 @@ class Stack(Model): stack_policy_path = StringType(serialize_when_none=False) + in_progress = StringType(serialize_when_none=False) + def validate_class_path(self, data, value): if value and data["template_path"]: raise ValidationError( diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index dd4d16a22..a50e2b62b 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -58,6 +58,14 @@ GET_EVENTS_SLEEP = 1 DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", ] +# Maps a CloudFormation StackStatus to a boto3 waiter. 
+# +# See https://goo.gl/VbhUZ3 +WAITERS = { + "UPDATE_IN_PROGRESS": "stack_update_complete", + "CREATE_IN_PROGRESS": "stack_create_complete", +} + def get_cloudformation_client(session): config = Config( @@ -750,7 +758,7 @@ def select_update_method(self, force_interactive, force_change_set): else: return self.default_update_stack - def prepare_stack_for_update(self, stack, tags): + def prepare_stack_for_update(self, stack, tags, wait=False): """Prepare a stack for updating It may involve deleting the stack if is has failed it's initial @@ -764,6 +772,8 @@ def prepare_stack_for_update(self, stack, tags): stack (dict): a stack object returned from get_stack tags (list): list of expected tags that must be present in the stack if it must be re-created + wait (bool): when True, this will wait for a previous + UpdateStack/CreateStack to complete. Returns: bool: True if the stack can be updated, False if it must be @@ -779,6 +789,16 @@ def prepare_stack_for_update(self, stack, tags): stack_status = self.get_stack_status(stack) if self.is_stack_in_progress(stack): + waitfunc = WAITERS.get(self.get_stack_status(stack)) + + # Wait for the stack to transition from + # UPDATE_IN_PROGRESS/CREATE_IN_PROGRESS to + # UPDATE_COMPLETE/CREATE_COMPLETE + if wait and waitfunc: + waiter = self.cloudformation.get_waiter(waitfunc) + waiter.wait(StackName=stack_name) + return True + raise exceptions.StackUpdateBadStatus( stack_name, stack_status, 'Update already in-progress') diff --git a/stacker/stack.py b/stacker/stack.py index c0c877419..9875ae2de 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -74,6 +74,7 @@ def __init__(self, definition, context, variables=None, mappings=None, self.protected = protected self.context = context self.outputs = None + self.in_progress = definition.in_progress def __repr__(self): return self.fqn diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 10dc5577c..1c8c7e343 100644 --- 
a/stacker/tests/providers/aws/test_default.py +++ b/stacker/tests/providers/aws/test_default.py @@ -451,6 +451,42 @@ def test_prepare_stack_for_update_in_progress(self): self.assertIn('in-progress', str(raised.exception)) + def test_prepare_stack_for_update_in_progress_with_wait(self): + stack_name = "MockStack" + stack = generate_describe_stacks_stack( + stack_name, stack_status="UPDATE_IN_PROGRESS") + + self.stubber.add_response( + "describe_stacks", + {"Stacks": [ + { + "StackName": stack_name, + "CreationTime": "2010-07-27T22:28:28Z", + "StackStatus": "UPDATE_COMPLETE"}]}, + expected_params={"StackName": stack_name} + ) + + with self.stubber: + self.provider.prepare_stack_for_update(stack, [], wait=True) + + def test_prepare_stack_for_create_in_progress_with_wait(self): + stack_name = "MockStack" + stack = generate_describe_stacks_stack( + stack_name, stack_status="CREATE_IN_PROGRESS") + + self.stubber.add_response( + "describe_stacks", + {"Stacks": [ + { + "StackName": stack_name, + "CreationTime": "2010-07-27T22:28:28Z", + "StackStatus": "CREATE_COMPLETE"}]}, + expected_params={"StackName": stack_name} + ) + + with self.stubber: + self.provider.prepare_stack_for_update(stack, [], wait=True) + def test_prepare_stack_for_update_non_recreatable(self): stack_name = "MockStack" stack = generate_describe_stacks_stack( From 4fa104b1cd2c30e4e9d5ea9ce1c3e6505e740a05 Mon Sep 17 00:00:00 2001 From: "Eric J. 
Holmes" Date: Thu, 1 Nov 2018 17:55:43 -0700 Subject: [PATCH 13/74] Implement the wait check within `_launch_stack` --- stacker/actions/build.py | 8 +++-- stacker/actions/destroy.py | 3 +- stacker/providers/aws/default.py | 22 +------------ stacker/status.py | 1 + stacker/tests/providers/aws/test_default.py | 36 --------------------- 5 files changed, 10 insertions(+), 60 deletions(-) diff --git a/stacker/actions/build.py b/stacker/actions/build.py index 97a6d28e5..e36a94cb8 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -23,6 +23,8 @@ CompleteStatus, FailedStatus, SkippedStatus, + PENDING, + WAITING, SUBMITTED, INTERRUPTED ) @@ -256,7 +258,7 @@ def _launch_stack(self, stack, **kwargs): """ old_status = kwargs.get("status") - wait_time = STACK_POLL_TIME if old_status == SUBMITTED else 0 + wait_time = 0 if old_status is PENDING else STACK_POLL_TIME if self.cancel.wait(wait_time): return INTERRUPTED @@ -341,7 +343,9 @@ def _launch_stack(self, stack, **kwargs): try: wait = stack.in_progress == "wait" - if provider.prepare_stack_for_update(provider_stack, tags, wait=wait): + if wait and provider.is_stack_in_progress(provider_stack): + return WAITING + if provider.prepare_stack_for_update(provider_stack, tags): existing_params = provider_stack.get('Parameters', []) provider.update_stack( stack.fqn, diff --git a/stacker/actions/destroy.py b/stacker/actions/destroy.py index 03c0a1cf6..4f26692ad 100644 --- a/stacker/actions/destroy.py +++ b/stacker/actions/destroy.py @@ -10,6 +10,7 @@ from ..status import ( CompleteStatus, SubmittedStatus, + PENDING, SUBMITTED, INTERRUPTED ) @@ -45,7 +46,7 @@ def _generate_plan(self, tail=False): def _destroy_stack(self, stack, **kwargs): old_status = kwargs.get("status") - wait_time = STACK_POLL_TIME if old_status == SUBMITTED else 0 + wait_time = 0 if old_status is PENDING else STACK_POLL_TIME if self.cancel.wait(wait_time): return INTERRUPTED diff --git a/stacker/providers/aws/default.py 
b/stacker/providers/aws/default.py index a50e2b62b..dd4d16a22 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -58,14 +58,6 @@ GET_EVENTS_SLEEP = 1 DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", ] -# Maps a CloudFormation StackStatus to a boto3 waiter. -# -# See https://goo.gl/VbhUZ3 -WAITERS = { - "UPDATE_IN_PROGRESS": "stack_update_complete", - "CREATE_IN_PROGRESS": "stack_create_complete", -} - def get_cloudformation_client(session): config = Config( @@ -758,7 +750,7 @@ def select_update_method(self, force_interactive, force_change_set): else: return self.default_update_stack - def prepare_stack_for_update(self, stack, tags, wait=False): + def prepare_stack_for_update(self, stack, tags): """Prepare a stack for updating It may involve deleting the stack if is has failed it's initial @@ -772,8 +764,6 @@ def prepare_stack_for_update(self, stack, tags, wait=False): stack (dict): a stack object returned from get_stack tags (list): list of expected tags that must be present in the stack if it must be re-created - wait (bool): when True, this will wait for a previous - UpdateStack/CreateStack to complete. 
Returns: bool: True if the stack can be updated, False if it must be @@ -789,16 +779,6 @@ def prepare_stack_for_update(self, stack, tags, wait=False): stack_status = self.get_stack_status(stack) if self.is_stack_in_progress(stack): - waitfunc = WAITERS.get(self.get_stack_status(stack)) - - # Wait for the stack to transition from - # UPDATE_IN_PROGRESS/CREATE_IN_PROGRESS to - # UPDATE_COMPLETE/CREATE_COMPLETE - if wait and waitfunc: - waiter = self.cloudformation.get_waiter(waitfunc) - waiter.wait(StackName=stack_name) - return True - raise exceptions.StackUpdateBadStatus( stack_name, stack_status, 'Update already in-progress') diff --git a/stacker/status.py b/stacker/status.py index 66df2a26b..395d575d4 100644 --- a/stacker/status.py +++ b/stacker/status.py @@ -77,6 +77,7 @@ class StackDoesNotExist(SkippedStatus): PENDING = PendingStatus() +WAITING = PendingStatus(reason="waiting") SUBMITTED = SubmittedStatus() COMPLETE = CompleteStatus() SKIPPED = SkippedStatus() diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 1c8c7e343..10dc5577c 100644 --- a/stacker/tests/providers/aws/test_default.py +++ b/stacker/tests/providers/aws/test_default.py @@ -451,42 +451,6 @@ def test_prepare_stack_for_update_in_progress(self): self.assertIn('in-progress', str(raised.exception)) - def test_prepare_stack_for_update_in_progress_with_wait(self): - stack_name = "MockStack" - stack = generate_describe_stacks_stack( - stack_name, stack_status="UPDATE_IN_PROGRESS") - - self.stubber.add_response( - "describe_stacks", - {"Stacks": [ - { - "StackName": stack_name, - "CreationTime": "2010-07-27T22:28:28Z", - "StackStatus": "UPDATE_COMPLETE"}]}, - expected_params={"StackName": stack_name} - ) - - with self.stubber: - self.provider.prepare_stack_for_update(stack, [], wait=True) - - def test_prepare_stack_for_create_in_progress_with_wait(self): - stack_name = "MockStack" - stack = generate_describe_stacks_stack( - stack_name, 
stack_status="CREATE_IN_PROGRESS") - - self.stubber.add_response( - "describe_stacks", - {"Stacks": [ - { - "StackName": stack_name, - "CreationTime": "2010-07-27T22:28:28Z", - "StackStatus": "CREATE_COMPLETE"}]}, - expected_params={"StackName": stack_name} - ) - - with self.stubber: - self.provider.prepare_stack_for_update(stack, [], wait=True) - def test_prepare_stack_for_update_non_recreatable(self): stack_name = "MockStack" stack = generate_describe_stacks_stack( From 7df4e5cd586911fa948fbd96314e51d9a5c27de4 Mon Sep 17 00:00:00 2001 From: "Eric J. Holmes" Date: Thu, 1 Nov 2018 20:36:00 -0700 Subject: [PATCH 14/74] Rename to `in_progress_behavior` --- docs/config.rst | 2 +- stacker/actions/build.py | 2 +- stacker/config/__init__.py | 2 +- stacker/stack.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/config.rst b/docs/config.rst index 905fc970a..6d7bf5f9d 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -404,7 +404,7 @@ A stack has the following keys: that will be applied when the CloudFormation stack is created and updated. You can use stack policies to prevent CloudFormation from making updates to protected resources (e.g. databases). See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/protect-stack-resources.html -**in_progress**: +**in_progress_behavior**: (optional): If provided, specifies the behavior for when a stack is in `CREATE_IN_PROGRESS` or `UPDATE_IN_PROGRESS`. By default, stacker will raise an exception if the stack is in an `IN_PROGRESS` state. 
You can set this diff --git a/stacker/actions/build.py b/stacker/actions/build.py index e36a94cb8..c4ef29857 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -342,7 +342,7 @@ def _launch_stack(self, stack, **kwargs): return SubmittedStatus("creating new stack") try: - wait = stack.in_progress == "wait" + wait = stack.in_progress_behavior == "wait" if wait and provider.is_stack_in_progress(provider_stack): return WAITING if provider.prepare_stack_for_update(provider_stack, tags): diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index b0a98d10c..3e190621a 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -332,7 +332,7 @@ class Stack(Model): stack_policy_path = StringType(serialize_when_none=False) - in_progress = StringType(serialize_when_none=False) + in_progress_behavior = StringType(serialize_when_none=False) def validate_class_path(self, data, value): if value and data["template_path"]: diff --git a/stacker/stack.py b/stacker/stack.py index 9875ae2de..811c34693 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -74,7 +74,7 @@ def __init__(self, definition, context, variables=None, mappings=None, self.protected = protected self.context = context self.outputs = None - self.in_progress = definition.in_progress + self.in_progress_behavior = definition.in_progress_behavior def __repr__(self): return self.fqn From 409e24f0b87a089aa391525ae4b643d843daf44f Mon Sep 17 00:00:00 2001 From: Garison Draper Date: Thu, 6 Dec 2018 19:45:05 -0800 Subject: [PATCH 15/74] Displaying roll back reason after both ROLLBACK_COMPLETE or UPDATE_ROLLBACK_COMPLETE --- stacker/actions/build.py | 5 ++++- stacker/providers/aws/default.py | 22 ++++++++++++++++++++-- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/stacker/actions/build.py b/stacker/actions/build.py index c4ef29857..bd2b91714 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -310,8 +310,11 @@ def _launch_stack(self, 
stack, **kwargs): reason = old_status.reason if 'rolling' in reason: reason = reason.replace('rolling', 'rolled') - + status_reason = provider.get_rollback_status_reason(stack.fqn) + logger.info( + "%s Stack Roll Back Reason: " + status_reason, stack.fqn) return FailedStatus(reason) + elif provider.is_stack_completed(provider_stack): stack.set_outputs( provider.get_output_dict(provider_stack)) diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index dd4d16a22..808531346 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -624,7 +624,7 @@ def _tail_print(e): e['ResourceType'], e['EventId'])) - def get_events(self, stack_name): + def get_events(self, stack_name, chronological=True): """Get the events in batches and return in chronological order""" next_token = None event_list = [] @@ -642,7 +642,25 @@ def get_events(self, stack_name): if next_token is None: break time.sleep(GET_EVENTS_SLEEP) - return reversed(sum(event_list, [])) + if chronological: + return reversed(sum(event_list, [])) + else: + return sum(event_list, []) + + def get_rollback_status_reason(self, stack_name): + """Process events and returns latest roll back reason""" + event = next((item for item in self.get_events(stack_name, + False) if item["ResourceStatus"] == + "UPDATE_ROLLBACK_IN_PROGRESS"), None) + if event: + reason = event["ResourceStatusReason"] + return reason + else: + event = next((item for item in self.get_events(stack_name) + if item["ResourceStatus"] == + "ROLLBACK_IN_PROGRESS"), None) + reason = event["ResourceStatusReason"] + return reason def tail(self, stack_name, cancel, log_func=_tail_print, sleep_time=5, include_initial=True): From 5728f0a868bec2be012dbb62f2ca686c45c93d04 Mon Sep 17 00:00:00 2001 From: Garison Draper Date: Tue, 11 Dec 2018 16:54:53 -0800 Subject: [PATCH 16/74] updating previous written test with data sample returned by the get_events method --- stacker/tests/actions/test_build.py | 5 +++++ 1 
file changed, 5 insertions(+) diff --git a/stacker/tests/actions/test_build.py b/stacker/tests/actions/test_build.py index 253a8cf12..018101401 100644 --- a/stacker/tests/actions/test_build.py +++ b/stacker/tests/actions/test_build.py @@ -244,10 +244,15 @@ def get_stack(name, *args, **kwargs): 'Outputs': [], 'Tags': []} + def get_events(name, *args, **kwargs): + return [{'ResourceStatus': 'ROLLBACK_IN_PROGRESS', + 'ResourceStatusReason': 'CFN fail'}] + patch_object(self.provider, 'get_stack', side_effect=get_stack) patch_object(self.provider, 'update_stack') patch_object(self.provider, 'create_stack') patch_object(self.provider, 'destroy_stack') + patch_object(self.provider, 'get_events', side_effect=get_events) patch_object(self.build_action, "s3_stack_push") From 3c6ca421beeb2c9bc7886a8ced40e5435105f18e Mon Sep 17 00:00:00 2001 From: Taryn Date: Wed, 12 Dec 2018 16:24:39 -0600 Subject: [PATCH 17/74] Fix typo in lookups.rst regarding envvar --- docs/lookups.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/lookups.rst b/docs/lookups.rst index 3f2c2acb9..4754f12e7 100644 --- a/docs/lookups.rst +++ b/docs/lookups.rst @@ -405,7 +405,7 @@ Example:: $ export DATABASE_USER=root # In the stacker config we could reference the value: - DBUser: ${envvar DATABASE_UER} + DBUser: ${envvar DATABASE_USER} # Which would resolve to: DBUser: root From 5c5443800d9a687506246d82a37cc42e31db0041 Mon Sep 17 00:00:00 2001 From: Taryn Hill Date: Tue, 18 Dec 2018 10:31:25 -0600 Subject: [PATCH 18/74] Fix typo in circular dep error message --- stacker/stack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacker/stack.py b/stacker/stack.py index 811c34693..aa5ab81b4 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -98,7 +98,7 @@ def requires(self): deps = variable.dependencies() if self.name in deps: message = ( - "Variable %s in stack %s has a ciruclar reference" + "Variable %s in stack %s has a circular reference" ) % (variable.name, 
self.name) raise ValueError(message) requires.update(deps) From 1dc6de8d20f70b1a7e40b581c704f2176f1d31a2 Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Tue, 8 Jan 2019 20:08:32 -0800 Subject: [PATCH 19/74] add ssm param types (#692) --- docs/blueprints.rst | 7 ++-- stacker/blueprints/variables/types.py | 52 +++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/docs/blueprints.rst b/docs/blueprints.rst index 464b57f97..149ec438b 100644 --- a/docs/blueprints.rst +++ b/docs/blueprints.rst @@ -244,9 +244,10 @@ CFNType The ``CFNType`` can be used to signal that a variable should be submitted to CloudFormation as a Parameter instead of only available to the -Blueprint when rendering. This is useful if you want to leverage AWS -specific Parameter types like ``List``. See -``stacker.blueprints.variables.types`` for available subclasses of the +Blueprint when rendering. This is useful if you want to leverage AWS- +Specific Parameter types (e.g. ``List``) or Systems +Manager Parameter Store values (e.g. ``AWS::SSM::Parameter::Value``). +See ``stacker.blueprints.variables.types`` for available subclasses of the ``CFNType``. 
Example diff --git a/stacker/blueprints/variables/types.py b/stacker/blueprints/variables/types.py index 4ffe60c5f..5bfa77108 100644 --- a/stacker/blueprints/variables/types.py +++ b/stacker/blueprints/variables/types.py @@ -127,10 +127,14 @@ def __init__(self, parameter_type): self.parameter_type = parameter_type +# General CFN types CFNString = CFNType("String") CFNNumber = CFNType("Number") CFNNumberList = CFNType("List") CFNCommaDelimitedList = CFNType("CommaDelimitedList") + +# AWS-Specific Parameter Types +# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/parameters-section-structure.html#aws-specific-parameter-types EC2AvailabilityZoneName = CFNType("AWS::EC2::AvailabilityZone::Name") EC2ImageId = CFNType("AWS::EC2::Image::Id") EC2InstanceId = CFNType("AWS::EC2::Instance::Id") @@ -151,3 +155,51 @@ def __init__(self, parameter_type): EC2VolumeIdList = CFNType("List") EC2VPCIdList = CFNType("List") Route53HostedZoneIdList = CFNType("List") + +# SSM Parameter Types +# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/parameters-section-structure.html#aws-ssm-parameter-types +SSMParameterName = CFNType("AWS::SSM::Parameter::Name") +SSMParameterValueString = CFNType("AWS::SSM::Parameter::Value") +SSMParameterValueStringList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueCommaDelimitedList = CFNType( + "AWS::SSM::Parameter::Value") +# Each AWS-specific type here is repeated from the the list above +SSMParameterValueEC2AvailabilityZoneName = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2ImageId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2InstanceId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2KeyPairKeyName = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2SecurityGroupGroupName = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2SecurityGroupId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2SubnetId = CFNType( + 
"AWS::SSM::Parameter::Value") +SSMParameterValueEC2VolumeId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2VPCId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueRoute53HostedZoneId = CFNType( + "AWS::SSM::Parameter::Value") +SSMParameterValueEC2AvailabilityZoneNameList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2ImageIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2InstanceIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2SecurityGroupGroupNameList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2SecurityGroupIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2SubnetIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2VolumeIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueEC2VPCIdList = CFNType( + "AWS::SSM::Parameter::Value>") +SSMParameterValueRoute53HostedZoneIdList = CFNType( + "AWS::SSM::Parameter::Value>") From c54b30c13a47444e484a0b77e7f7ffd67a4973f2 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Tue, 22 Jan 2019 08:45:43 -0800 Subject: [PATCH 20/74] Release 1.6.0 (#694) --- CHANGELOG.md | 10 ++++++++++ setup.py | 2 +- stacker/__init__.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 380466a1d..e48fc3199 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ ## Upcoming release +## 1.6.0 (2019-01-21) + +- New lookup format/syntax, making it more generic [GH-665] +- Allow lowercase y/Y when prompted [GH-674] +- Local package sources [GH-677] +- Add `in_progress` option to stack config [GH-678] +- Use default ACL for uploaded lambda code [GH-682] +- Display rollback reason after error [GH-687] +- ssm parameter types [GH-692] + ## 1.5.0 (2018-10-14) The big feature in this release is the introduction of "targets" which act as diff --git a/setup.py b/setup.py index 0a12e6680..c3966c206 100644 --- a/setup.py +++ b/setup.py @@ 
-2,7 +2,7 @@ from setuptools import setup, find_packages -VERSION = "1.5.0" +VERSION = "1.6.0" src_dir = os.path.dirname(__file__) diff --git a/stacker/__init__.py b/stacker/__init__.py index aa813611b..213a08f64 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -2,4 +2,4 @@ from __future__ import division from __future__ import absolute_import -__version__ = "1.5.0" +__version__ = "1.6.0" From 3cdfbd543f82d6805435ac9fa2655bf31e436cc4 Mon Sep 17 00:00:00 2001 From: Alec Rajeev Date: Mon, 4 Feb 2019 16:55:54 -0600 Subject: [PATCH 21/74] Add Additional ECS Unit Test (#696) * added additional unit test * removed unnecessary commentt --- stacker/tests/hooks/test_ecs.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/stacker/tests/hooks/test_ecs.py b/stacker/tests/hooks/test_ecs.py index 927e37029..12998590f 100644 --- a/stacker/tests/hooks/test_ecs.py +++ b/stacker/tests/hooks/test_ecs.py @@ -78,3 +78,27 @@ def test_create_multiple_clusters(self): response = client.list_clusters() self.assertEqual(len(response["clusterArns"]), 2) + + def test_fail_create_cluster(self): + with mock_ecs(): + logger = "stacker.hooks.ecs" + client = boto3.client("ecs", region_name=REGION) + response = client.list_clusters() + + self.assertEqual(len(response["clusterArns"]), 0) + with LogCapture(logger) as logs: + create_clusters( + provider=self.provider, + context=self.context + ) + + logs.check( + ( + logger, + "ERROR", + "setup_clusters hook missing \"clusters\" argument" + ) + ) + + response = client.list_clusters() + self.assertEqual(len(response["clusterArns"]), 0) From 4058c224714de40a5e621c8a475ec33a8cbe0b8a Mon Sep 17 00:00:00 2001 From: Nate Ferrell Date: Tue, 5 Feb 2019 10:55:21 -0600 Subject: [PATCH 22/74] "sublcasses" > "subclasses" (#697) quick spelling correction --- docs/blueprints.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/blueprints.rst b/docs/blueprints.rst index 149ec438b..74fa5b84e 100644 --- 
a/docs/blueprints.rst +++ b/docs/blueprints.rst @@ -289,7 +289,7 @@ Below is an annotated example: t = self.template # `get_variables` returns a dictionary of : . For the sublcasses of `CFNType`, the values are + value>. For the subclasses of `CFNType`, the values are instances of `CFNParameter` which have a `ref` helper property which will return a troposphere `Ref` to the parameter name. variables = self.get_variables() From d20a9df1f060b57c57e6b472469de973b7407d71 Mon Sep 17 00:00:00 2001 From: Alec Rajeev Date: Mon, 18 Feb 2019 22:18:26 -0600 Subject: [PATCH 23/74] Added unit test for service role that exists (#704) --- stacker/tests/hooks/test_iam.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/stacker/tests/hooks/test_iam.py b/stacker/tests/hooks/test_iam.py index 1db390ab8..d194f4f06 100644 --- a/stacker/tests/hooks/test_iam.py +++ b/stacker/tests/hooks/test_iam.py @@ -13,6 +13,8 @@ _get_cert_arn_from_response, ) +from awacs.helpers.trust import get_ecs_assumerole_policy + from ..factories import ( mock_context, mock_provider, @@ -22,8 +24,8 @@ REGION = "us-east-1" # No test for stacker.hooks.iam.ensure_server_cert_exists until -# this PR is accepted in moto: -# https://github.com/spulec/moto/pull/679 +# updated version of moto is imported +# (https://github.com/spulec/moto/pull/679) merged class TestIAMHooks(unittest.TestCase): @@ -71,3 +73,29 @@ def test_create_service_role(self): RoleName=role_name, PolicyName=policy_name ) + + def test_create_service_role_already_exists(self): + role_name = "ecsServiceRole" + policy_name = "AmazonEC2ContainerServiceRolePolicy" + with mock_iam(): + client = boto3.client("iam", region_name=REGION) + client.create_role( + RoleName=role_name, + AssumeRolePolicyDocument=get_ecs_assumerole_policy().to_json() + ) + + self.assertTrue( + create_ecs_service_role( + context=self.context, + provider=self.provider, + ) + ) + + role = client.get_role(RoleName=role_name) + + 
self.assertIn("Role", role) + self.assertEqual(role_name, role["Role"]["RoleName"]) + client.get_role_policy( + RoleName=role_name, + PolicyName=policy_name + ) From afc5e99390ae52b1065f34c5e1c5bcbdf61f363d Mon Sep 17 00:00:00 2001 From: Alec Rajeev Date: Mon, 18 Feb 2019 22:19:33 -0600 Subject: [PATCH 24/74] Add Keypair Unit Test (#700) --- stacker/tests/hooks/test_keypair.py | 162 ++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 stacker/tests/hooks/test_keypair.py diff --git a/stacker/tests/hooks/test_keypair.py b/stacker/tests/hooks/test_keypair.py new file mode 100644 index 000000000..bc348dae0 --- /dev/null +++ b/stacker/tests/hooks/test_keypair.py @@ -0,0 +1,162 @@ +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +import unittest +from mock import patch + +import boto3 +from moto import mock_ec2 +from testfixtures import LogCapture + +from stacker.hooks.keypair import ensure_keypair_exists, find +from ..factories import ( + mock_context, + mock_provider, +) + +REGION = "us-east-1" +KEY_PAIR_NAME = "FakeKey" + + +class TestKeypairHooks(unittest.TestCase): + + def setUp(self): + self.provider = mock_provider(region=REGION) + self.context = mock_context(namespace="fake") + + @patch("stacker.hooks.keypair.input", create=True) + def test_keypair_missing_cancel_input(self, mocked_input): + mocked_input.side_effect = ["Cancel"] + with mock_ec2(): + logger = "stacker.hooks.keypair" + client = boto3.client("ec2", region_name=REGION) + response = client.describe_key_pairs() + + # initially no key pairs created + self.assertEqual(len(response["KeyPairs"]), 0) + with LogCapture(logger) as logs: + self.assertFalse(ensure_keypair_exists(provider=self.provider, + context=self.context, + keypair=KEY_PAIR_NAME)) + logs.check( + ( + logger, + "INFO", + "keypair: \"%s\" not found" % KEY_PAIR_NAME + ), + ( + logger, + "WARNING", + "no action to find keypair, failing" + ) + ) + + def 
test_keypair_exists(self): + with mock_ec2(): + logger = "stacker.hooks.keypair" + client = boto3.client("ec2", region_name=REGION) + client.create_key_pair(KeyName=KEY_PAIR_NAME) + response = client.describe_key_pairs() + + # check that one keypair was created + self.assertEqual(len(response["KeyPairs"]), 1) + keypair = find(response["KeyPairs"], "KeyName", KEY_PAIR_NAME) + with LogCapture(logger) as logs: + value = ensure_keypair_exists(provider=self.provider, + context=self.context, + keypair=KEY_PAIR_NAME) + message = "keypair: " + KEY_PAIR_NAME + \ + " (" + keypair["KeyFingerprint"] + ") exists" + logs.check( + ( + logger, + "INFO", + message + ) + ) + self.assertEqual(value["status"], "exists") + self.assertEqual(value["key_name"], KEY_PAIR_NAME) + self.assertEqual(value["fingerprint"], + keypair["KeyFingerprint"]) + + @patch("stacker.hooks.keypair.input", create=True) + def test_keypair_missing_create(self, mocked_input): + mocked_input.side_effect = ["create", "./"] + with mock_ec2(): + logger = "stacker.hooks.keypair" + client = boto3.client("ec2", region_name=REGION) + with LogCapture(logger) as logs: + value = ensure_keypair_exists(provider=self.provider, + context=self.context, + keypair=KEY_PAIR_NAME) + response = client.describe_key_pairs() + print(response) + keypair = find(response["KeyPairs"], "KeyName", KEY_PAIR_NAME) + message = "keypair: " + KEY_PAIR_NAME + \ + " (" + keypair["KeyFingerprint"] + ") created" + logs.check( + ( + logger, + "INFO", + "keypair: \"%s\" not found" % KEY_PAIR_NAME + ), + ( + logger, + "INFO", + message + ) + ) + tmp_file_path = "/home/circleci/project/" + KEY_PAIR_NAME + ".pem" + self.assertEqual(value["status"], "created") + self.assertEqual(value["key_name"], KEY_PAIR_NAME) + self.assertEqual(value["file_path"], tmp_file_path) + + @patch("stacker.hooks.keypair.input", create=True) + def test_keypair_missing_create_invalid_path(self, mocked_input): + mocked_input.side_effect = ["create", "$"] + with mock_ec2(): + 
logger = "stacker.hooks.keypair" + with LogCapture(logger) as logs: + value = ensure_keypair_exists(provider=self.provider, + context=self.context, + keypair=KEY_PAIR_NAME) + message = "\"/home/circleci/project/" + \ + "$" + "\" is not a valid directory" + logs.check( + ( + logger, + "INFO", + "keypair: \"%s\" not found" % KEY_PAIR_NAME + ), + ( + logger, + "ERROR", + message + ) + ) + self.assertFalse(value) + + @patch("stacker.hooks.keypair.input", create=True) + def test_keypair_missing_import_invalid_path(self, mocked_input): + mocked_input.side_effect = ["import", "$"] + with mock_ec2(): + logger = "stacker.hooks.keypair" + with LogCapture(logger) as logs: + value = ensure_keypair_exists(provider=self.provider, + context=self.context, + keypair=KEY_PAIR_NAME) + er_message = "Failed to find keypair at path: " + \ + "/home/circleci/project/$" + logs.check( + ( + logger, + "INFO", + "keypair: \"%s\" not found" % KEY_PAIR_NAME + ), + ( + logger, + "ERROR", + er_message + ) + ) + self.assertFalse(value) From a1ed87a442bbaf93a01a75538f414cede2ba95d7 Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Sun, 24 Feb 2019 14:25:26 -0800 Subject: [PATCH 25/74] add jinja2 template parsing (#701) * add jinja2 template parsing * fix use with non-blueprint variables and lookups --- docs/index.rst | 1 + docs/templates.rst | 23 ++++++++++ setup.py | 1 + stacker/blueprints/raw.py | 41 ++++++++++++----- stacker/tests/blueprints/test_raw.py | 49 +++++++++++++++++++++ stacker/tests/factories.py | 7 ++- stacker/tests/fixtures/cfn_template.json.j2 | 23 ++++++++++ 7 files changed, 132 insertions(+), 13 deletions(-) create mode 100644 docs/templates.rst create mode 100644 stacker/tests/fixtures/cfn_template.json.j2 diff --git a/docs/index.rst b/docs/index.rst index c2acffe0b..555d93b1e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -56,6 +56,7 @@ Contents: lookups commands blueprints + templates API Docs diff --git a/docs/templates.rst b/docs/templates.rst new file mode 100644 index 
000000000..50bb81eb7 --- /dev/null +++ b/docs/templates.rst @@ -0,0 +1,23 @@ +========== +Templates +========== + +CloudFormation templates can be provided via python Blueprints_ or JSON/YAML. +JSON/YAML templates are specified for stacks via the ``template_path`` config +option (see `Stacks `_). + +Jinja2 Templating +================= + +Templates with a ``.j2`` extension will be parsed using `Jinja2 +`_. The stacker ``context`` and ``mappings`` objects +and stack ``variables`` objects are available for use in the template: + +.. code-block:: yaml + + Description: TestTemplate + Resources: + Bucket: + Type: AWS::S3::Bucket + Properties: + BucketName: {{ context.environment.foo }}-{{ variables.myparamname }} diff --git a/setup.py b/setup.py index c3966c206..5dfc13928 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,7 @@ "PyYAML>=3.13b1", "awacs>=0.6.0", "gitpython>=2.0,<3.0", + "jinja2>=2.7,<3.0", "schematics>=2.0.1,<2.1.0", "formic2", "python-dateutil>=2.0,<3.0", diff --git a/stacker/blueprints/raw.py b/stacker/blueprints/raw.py index dc8572cd0..0e324f21e 100644 --- a/stacker/blueprints/raw.py +++ b/stacker/blueprints/raw.py @@ -8,6 +8,8 @@ import os import sys +from jinja2 import Template + from ..util import parse_cloudformation_template from ..exceptions import InvalidConfig, UnresolvedVariable from .base import Blueprint @@ -52,16 +54,13 @@ def get_template_params(template): return params -def resolve_variable(var_name, var_def, provided_variable, blueprint_name): +def resolve_variable(provided_variable, blueprint_name): """Resolve a provided variable value against the variable definition. This acts as a subset of resolve_variable logic in the base module, leaving out everything that doesn't apply to CFN parameters. Args: - var_name (str): The name of the defined variable on a blueprint. - var_def (dict): A dictionary representing the defined variables - attributes. 
provided_variable (:class:`stacker.variables.Variable`): The variable value provided to the blueprint. blueprint_name (str): The name of the blueprint that the variable is @@ -71,8 +70,6 @@ def resolve_variable(var_name, var_def, provided_variable, blueprint_name): object: The resolved variable string value. Raises: - MissingVariable: Raised when a variable with no default is not - provided a value. UnresolvedVariable: Raised when the provided variable is not already resolved. @@ -143,20 +140,33 @@ def resolve_variables(self, provided_variables): """Resolve the values of the blueprint variables. This will resolve the values of the template parameters with values - from the env file, the config, and any lookups resolved. + from the env file, the config, and any lookups resolved. The + resolution is run twice, in case the blueprint is jinja2 templated + and requires provided variables to render. Args: provided_variables (list of :class:`stacker.variables.Variable`): list of provided variables """ + # Pass 1 to set resolved_variables to provided variables self.resolved_variables = {} + variable_dict = dict((var.name, var) for var in provided_variables) + for var_name, _var_def in variable_dict.items(): + value = resolve_variable( + variable_dict.get(var_name), + self.name + ) + if value is not None: + self.resolved_variables[var_name] = value + + # Pass 2 to render the blueprint and set resolved_variables according + # to defined variables defined_variables = self.get_parameter_definitions() + self.resolved_variables = {} variable_dict = dict((var.name, var) for var in provided_variables) - for var_name, var_def in defined_variables.items(): + for var_name, _var_def in defined_variables.items(): value = resolve_variable( - var_name, - var_def, variable_dict.get(var_name), self.name ) @@ -186,7 +196,16 @@ def rendered(self): template_path = get_template_path(self.raw_template_path) if template_path: with open(template_path, 'r') as template: - self._rendered = 
template.read() + if len(os.path.splitext(template_path)) == 2 and ( + os.path.splitext(template_path)[1] == '.j2'): + self._rendered = Template(template.read()).render( + context=self.context, + mappings=self.mappings, + name=self.name, + variables=self.resolved_variables + ) + else: + self._rendered = template.read() else: raise InvalidConfig( 'Could not find template %s' % self.raw_template_path diff --git a/stacker/tests/blueprints/test_raw.py b/stacker/tests/blueprints/test_raw.py index b3c5c88c7..eb4ee9f59 100644 --- a/stacker/tests/blueprints/test_raw.py +++ b/stacker/tests/blueprints/test_raw.py @@ -12,10 +12,12 @@ from stacker.blueprints.raw import ( get_template_params, get_template_path, RawTemplateBlueprint ) +from stacker.variables import Variable from ..factories import mock_context RAW_JSON_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.json' RAW_YAML_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.yaml' +RAW_J2_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.json.j2' class TestRawBluePrintHelpers(unittest.TestCase): @@ -115,6 +117,53 @@ def test_to_json(self): expected_json ) + def test_j2_to_json(self): + """Verify jinja2 template parsing.""" + expected_json = json.dumps( + { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "TestTemplate", + "Parameters": { + "Param1": { + "Type": "String" + }, + "Param2": { + "Default": "default", + "Type": "CommaDelimitedList" + } + }, + "Resources": { + "Dummy": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + }, + "Outputs": { + "DummyId": { + "Value": "dummy-bar-param1val-foo-1234" + } + } + }, + sort_keys=True, + indent=4 + ) + blueprint = RawTemplateBlueprint( + name="stack1", + context=mock_context( + extra_config_args={'stacks': [{'name': 'stack1', + 'template_path': 'unused', + 'variables': { + 'Param1': 'param1val', + 'bar': 'foo'}}]}, + environment={'foo': 'bar'}), + raw_template_path=RAW_J2_TEMPLATE_PATH + ) + blueprint.resolve_variables([Variable("Param1", 
"param1val"), + Variable("bar", "foo")]) + self.assertEqual( + expected_json, + blueprint.to_json() + ) + class TestVariables(unittest.TestCase): """Test class for blueprint variable methods.""" diff --git a/stacker/tests/factories.py b/stacker/tests/factories.py index dfdef8fd1..f930c5177 100644 --- a/stacker/tests/factories.py +++ b/stacker/tests/factories.py @@ -32,10 +32,13 @@ def mock_context(namespace="default", extra_config_args=None, **kwargs): if extra_config_args: config_args.update(extra_config_args) config = Config(config_args) - environment = kwargs.get("environment", {}) + if kwargs.get("environment"): + return Context( + config=config, + **kwargs) return Context( config=config, - environment=environment, + environment={}, **kwargs) diff --git a/stacker/tests/fixtures/cfn_template.json.j2 b/stacker/tests/fixtures/cfn_template.json.j2 new file mode 100644 index 000000000..e03cbf9f5 --- /dev/null +++ b/stacker/tests/fixtures/cfn_template.json.j2 @@ -0,0 +1,23 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "TestTemplate", + "Parameters": { + "Param1": { + "Type": "String" + }, + "Param2": { + "Default": "default", + "Type": "CommaDelimitedList" + } + }, + "Resources": { + "Dummy": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } + }, + "Outputs": { + "DummyId": { + "Value": "dummy-{{ context.environment.foo }}-{{ variables.Param1 }}-{{ variables.bar }}-1234" + } + } +} From 22e62f12867218e50044c90ffaa164c51d61b4d7 Mon Sep 17 00:00:00 2001 From: Rodney Gitzel Date: Tue, 5 Mar 2019 19:32:46 -0800 Subject: [PATCH 26/74] Clarifications re blueprints and file lookups. 
(#706) --- docs/blueprints.rst | 7 +++++-- docs/config.rst | 2 +- docs/lookups.rst | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/blueprints.rst b/docs/blueprints.rst index 74fa5b84e..61e558bbd 100644 --- a/docs/blueprints.rst +++ b/docs/blueprints.rst @@ -2,8 +2,11 @@ Blueprints ========== -Blueprints are python classes that build CloudFormation templates. -Traditionally these are built using troposphere_, but that is not absolutely +Blueprints are python classes that dynamically build CloudFormation templates. Where +you would specify a raw Cloudformation template in a stack using the ``template_path`` key, +you instead specify a blueprint python file using the ``class_path`` key. + +Traditionally blueprints are built using troposphere_, but that is not absolutely necessary. You are encouraged to check out the library of publicly shared Blueprints in the stacker_blueprints_ package. diff --git a/docs/config.rst b/docs/config.rst index 6d7bf5f9d..097a3756d 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -226,7 +226,7 @@ The keyword is a list of dictionaries with the following keys: the python import path to the hook **data_key:** If set, and the hook returns data (a dictionary), the results will be stored - in the context.hook_data with the data_key as it's key. + in the context.hook_data with the data_key as its key. **required:** whether to stop execution if the hook fails **enabled:** diff --git a/docs/lookups.rst b/docs/lookups.rst index 4754f12e7..a69fea678 100644 --- a/docs/lookups.rst +++ b/docs/lookups.rst @@ -250,7 +250,8 @@ Basic examples:: conf_key: aGVsbG8gdGhlcmUK Supported codecs: - - plain + - plain - load the contents of the file untouched. This is the only codec that should be used + with raw Cloudformation templates (the other codecs are intended for blueprints). 
- base64 - encode the plain text file at the given path with base64 prior to returning it - parameterized - the same as plain, but additionally supports From 3f97c9fba4f4f34ab37f50024f1412e66d7d87e2 Mon Sep 17 00:00:00 2001 From: Russell Ballestrini Date: Wed, 6 Mar 2019 09:34:19 -0500 Subject: [PATCH 27/74] Allow for custom log formats to be defined in stacks.yaml (#705) Allow for custom log formats to be defined in stacker.yaml #705 Docs and config model adjustment. modified: .gitignore modified: docs/config.rst modified: stacker/commands/stacker/__init__.py modified: stacker/commands/stacker/base.py modified: stacker/config/__init__.py modified: stacker/logger/__init__.py new file: stacker/tests/fixtures/not-basic.env new file: stacker/tests/fixtures/vpc-custom-log-format-info.yaml modified: stacker/tests/test_stacker.py --- .gitignore | 2 ++ docs/config.rst | 22 +++++++++++++++++++ stacker/commands/stacker/__init__.py | 20 +++++++++-------- stacker/commands/stacker/base.py | 8 +------ stacker/config/__init__.py | 2 ++ stacker/logger/__init__.py | 22 +++++++++++++++---- stacker/tests/fixtures/not-basic.env | 2 ++ .../fixtures/vpc-custom-log-format-info.yaml | 18 +++++++++++++++ stacker/tests/test_stacker.py | 21 ++++++++++++++++++ 9 files changed, 97 insertions(+), 20 deletions(-) create mode 100644 stacker/tests/fixtures/not-basic.env create mode 100644 stacker/tests/fixtures/vpc-custom-log-format-info.yaml diff --git a/.gitignore b/.gitignore index 635bc9e6c..6c422f410 100644 --- a/.gitignore +++ b/.gitignore @@ -68,3 +68,5 @@ dev.yaml dev.env tests/fixtures/blueprints/*-result + +FakeKey.pem diff --git a/docs/config.rst b/docs/config.rst index 097a3756d..e9711ace9 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -469,6 +469,28 @@ Here's an example of a target that will execute all "database" stacks:: required_by: - databases +Custom Log Formats +------------------ + +By default, stacker uses the following `log_formats`:: + + log_formats: + info: 
"[%(asctime)s] %(message)s" + color: "[%(asctime)s] \033[%(color)sm%(message)s\033[39m" + debug: "[%(asctime)s] %(levelname)s %(threadName)s %(name)s:%(lineno)d(%(funcName)s): %(message)s" + +You may optionally provide custom `log_formats`. + +You may use any of the standard Python +[logging module format attributes](https://docs.python.org/2.7/library/logging.html#logrecord-attributes) +when building your `log_formats`. + +In this example, we add the environment name to each log line:: + + log_formats: + info: "[%(asctime)s] ${environment} %(message)s" + color: "[%(asctime)s] ${environment} \033[%(color)sm%(message)s\033[39m" + Variables ========== diff --git a/stacker/commands/stacker/__init__.py b/stacker/commands/stacker/__init__.py index 3c17fedb8..6c96ac286 100644 --- a/stacker/commands/stacker/__init__.py +++ b/stacker/commands/stacker/__init__.py @@ -24,35 +24,37 @@ class Stacker(BaseCommand): subcommands = (Build, Destroy, Info, Diff, Graph) def configure(self, options, **kwargs): - super(Stacker, self).configure(options, **kwargs) - if options.interactive: - logger.info("Using interactive AWS provider mode.") - else: - logger.info("Using default AWS provider mode") session_cache.default_profile = options.profile - config = load_config( + self.config = load_config( options.config.read(), environment=options.environment, - validate=True) + validate=True, + ) options.provider_builder = default.ProviderBuilder( region=options.region, interactive=options.interactive, replacements_only=options.replacements_only, recreate_failed=options.recreate_failed, - service_role=config.service_role, + service_role=self.config.service_role, ) options.context = Context( environment=options.environment, - config=config, + config=self.config, # Allow subcommands to provide any specific kwargs to the Context # that it wants. 
**options.get_context_kwargs(options) ) + super(Stacker, self).configure(options, **kwargs) + if options.interactive: + logger.info("Using interactive AWS provider mode.") + else: + logger.info("Using default AWS provider mode") + def add_arguments(self, parser): parser.add_argument("--version", action="version", version="%%(prog)s %s" % (__version__,)) diff --git a/stacker/commands/stacker/base.py b/stacker/commands/stacker/base.py index 3095b6c47..c3f2084d7 100644 --- a/stacker/commands/stacker/base.py +++ b/stacker/commands/stacker/base.py @@ -107,12 +107,6 @@ def add_subcommands(self, parser): subparser.set_defaults( get_context_kwargs=subcommand.get_context_kwargs) - @property - def logger(self): - if not hasattr(self, "_logger"): - self._logger = logging.getLogger(self.name) - return self._logger - def parse_args(self, *vargs): parser = argparse.ArgumentParser(description=self.description) self.add_subcommands(parser) @@ -126,7 +120,7 @@ def run(self, options, **kwargs): def configure(self, options, **kwargs): if self.setup_logging: - self.setup_logging(options.verbose) + self.setup_logging(options.verbose, self.config.log_formats) def get_context_kwargs(self, options, **kwargs): """Return a dictionary of kwargs that will be used with the Context. 
diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index 3e190621a..5fdde4162 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -433,6 +433,8 @@ class Config(Model): stacks = ListType( ModelType(Stack), default=[]) + log_formats = DictType(StringType, serialize_when_none=False) + def _remove_excess_keys(self, data): excess_keys = set(data.keys()) excess_keys -= self._schema.valid_input_keys diff --git a/stacker/logger/__init__.py b/stacker/logger/__init__.py index 16d7a0f5e..72c7efa1d 100644 --- a/stacker/logger/__init__.py +++ b/stacker/logger/__init__.py @@ -21,15 +21,29 @@ def format(self, record): return msg -def setup_logging(verbosity): +def setup_logging(verbosity, formats=None): + """ + Configure a proper logger based on verbosity and optional log formats. + + Args: + verbosity (int): 0, 1, 2 + formats (dict): Optional, looks for `info`, `color`, and `debug` keys + which may override the associated default log formats. + """ + if formats is None: + formats = {} + log_level = logging.INFO - log_format = INFO_FORMAT + + log_format = formats.get("info", INFO_FORMAT) + if sys.stdout.isatty(): - log_format = COLOR_FORMAT + log_format = formats.get("color", COLOR_FORMAT) if verbosity > 0: log_level = logging.DEBUG - log_format = DEBUG_FORMAT + log_format = formats.get("debug", DEBUG_FORMAT) + if verbosity < 2: logging.getLogger("botocore").setLevel(logging.CRITICAL) diff --git a/stacker/tests/fixtures/not-basic.env b/stacker/tests/fixtures/not-basic.env new file mode 100644 index 000000000..5f7629ffa --- /dev/null +++ b/stacker/tests/fixtures/not-basic.env @@ -0,0 +1,2 @@ +namespace: test.stacker +environment: test diff --git a/stacker/tests/fixtures/vpc-custom-log-format-info.yaml b/stacker/tests/fixtures/vpc-custom-log-format-info.yaml new file mode 100644 index 000000000..43c8502ea --- /dev/null +++ b/stacker/tests/fixtures/vpc-custom-log-format-info.yaml @@ -0,0 +1,18 @@ +log_formats: + info: "[%(asctime)s] 
${environment} custom log format - %(message)s" + +stacks: + - name: vpc + class_path: stacker.tests.fixtures.mock_blueprints.VPC + variables: + InstanceType: m3.medium + SshKeyName: default + ImageName: NAT + # Only build 2 AZs, can be overridden with -p on the command line + # Note: If you want more than 4 AZs you should add more subnets below + # Also you need at least 2 AZs in order to use the DB because + # of the fact that the DB blueprint uses MultiAZ + AZCount: 2 + # Enough subnets for 4 AZs + PublicSubnets: 10.128.0.0/24,10.128.1.0/24,10.128.2.0/24,10.128.3.0/24 + PrivateSubnets: 10.128.8.0/22,10.128.12.0/22,10.128.16.0/22,10.128.20.0/22 diff --git a/stacker/tests/test_stacker.py b/stacker/tests/test_stacker.py index 3503c0fe5..237b9628f 100644 --- a/stacker/tests/test_stacker.py +++ b/stacker/tests/test_stacker.py @@ -92,6 +92,27 @@ def test_stacker_build_fail_when_parameters_in_stack_def(self): with self.assertRaises(InvalidConfig): stacker.configure(args) + def test_stacker_build_custom_info_log_format(self): + stacker = Stacker() + args = stacker.parse_args( + [ + "build", "-r", "us-west-2", + "stacker/tests/fixtures/not-basic.env", + "stacker/tests/fixtures/vpc-custom-log-format-info.yaml" + ] + ) + stacker.configure(args) + self.assertEqual( + stacker.config.log_formats["info"], + '[%(asctime)s] test custom log format - %(message)s' + ) + self.assertIsNone( + stacker.config.log_formats.get("color") + ) + self.assertIsNone( + stacker.config.log_formats.get("debug") + ) + if __name__ == '__main__': unittest.main() From c436eeaf400c8e76a4eafdddcb194054e98039fc Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Tue, 12 Mar 2019 18:29:17 -0300 Subject: [PATCH 28/74] Update setup dependencies and remove pinnings (#712) * setup: update dependencies and remove pinnings * setup: add "testing" extras label to help with local testing It's much easier to install test dependencies locally by doing `pip install -e .[testing]` instead of having to manually copy 
and paste the list from setup.py. --- Makefile | 4 ++-- setup.py | 12 ++++-------- stacker/tests/__init__.py | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index c941acfba..0e776a140 100644 --- a/Makefile +++ b/Makefile @@ -8,10 +8,10 @@ lint: flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean - AWS_ACCESS_KEY_ID=x AWS_SECRET_ACCESS_KEY=x AWS_DEFAULT_REGION=us-east-1 python setup.py nosetests + python setup.py nosetests test-unit3: clean - AWS_ACCESS_KEY_ID=x AWS_SECRET_ACCESS_KEY=x AWS_DEFAULT_REGION=us-east-1 python3 setup.py nosetests + python3 setup.py nosetests clean: rm -rf .egg stacker.egg-info diff --git a/setup.py b/setup.py index 5dfc13928..296015d03 100644 --- a/setup.py +++ b/setup.py @@ -9,10 +9,8 @@ install_requires = [ "future", "troposphere>=1.9.0", - # pinning needed till https://github.com/spulec/moto/issues/1924 is - # resolved - "botocore<1.11.0", - "boto3>=1.7.0,<1.8.0", + "botocore", + "boto3>=1.9.111<2.0", "PyYAML>=3.13b1", "awacs>=0.6.0", "gitpython>=2.0,<3.0", @@ -23,11 +21,8 @@ ] tests_require = [ - # pinning needed till https://github.com/spulec/moto/issues/1924 is - # resolved - "aws-xray-sdk==1.1.2", "mock~=2.0.0", - "moto~=1.1.24", + "moto~=1.3.7", "testfixtures~=4.10.0", "coverage~=4.3.4", "flake8-future-import", @@ -64,6 +59,7 @@ def read(filename): install_requires=install_requires, tests_require=tests_require, setup_requires=setup_requires, + extras_require=dict(testing=tests_require), test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/stacker/tests/__init__.py b/stacker/tests/__init__.py index e69de29bb..8b04f58a9 100644 --- a/stacker/tests/__init__.py +++ b/stacker/tests/__init__.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import, division, print_function + +import logging +import os + + +logger = 
logging.getLogger(__name__) +_saved_env = {} + + +def setUpModule(): + # Handle change in https://github.com/spulec/moto/issues/1924 + # Ensure AWS SDK find some (bogus) credentials in the environment and + # doesn't try to use other providers + overrides = { + 'AWS_ACCESS_KEY_ID': 'testing', + 'AWS_SECRET_ACCESS_KEY': 'testing', + 'AWS_DEFAULT_REGION': 'us-east-1' + } + for key, value in overrides.items(): + logger.info('Overriding env var: {}={}'.format(key, value)) + _saved_env[key] = os.environ.get(key, None) + os.environ[key] = value + + +def tearDownModule(): + for key, value in _saved_env.items(): + logger.info('Restoring saved env var: {}={}'.format(key, value)) + if value is None: + del os.environ[key] + else: + os.environ[key] = value + + _saved_env.clear() From 49295ce3a71f7b55e5c3009cde78cb05eb3bf180 Mon Sep 17 00:00:00 2001 From: Russell Ballestrini Date: Tue, 12 Mar 2019 18:48:08 -0400 Subject: [PATCH 29/74] Update config.rst to fix external URI syntax (#707) * Update config.rst to fix external URI syntax Fixes the syntax to properly render an external URI in docs. * Update config.rst --- docs/config.rst | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/docs/config.rst b/docs/config.rst index e9711ace9..a6804926b 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -479,17 +479,16 @@ By default, stacker uses the following `log_formats`:: color: "[%(asctime)s] \033[%(color)sm%(message)s\033[39m" debug: "[%(asctime)s] %(levelname)s %(threadName)s %(name)s:%(lineno)d(%(funcName)s): %(message)s" -You may optionally provide custom `log_formats`. - -You may use any of the standard Python -[logging module format attributes](https://docs.python.org/2.7/library/logging.html#logrecord-attributes) -when building your `log_formats`. - -In this example, we add the environment name to each log line:: +You may optionally provide custom `log_formats`. 
In this example, we add the environment name to each log line:: log_formats: info: "[%(asctime)s] ${environment} %(message)s" color: "[%(asctime)s] ${environment} \033[%(color)sm%(message)s\033[39m" + +You may use any of the standard Python +`logging module format attributes `_ +when building your `log_formats`. + Variables ========== From 1f1df37bd543d881494e948725ffadecb5ce0c28 Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Tue, 12 Mar 2019 19:59:21 -0300 Subject: [PATCH 30/74] Add Python 3.7 tests in Circle CI (#711) --- .circleci/config.yml | 69 +++++++++++++++----------------------------- 1 file changed, 24 insertions(+), 45 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a5ce441d6..b8915e1d7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -24,13 +24,20 @@ workflows: - functional-test-36: requires: - unit-test-36 - - functional-test-27 - functional-test-35 + - unit-test-37: + requires: + - lint + - functional-test-37: + requires: + - unit-test-37 + - functional-test-36 - cleanup-functional-buckets: requires: - functional-test-27 - functional-test-35 - functional-test-36 + - functional-test-37 jobs: lint: @@ -46,7 +53,7 @@ jobs: unit-test-27: docker: - image: circleci/python:2.7 - steps: + steps: &unit_test_steps - checkout - run: sudo python setup.py install - run: sudo make test-unit @@ -54,23 +61,22 @@ jobs: unit-test-35: docker: - image: circleci/python:3.5 - steps: - - checkout - - run: sudo python setup.py install - - run: sudo make test-unit + steps: *unit_test_steps unit-test-36: docker: - image: circleci/python:3.6 - steps: - - checkout - - run: sudo python setup.py install - - run: sudo make test-unit + steps: *unit_test_steps + + unit-test-37: + docker: + - image: circleci/python:3.7 + steps: *unit_test_steps functional-test-27: docker: - image: circleci/python:2.7 - steps: + steps: &functional_test_steps - checkout - run: command: | @@ -91,44 +97,17 @@ jobs: functional-test-35: docker: - image: 
circleci/python:3.5 - steps: - - checkout - - run: - command: | - git clone https://github.com/bats-core/bats-core.git - cd bats-core - git checkout v1.0.2 - sudo ./install.sh /usr/local - bats --version - - run: sudo python setup.py install - - run: - command: | - export TERM=xterm - export AWS_DEFAULT_REGION=us-east-1 - export STACKER_NAMESPACE=cloudtools-functional-tests-$CIRCLE_BUILD_NUM - export STACKER_ROLE=arn:aws:iam::459170252436:role/cloudtools-functional-tests-sta-FunctionalTestRole-1M9HFJ9VQVMFX - sudo -E make test-functional + steps: *functional_test_steps functional-test-36: docker: - image: circleci/python:3.6 - steps: - - checkout - - run: - command: | - git clone https://github.com/bats-core/bats-core.git - cd bats-core - git checkout v1.0.2 - sudo ./install.sh /usr/local - bats --version - - run: sudo python setup.py install - - run: - command: | - export TERM=xterm - export AWS_DEFAULT_REGION=us-east-1 - export STACKER_NAMESPACE=cloudtools-functional-tests-$CIRCLE_BUILD_NUM - export STACKER_ROLE=arn:aws:iam::459170252436:role/cloudtools-functional-tests-sta-FunctionalTestRole-1M9HFJ9VQVMFX - sudo -E make test-functional + steps: *functional_test_steps + + functional-test-37: + docker: + - image: circleci/python:3.7 + steps: *functional_test_steps cleanup-functional-buckets: docker: From cfe719e1434db36db7322d9927a8cc85dda8a24b Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Tue, 12 Mar 2019 20:01:46 -0300 Subject: [PATCH 31/74] Upload blueprint templates with bucket-owner-full-control ACL (#713) This way it's possible to use a shared stacker bucket from a master account in configs contain stacks with profiles of different child accounts. 
--- stacker/actions/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stacker/actions/base.py b/stacker/actions/base.py index 65ddebe4f..d567e82fc 100644 --- a/stacker/actions/base.py +++ b/stacker/actions/base.py @@ -189,7 +189,8 @@ def s3_stack_push(self, blueprint, force=False): self.s3_conn.put_object(Bucket=self.bucket_name, Key=key_name, Body=blueprint.rendered, - ServerSideEncryption='AES256') + ServerSideEncryption='AES256', + ACL='bucket-owner-full-control') logger.debug("Blueprint %s pushed to %s.", blueprint.name, template_url) return template_url From a3feb4a85c815fc76cc6fe13b243a2655b497d03 Mon Sep 17 00:00:00 2001 From: Adam McElwee Date: Tue, 19 Mar 2019 10:11:04 -0500 Subject: [PATCH 32/74] ThreadedWalker constructor accepts a Semaphore, not an int --- stacker/actions/base.py | 9 +++++++-- stacker/dag/__init__.py | 8 +++----- stacker/tests/test_dag.py | 9 +++++++-- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/stacker/actions/base.py b/stacker/actions/base.py index d567e82fc..0763e5245 100644 --- a/stacker/actions/base.py +++ b/stacker/actions/base.py @@ -7,7 +7,7 @@ import logging import threading -from ..dag import walk, ThreadedWalker +from ..dag import walk, ThreadedWalker, UnlimitedSemaphore from ..plan import Step, build_plan, build_graph import botocore.exceptions @@ -53,7 +53,12 @@ def build_walker(concurrency): """ if concurrency == 1: return walk - return ThreadedWalker(concurrency).walk + + semaphore = UnlimitedSemaphore() + if concurrency > 1: + semaphore = threading.Semaphore(concurrency) + + return ThreadedWalker(semaphore).walk def plan(description, stack_action, context, diff --git a/stacker/dag/__init__.py b/stacker/dag/__init__.py index ef7fc23de..ca12f20bf 100644 --- a/stacker/dag/__init__.py +++ b/stacker/dag/__init__.py @@ -414,14 +414,12 @@ class ThreadedWalker(object): allows, using threads. 
Args: - semaphore (threading.Semaphore, optional): a semaphore object which + semaphore (threading.Semaphore): a semaphore object which can be used to control how many steps are executed in parallel. - By default, there is not limit to the amount of parallelism, - other than what the graph topology allows. """ - def __init__(self, semaphore=None): - self.semaphore = semaphore or UnlimitedSemaphore() + def __init__(self, semaphore): + self.semaphore = semaphore def walk(self, dag, walk_func): """ Walks each node of the graph, in parallel if it can. diff --git a/stacker/tests/test_dag.py b/stacker/tests/test_dag.py index 785fb5ef9..a7147ef2c 100644 --- a/stacker/tests/test_dag.py +++ b/stacker/tests/test_dag.py @@ -5,7 +5,12 @@ from nose import with_setup from nose.tools import nottest, raises -from stacker.dag import DAG, DAGValidationError, ThreadedWalker +from stacker.dag import ( + DAG, + DAGValidationError, + ThreadedWalker, + UnlimitedSemaphore +) import threading dag = None @@ -220,7 +225,7 @@ def test_transitive_deep_reduction(): @with_setup(blank_setup) def test_threaded_walker(): dag = DAG() - walker = ThreadedWalker() + walker = ThreadedWalker(UnlimitedSemaphore()) # b and c should be executed at the same time. dag.from_dict({'a': ['b', 'c'], From 7f98feb20189c55abab6441c14567f5f88ef566e Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Wed, 20 Mar 2019 01:06:41 -0300 Subject: [PATCH 33/74] Change test runner from nose to py.test (#714) * Change test runner from nose to py.test Nose has been deprecated by its authors. py.test is very active, widely used, and has many useful features for running and writing tests. Changing the tests themselves away from unittest-style is a different step that must be taken later. This commit only changes the tooling. 
* dag: port tests to pytest * blueprints: raw: always return absolute template paths Additionaly, adapt tests to not depend on any fixtures in the test directory, or on the current working directory of the test runner. * test: disable deprecation warnings by default due to noise --- .gitignore | 10 +- Makefile | 4 +- docs/blueprints.rst | 2 +- setup.cfg | 13 ++- setup.py | 11 +-- stacker/blueprints/raw.py | 5 +- stacker/tests/__init__.py | 34 ------- stacker/tests/blueprints/test_raw.py | 102 ++++++++++--------- stacker/tests/conftest.py | 37 +++++++ stacker/tests/test_dag.py | 143 ++++++++++++++------------- 10 files changed, 185 insertions(+), 176 deletions(-) create mode 100644 stacker/tests/conftest.py diff --git a/.gitignore b/.gitignore index 6c422f410..622c099ae 100644 --- a/.gitignore +++ b/.gitignore @@ -56,17 +56,13 @@ dist/ .eggs/ *.egg -# nosetest --with-coverage dumps these in CWD +# Coverage artifacts .coverage - - - -vm_setup.sh +htmlcov # Ignore development conf/env files dev.yaml dev.env - tests/fixtures/blueprints/*-result - FakeKey.pem +vm_setup.sh diff --git a/Makefile b/Makefile index 0e776a140..2af72f238 100644 --- a/Makefile +++ b/Makefile @@ -8,10 +8,10 @@ lint: flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean - python setup.py nosetests + python setup.py test test-unit3: clean - python3 setup.py nosetests + python3 setup.py test clean: rm -rf .egg stacker.egg-info diff --git a/docs/blueprints.rst b/docs/blueprints.rst index 61e558bbd..caaa207df 100644 --- a/docs/blueprints.rst +++ b/docs/blueprints.rst @@ -427,7 +427,7 @@ structure and execute a test case for it. As an example: variables: var1: val1 -When run from nosetests, this will create a template fixture file called +When run from tests, this will create a template fixture file called test_stack.json containing the output from the `stacker_blueprints.s3.Buckets` template. 
diff --git a/setup.cfg b/setup.cfg index 6c28c6233..433580347 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,11 @@ [metadata] description-file = README.rst -[nosetests] -tests=stacker/tests -detailed-errors=1 -with-coverage=1 -cover-package=stacker +[aliases] +test = pytest + +[tool:pytest] +testpaths = stacker/tests +cov = stacker +filterwarnings = + ignore::DeprecationWarning diff --git a/setup.py b/setup.py index 296015d03..8aae81862 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ import os from setuptools import setup, find_packages - VERSION = "1.6.0" src_dir = os.path.dirname(__file__) @@ -20,16 +19,17 @@ "python-dateutil>=2.0,<3.0", ] +setup_requires = ['pytest-runner'] + tests_require = [ - "mock~=2.0.0", + "pytest~=4.3", + "pytest-cov~=2.6", + "mock~=2.0", "moto~=1.3.7", "testfixtures~=4.10.0", - "coverage~=4.3.4", "flake8-future-import", ] -setup_requires = ["nose"] - scripts = [ "scripts/compare_env", "scripts/docker-stacker", @@ -60,7 +60,6 @@ def read(filename): tests_require=tests_require, setup_requires=setup_requires, extras_require=dict(testing=tests_require), - test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", diff --git a/stacker/blueprints/raw.py b/stacker/blueprints/raw.py index 0e324f21e..2a5f1c444 100644 --- a/stacker/blueprints/raw.py +++ b/stacker/blueprints/raw.py @@ -30,10 +30,11 @@ def get_template_path(filename): """ if os.path.isfile(filename): - return filename + return os.path.abspath(filename) for i in sys.path: if os.path.isfile(os.path.join(i, filename)): - return os.path.join(i, filename) + return os.path.abspath(os.path.join(i, filename)) + return None diff --git a/stacker/tests/__init__.py b/stacker/tests/__init__.py index 8b04f58a9..e69de29bb 100644 --- a/stacker/tests/__init__.py +++ b/stacker/tests/__init__.py @@ -1,34 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import logging -import os - - -logger = 
logging.getLogger(__name__) -_saved_env = {} - - -def setUpModule(): - # Handle change in https://github.com/spulec/moto/issues/1924 - # Ensure AWS SDK find some (bogus) credentials in the environment and - # doesn't try to use other providers - overrides = { - 'AWS_ACCESS_KEY_ID': 'testing', - 'AWS_SECRET_ACCESS_KEY': 'testing', - 'AWS_DEFAULT_REGION': 'us-east-1' - } - for key, value in overrides.items(): - logger.info('Overriding env var: {}={}'.format(key, value)) - _saved_env[key] = os.environ.get(key, None) - os.environ[key] = value - - -def tearDownModule(): - for key, value in _saved_env.items(): - logger.info('Restoring saved env var: {}={}'.format(key, value)) - if value is None: - del os.environ[key] - else: - os.environ[key] = value - - _saved_env.clear() diff --git a/stacker/tests/blueprints/test_raw.py b/stacker/tests/blueprints/test_raw.py index eb4ee9f59..9f93f7c82 100644 --- a/stacker/tests/blueprints/test_raw.py +++ b/stacker/tests/blueprints/test_raw.py @@ -3,8 +3,6 @@ from __future__ import division from __future__ import absolute_import import json -import os -import sys import unittest from mock import MagicMock @@ -15,57 +13,51 @@ from stacker.variables import Variable from ..factories import mock_context + RAW_JSON_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.json' RAW_YAML_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.yaml' RAW_J2_TEMPLATE_PATH = 'stacker/tests/fixtures/cfn_template.json.j2' -class TestRawBluePrintHelpers(unittest.TestCase): - """Test class for functions in module.""" - - def test_get_template_path_local_file(self): # noqa pylint: disable=invalid-name - """Verify get_template_path finding a file relative to CWD.""" - self.assertEqual(get_template_path(RAW_YAML_TEMPLATE_PATH), - RAW_YAML_TEMPLATE_PATH) - - def test_get_template_path_invalid_file(self): # noqa pylint: disable=invalid-name - """Verify get_template_path with an invalid filename.""" - 
self.assertEqual(get_template_path('afilenamethatdoesnotexist.txt'), - None) - - def test_get_template_path_file_in_syspath(self): # noqa pylint: disable=invalid-name - """Verify get_template_path with a file in sys.path. - - This ensures templates are able to be retreived from remote packages. - - """ - stacker_tests_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # noqa - old_sys_path = list(sys.path) - sys.path.append(stacker_tests_dir) - try: - self.assertEqual(get_template_path('fixtures/cfn_template.yaml'), - os.path.join(stacker_tests_dir, - 'fixtures/cfn_template.yaml')) - finally: - sys.path = old_sys_path - - def test_get_template_params(self): - """Verify get_template_params function operation.""" - template_dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Description": "TestTemplate", - "Parameters": { - "Param1": { - "Type": "String" - }, - "Param2": { - "Default": "default", - "Type": "CommaDelimitedList" - } - }, - "Resources": {} - } - template_params = { +def test_get_template_path_local_file(tmpdir): + """Verify get_template_path finding a file relative to CWD.""" + + template_path = tmpdir.join('cfn_template.json') + template_path.ensure() + + with tmpdir.as_cwd(): + result = get_template_path('cfn_template.json') + assert template_path.samefile(result) + + +def test_get_template_path_invalid_file(tmpdir): + """Verify get_template_path with an invalid filename.""" + + with tmpdir.as_cwd(): + assert get_template_path('cfn_template.json') is None + + +def test_get_template_path_file_in_syspath(tmpdir, monkeypatch): + """Verify get_template_path with a file in sys.path. + + This ensures templates are able to be retrieved from remote packages. 
+ + """ + + template_path = tmpdir.join('cfn_template.json') + template_path.ensure() + + monkeypatch.syspath_prepend(tmpdir) + result = get_template_path(template_path.basename) + assert template_path.samefile(result) + + +def test_get_template_params(): + """Verify get_template_params function operation.""" + template_dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "TestTemplate", + "Parameters": { "Param1": { "Type": "String" }, @@ -73,8 +65,20 @@ def test_get_template_params(self): "Default": "default", "Type": "CommaDelimitedList" } + }, + "Resources": {} + } + template_params = { + "Param1": { + "Type": "String" + }, + "Param2": { + "Default": "default", + "Type": "CommaDelimitedList" } - self.assertEqual(get_template_params(template_dict), template_params) + } + + assert get_template_params(template_dict) == template_params class TestBlueprintRendering(unittest.TestCase): diff --git a/stacker/tests/conftest.py b/stacker/tests/conftest.py new file mode 100644 index 000000000..b9f7b390a --- /dev/null +++ b/stacker/tests/conftest.py @@ -0,0 +1,37 @@ +from __future__ import absolute_import, division, print_function + +import logging +import os + +import pytest + + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope='session', autouse=True) +def aws_credentials(): + # Handle change in https://github.com/spulec/moto/issues/1924 + # Ensure AWS SDK finds some (bogus) credentials in the environment and + # doesn't try to use other providers. 
+ overrides = { + 'AWS_ACCESS_KEY_ID': 'testing', + 'AWS_SECRET_ACCESS_KEY': 'testing', + 'AWS_DEFAULT_REGION': 'us-east-1' + } + saved_env = {} + for key, value in overrides.items(): + logger.info('Overriding env var: {}={}'.format(key, value)) + saved_env[key] = os.environ.get(key, None) + os.environ[key] = value + + yield + + for key, value in saved_env.items(): + logger.info('Restoring saved env var: {}={}'.format(key, value)) + if value is None: + del os.environ[key] + else: + os.environ[key] = value + + saved_env.clear() diff --git a/stacker/tests/test_dag.py b/stacker/tests/test_dag.py index a7147ef2c..ab83d39a4 100644 --- a/stacker/tests/test_dag.py +++ b/stacker/tests/test_dag.py @@ -2,44 +2,43 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import +import threading + +import pytest -from nose import with_setup -from nose.tools import nottest, raises from stacker.dag import ( DAG, DAGValidationError, ThreadedWalker, UnlimitedSemaphore ) -import threading -dag = None +@pytest.fixture +def empty_dag(): + return DAG() -@nottest -def blank_setup(): - global dag - dag = DAG() - -@nottest -def start_with_graph(): - global dag +@pytest.fixture +def basic_dag(): dag = DAG() dag.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}) + return dag + +def test_add_node(empty_dag): + dag = empty_dag -@with_setup(blank_setup) -def test_add_node(): dag.add_node('a') assert dag.graph == {'a': set()} -@with_setup(start_with_graph) -def test_transpose(): +def test_transpose(basic_dag): + dag = basic_dag + transposed = dag.transpose() assert transposed.graph == {'d': set(['c', 'b']), 'c': set(['a']), @@ -47,16 +46,18 @@ def test_transpose(): 'a': set([])} -@with_setup(blank_setup) -def test_add_edge(): +def test_add_edge(empty_dag): + dag = empty_dag + dag.add_node('a') dag.add_node('b') dag.add_edge('a', 'b') assert dag.graph == {'a': set('b'), 'b': set()} -@with_setup(blank_setup) -def test_from_dict(): +def 
test_from_dict(empty_dag): + dag = empty_dag + dag.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], @@ -67,17 +68,17 @@ def test_from_dict(): 'd': set()} -@with_setup(blank_setup) -def test_reset_graph(): +def test_reset_graph(empty_dag): + dag = empty_dag + dag.add_node('a') assert dag.graph == {'a': set()} dag.reset_graph() assert dag.graph == {} -@with_setup(blank_setup) -def test_walk(): - dag = DAG() +def test_walk(empty_dag): + dag = empty_dag # b and c should be executed at the same time. dag.from_dict({'a': ['b', 'c'], @@ -95,86 +96,90 @@ def walk_func(n): assert nodes == ['d', 'c', 'b', 'a'] or nodes == ['d', 'b', 'c', 'a'] -@with_setup(start_with_graph) -def test_ind_nodes(): +def test_ind_nodes(basic_dag): + dag = basic_dag assert dag.ind_nodes() == ['a'] -@with_setup(blank_setup) -def test_topological_sort(): +def test_topological_sort(empty_dag): + dag = empty_dag dag.from_dict({'a': [], 'b': ['a'], 'c': ['b']}) assert dag.topological_sort() == ['c', 'b', 'a'] -@with_setup(start_with_graph) -def test_successful_validation(): +def test_successful_validation(basic_dag): + dag = basic_dag assert dag.validate()[0] == True # noqa: E712 -@raises(DAGValidationError) -@with_setup(blank_setup) -def test_failed_validation(): - dag.from_dict({'a': ['b'], - 'b': ['a']}) +def test_failed_validation(empty_dag): + dag = empty_dag + with pytest.raises(DAGValidationError): + dag.from_dict({'a': ['b'], + 'b': ['a']}) -@with_setup(start_with_graph) -def test_downstream(): + +def test_downstream(basic_dag): + dag = basic_dag assert set(dag.downstream('a')) == set(['b', 'c']) -@with_setup(start_with_graph) -def test_all_downstreams(): +def test_all_downstreams(basic_dag): + dag = basic_dag + assert dag.all_downstreams('a') == ['b', 'c', 'd'] assert dag.all_downstreams('b') == ['d'] assert dag.all_downstreams('d') == [] -@with_setup(start_with_graph) -def test_all_downstreams_pass_graph(): - dag2 = DAG() - dag2.from_dict({'a': ['c'], - 'b': ['d'], - 'c': ['d'], - 'd': 
[]}) - assert dag2.all_downstreams('a') == ['c', 'd'] - assert dag2.all_downstreams('b') == ['d'] - assert dag2.all_downstreams('d') == [] +def test_all_downstreams_pass_graph(empty_dag): + dag = empty_dag + dag.from_dict({'a': ['c'], + 'b': ['d'], + 'c': ['d'], + 'd': []}) + assert dag.all_downstreams('a') == ['c', 'd'] + assert dag.all_downstreams('b') == ['d'] + assert dag.all_downstreams('d') == [] -@with_setup(start_with_graph) -def test_predecessors(): +def test_predecessors(basic_dag): + dag = basic_dag + assert set(dag.predecessors('a')) == set([]) assert set(dag.predecessors('b')) == set(['a']) assert set(dag.predecessors('c')) == set(['a']) assert set(dag.predecessors('d')) == set(['b', 'c']) -@with_setup(start_with_graph) -def test_filter(): +def test_filter(basic_dag): + dag = basic_dag + dag2 = dag.filter(['b', 'c']) assert dag2.graph == {'b': set('d'), 'c': set('d'), 'd': set()} -@with_setup(start_with_graph) -def test_all_leaves(): +def test_all_leaves(basic_dag): + dag = basic_dag + assert dag.all_leaves() == ['d'] -@with_setup(start_with_graph) -def test_size(): +def test_size(basic_dag): + dag = basic_dag + assert dag.size() == 4 dag.delete_node('a') assert dag.size() == 3 -@with_setup(blank_setup) -def test_transitive_reduction_no_reduction(): - dag = DAG() +def test_transitive_reduction_no_reduction(empty_dag): + dag = empty_dag dag.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], @@ -186,9 +191,8 @@ def test_transitive_reduction_no_reduction(): 'd': set()} -@with_setup(blank_setup) -def test_transitive_reduction(): - dag = DAG() +def test_transitive_reduction(empty_dag): + dag = empty_dag # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-G.svg dag.from_dict({'a': ['b', 'c', 'd', 'e'], 'b': ['d'], @@ -204,9 +208,8 @@ def test_transitive_reduction(): 'e': set()} -@with_setup(blank_setup) -def test_transitive_deep_reduction(): - dag = DAG() +def test_transitive_deep_reduction(empty_dag): + dag = empty_dag # 
https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-G.svg dag.from_dict({ 'a': ['b', 'd'], @@ -222,9 +225,9 @@ def test_transitive_deep_reduction(): 'd': set()} -@with_setup(blank_setup) -def test_threaded_walker(): - dag = DAG() +def test_threaded_walker(empty_dag): + dag = empty_dag + walker = ThreadedWalker(UnlimitedSemaphore()) # b and c should be executed at the same time. From 599dd6332b75597d1a6154175ffdb9b14e4fdf33 Mon Sep 17 00:00:00 2001 From: Adam McElwee Date: Tue, 19 Mar 2019 23:28:24 -0500 Subject: [PATCH 34/74] Use the ui I/O helper to properly serialize diff outputs (#717) --- stacker/actions/diff.py | 40 +++++++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/stacker/actions/diff.py b/stacker/actions/diff.py index 7e4550e02..97801ae7d 100644 --- a/stacker/actions/diff.py +++ b/stacker/actions/diff.py @@ -10,6 +10,7 @@ from .base import plan, build_walker from . import build +from ..ui import ui from .. import exceptions from ..util import parse_cloudformation_template from ..status import ( @@ -167,9 +168,10 @@ def normalize_json(template): return result -def print_stack_changes(stack_name, new_stack, old_stack, new_params, +def build_stack_changes(stack_name, new_stack, old_stack, new_params, old_params): - """Prints out the parameters (if changed) and stack diff""" + """Builds a list of strings to represent the the parameters (if changed) + and stack diff""" from_file = "old_%s" % (stack_name,) to_file = "new_%s" % (stack_name,) lines = difflib.context_diff( @@ -178,13 +180,15 @@ def print_stack_changes(stack_name, new_stack, old_stack, new_params, n=7) # ensure at least a few lines of context are displayed afterward template_changes = list(lines) + log_lines = [] if not template_changes: - print("*** No changes to template ***") + log_lines.append("*** No changes to template ***") param_diffs = diff_parameters(old_params, new_params) if param_diffs: - 
print(format_params_diff(param_diffs)) + log_lines.append(format_params_diff(param_diffs)) if template_changes: - print("".join(template_changes)) + log_lines.append("".join(template_changes)) + return log_lines class Action(build.Action): @@ -199,15 +203,19 @@ class Action(build.Action): config. """ - def _print_new_stack(self, stack, parameters): - """Prints out the parameters & stack contents of a new stack""" - print("New template parameters:") + def _build_new_template(self, stack, parameters): + """Constructs the parameters & contents of a new stack and returns a + list(str) representation to be output to the user + """ + log_lines = ["New template parameters:"] for param in sorted(parameters, key=lambda param: param['ParameterKey']): - print("%s = %s" % (param['ParameterKey'], param['ParameterValue'])) + log_lines.append("%s = %s" % (param['ParameterKey'], + param['ParameterValue'])) - print("\nNew template contents:") - print("".join(stack)) + log_lines.append("\nNew template contents:") + log_lines.append("".join(stack)) + return log_lines def _diff_stack(self, stack, **kwargs): """Handles the diffing a stack in CloudFormation vs our config""" @@ -241,10 +249,10 @@ def _diff_stack(self, stack, **kwargs): new_template = stack.blueprint.rendered new_stack = normalize_json(new_template) - print("============== Stack: %s ==============" % (stack.name,)) + output = ["============== Stack: %s ==============" % (stack.name,)] # If this is a completely new template dump our params & stack if not old_template: - self._print_new_stack(new_stack, parameters) + output.extend(self._build_new_template(new_stack, parameters)) else: # Diff our old & new stack/parameters old_template = parse_cloudformation_template(old_template) @@ -260,8 +268,9 @@ def _diff_stack(self, stack, **kwargs): indent=4, default=str) ) - print_stack_changes(stack.name, new_stack, old_stack, new_params, - old_params) + output.extend(build_stack_changes(stack.name, new_stack, old_stack, + 
new_params, old_params)) + ui.info('\n' + '\n'.join(output)) stack.set_outputs( provider.get_output_dict(provider_stack)) @@ -285,6 +294,7 @@ def run(self, concurrency=0, *args, **kwargs): plan.execute(walker) """Don't ever do anything for pre_run or post_run""" + def pre_run(self, *args, **kwargs): pass From 97d520698dcfff74d5b292ae896e1a191aa75e8c Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Thu, 21 Mar 2019 15:10:35 -0300 Subject: [PATCH 35/74] hooks: keypair: add some features and rewrite tests (#715) * hooks: keypair: overhaul and rewrite tests - Add support for importing a local public key file - Add support for storing generated private keys in SSM parameter store - Refactor code to be more streamlined and separate interactive input from other work * hooks: keypair: use input helpers from stacker.ui --- stacker/hooks/keypair.py | 303 +++++++++++++----- stacker/tests/conftest.py | 9 +- stacker/tests/fixtures/keypair/fingerprint | 1 + stacker/tests/fixtures/keypair/id_rsa | 27 ++ stacker/tests/fixtures/keypair/id_rsa.pub | 1 + stacker/tests/hooks/test_keypair.py | 350 ++++++++++++--------- 6 files changed, 461 insertions(+), 230 deletions(-) create mode 100644 stacker/tests/fixtures/keypair/fingerprint create mode 100644 stacker/tests/fixtures/keypair/id_rsa create mode 100644 stacker/tests/fixtures/keypair/id_rsa.pub diff --git a/stacker/hooks/keypair.py b/stacker/hooks/keypair.py index 75fa346bd..3114729cd 100644 --- a/stacker/hooks/keypair.py +++ b/stacker/hooks/keypair.py @@ -1,22 +1,184 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import -from builtins import input + import logging import os +import sys + +from botocore.exceptions import ClientError from stacker.session_cache import get_session +from stacker.hooks import utils +from stacker.ui import get_raw_input -from . 
import utils logger = logging.getLogger(__name__) +KEYPAIR_LOG_MESSAGE = "keypair: %s (%s) %s" + + +def get_existing_key_pair(ec2, keypair_name): + resp = ec2.describe_key_pairs() + keypair = next((kp for kp in resp["KeyPairs"] + if kp["KeyName"] == keypair_name), None) + + if keypair: + logger.info(KEYPAIR_LOG_MESSAGE, + keypair["KeyName"], + keypair["KeyFingerprint"], + "exists") + return { + "status": "exists", + "key_name": keypair["KeyName"], + "fingerprint": keypair["KeyFingerprint"], + } -def find(lst, key, value): - for i, dic in enumerate(lst): - if dic[key] == value: - return lst[i] - return False + logger.info("keypair: \"%s\" not found", keypair_name) + return None + + +def import_key_pair(ec2, keypair_name, public_key_data): + keypair = ec2.import_key_pair( + KeyName=keypair_name, + PublicKeyMaterial=public_key_data.strip(), + DryRun=False) + logger.info(KEYPAIR_LOG_MESSAGE, + keypair["KeyName"], + keypair["KeyFingerprint"], + "imported") + return keypair + + +def read_public_key_file(path): + try: + with open(utils.full_path(path), 'rb') as f: + data = f.read() + + if not data.startswith(b"ssh-rsa"): + raise ValueError( + "Bad public key data, must be an RSA key in SSH authorized " + "keys format (beginning with `ssh-rsa`)") + + return data.strip() + except (ValueError, IOError, OSError) as e: + logger.error("Failed to read public key file {}: {}".format( + path, e)) + return None + + +def create_key_pair_from_public_key_file(ec2, keypair_name, public_key_path): + public_key_data = read_public_key_file(public_key_path) + if not public_key_data: + return None + + keypair = import_key_pair(ec2, keypair_name, public_key_data) + return { + "status": "imported", + "key_name": keypair["KeyName"], + "fingerprint": keypair["KeyFingerprint"], + } + + +def create_key_pair_in_ssm(ec2, ssm, keypair_name, parameter_name, + kms_key_id=None): + keypair = create_key_pair(ec2, keypair_name) + try: + kms_key_label = 'default' + kms_args = {} + if kms_key_id: + 
kms_key_label = kms_key_id + kms_args = {"KeyId": kms_key_id} + + logger.info("Storing generated key in SSM parameter \"%s\" " + "using KMS key \"%s\"", parameter_name, kms_key_label) + + ssm.put_parameter( + Name=parameter_name, + Description="SSH private key for KeyPair \"{}\" " + "(generated by Stacker)".format(keypair_name), + Value=keypair["KeyMaterial"], + Type="SecureString", + Overwrite=False, + **kms_args) + except ClientError: + # Erase the key pair if we failed to store it in SSM, since the + # private key will be lost anyway + + logger.exception("Failed to store generated key in SSM, deleting " + "created key pair as private key will be lost") + ec2.delete_key_pair(KeyName=keypair_name, DryRun=False) + return None + + return { + "status": "created", + "key_name": keypair["KeyName"], + "fingerprint": keypair["KeyFingerprint"], + } + + +def create_key_pair(ec2, keypair_name): + keypair = ec2.create_key_pair(KeyName=keypair_name, DryRun=False) + logger.info(KEYPAIR_LOG_MESSAGE, + keypair["KeyName"], + keypair["KeyFingerprint"], + "created") + return keypair + + +def create_key_pair_local(ec2, keypair_name, dest_dir): + dest_dir = utils.full_path(dest_dir) + if not os.path.isdir(dest_dir): + logger.error("\"%s\" is not a valid directory", dest_dir) + return None + + file_name = "{0}.pem".format(keypair_name) + key_path = os.path.join(dest_dir, file_name) + if os.path.isfile(key_path): + # This mimics the old boto2 keypair.save error + logger.error("\"%s\" already exists in \"%s\" directory", + file_name, dest_dir) + return None + + # Open the file before creating the key pair to catch errors early + with open(key_path, "wb") as f: + keypair = create_key_pair(ec2, keypair_name) + f.write(keypair["KeyMaterial"].encode("ascii")) + + return { + "status": "created", + "key_name": keypair["KeyName"], + "fingerprint": keypair["KeyFingerprint"], + "file_path": key_path + } + + +def interactive_prompt(keypair_name, ): + if not sys.stdin.isatty(): + return None, None 
+ + try: + while True: + action = get_raw_input( + "import or create keypair \"%s\"? (import/create/cancel) " % ( + keypair_name, + ) + ) + + if action.lower() == "cancel": + break + + if action.lower() in ("i", "import"): + path = get_raw_input("path to keypair file: ") + return "import", path.strip() + + if action.lower() == "create": + path = get_raw_input("directory to save keyfile: ") + return "create", path.strip() + except (EOFError, KeyboardInterrupt): + return None, None + + return None, None def ensure_keypair_exists(provider, context, **kwargs): @@ -28,84 +190,63 @@ def ensure_keypair_exists(provider, context, **kwargs): provider (:class:`stacker.providers.base.BaseProvider`): provider instance context (:class:`stacker.context.Context`): context instance - - Returns: boolean for whether or not the hook succeeded. + keypair (str): name of the key pair to create + ssm_parameter_name (str, optional): path to an SSM store parameter to + receive the generated private key, instead of importing it or + storing it locally. + ssm_key_id (str, optional): ID of a KMS key to encrypt the SSM + parameter with. If omitted, the default key will be used. + public_key_path (str, optional): path to a public key file to be + imported instead of generating a new key. Incompatible with the SSM + options, as the private key will not be available for storing. 
+ + Returns: + In case of failure ``False``, otherwise a dict containing: + status (str): one of "exists", "imported" or "created" + key_name (str): name of the key pair + fingerprint (str): fingerprint of the key pair + file_path (str, optional): if a new key was created, the path to + the file where the private key was stored """ - session = get_session(provider.region) - client = session.client("ec2") - keypair_name = kwargs.get("keypair") - resp = client.describe_key_pairs() - keypair = find(resp["KeyPairs"], "KeyName", keypair_name) - message = "keypair: %s (%s) %s" + + keypair_name = kwargs["keypair"] + ssm_parameter_name = kwargs.get("ssm_parameter_name") + ssm_key_id = kwargs.get("ssm_key_id") + public_key_path = kwargs.get("public_key_path") + + if public_key_path and ssm_parameter_name: + logger.error("public_key_path and ssm_parameter_name cannot be " + "specified at the same time") + return False + + session = get_session(region=provider.region, + profile=kwargs.get("profile")) + ec2 = session.client("ec2") + + keypair = get_existing_key_pair(ec2, keypair_name) if keypair: - logger.info(message, - keypair["KeyName"], - keypair["KeyFingerprint"], - "exists") - return { - "status": "exists", - "key_name": keypair["KeyName"], - "fingerprint": keypair["KeyFingerprint"], - } + return keypair - logger.info("keypair: \"%s\" not found", keypair_name) - create_or_upload = input( - "import or create keypair \"%s\"? 
(import/create/Cancel) " % ( - keypair_name, - ), - ) - if create_or_upload == "import": - path = input("path to keypair file: ") - full_path = utils.full_path(path) - if not os.path.exists(full_path): - logger.error("Failed to find keypair at path: %s", full_path) - return False - - with open(full_path) as read_file: - contents = read_file.read() - - keypair = client.import_key_pair(KeyName=keypair_name, - PublicKeyMaterial=contents) - logger.info(message, - keypair["KeyName"], - keypair["KeyFingerprint"], - "imported") - return { - "status": "imported", - "key_name": keypair["KeyName"], - "fingerprint": keypair["KeyFingerprint"], - "file_path": full_path, - } - elif create_or_upload == "create": - path = input("directory to save keyfile: ") - full_path = utils.full_path(path) - if not os.path.exists(full_path) and not os.path.isdir(full_path): - logger.error("\"%s\" is not a valid directory", full_path) - return False - - file_name = "{0}.pem".format(keypair_name) - if os.path.isfile(os.path.join(full_path, file_name)): - # This mimics the old boto2 keypair.save error - logger.error("\"%s\" already exists in \"%s\" directory", - file_name, - full_path) - return False - - keypair = client.create_key_pair(KeyName=keypair_name) - logger.info(message, - keypair["KeyName"], - keypair["KeyFingerprint"], - "created") - with open(os.path.join(full_path, file_name), "w") as f: - f.write(keypair["KeyMaterial"]) + if public_key_path: + keypair = create_key_pair_from_public_key_file( + ec2, keypair_name, public_key_path) - return { - "status": "created", - "key_name": keypair["KeyName"], - "fingerprint": keypair["KeyFingerprint"], - "file_path": os.path.join(full_path, file_name) - } + elif ssm_parameter_name: + ssm = session.client('ssm') + keypair = create_key_pair_in_ssm( + ec2, ssm, keypair_name, ssm_parameter_name, ssm_key_id) else: - logger.warning("no action to find keypair, failing") + action, path = interactive_prompt(keypair_name) + if action == "import": + keypair 
= create_key_pair_from_public_key_file( + ec2, keypair_name, path) + elif action == "create": + keypair = create_key_pair_local(ec2, keypair_name, path) + else: + logger.warning("no action to find keypair, failing") + + if not keypair: return False + + return keypair diff --git a/stacker/tests/conftest.py b/stacker/tests/conftest.py index b9f7b390a..6597ebc81 100644 --- a/stacker/tests/conftest.py +++ b/stacker/tests/conftest.py @@ -4,7 +4,7 @@ import os import pytest - +import py.path logger = logging.getLogger(__name__) @@ -35,3 +35,10 @@ def aws_credentials(): os.environ[key] = value saved_env.clear() + + +@pytest.fixture(scope="package") +def stacker_fixture_dir(): + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), + 'fixtures') + return py.path.local(path) diff --git a/stacker/tests/fixtures/keypair/fingerprint b/stacker/tests/fixtures/keypair/fingerprint new file mode 100644 index 000000000..95b7181a1 --- /dev/null +++ b/stacker/tests/fixtures/keypair/fingerprint @@ -0,0 +1 @@ +d7:50:1f:78:55:5f:22:c1:f6:88:c6:5d:82:4f:94:4f diff --git a/stacker/tests/fixtures/keypair/id_rsa b/stacker/tests/fixtures/keypair/id_rsa new file mode 100644 index 000000000..1ff9afcd0 --- /dev/null +++ b/stacker/tests/fixtures/keypair/id_rsa @@ -0,0 +1,27 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn +NhAAAAAwEAAQAAAQEA7rF34ExOHgT+dDYJUswkhBpyC+vnK+ptx+nGQDTkPj9aP1uAXbXA +C97KK+Ihou0jniYKPJMHsjEK4a7eh2ihoK6JkYs9+y0MeGCAHAYuGXdNt5jv1e0XNgoYdf +JloC0pgOp4Po9+4qeuOds8bb9IxwM/aSaJWygaSc22ZTzeOWQk5PXJNH0lR0ZelUUkj0HK +aouuV6UX/t+czTghgnNZgDjk5sOfUNmugN7fJi+6/dWjOaukDkJttfZXLRTPDux0SZw4Jo +RqZ40cBNS8ipLVk24BWeEjVlNl6rrFDtO4yrkscz7plwXlPiRLcdCdbamcCZaRrdkftKje +5ypz5dvocQAAA9DJ0TBmydEwZgAAAAdzc2gtcnNhAAABAQDusXfgTE4eBP50NglSzCSEGn +IL6+cr6m3H6cZANOQ+P1o/W4BdtcAL3sor4iGi7SOeJgo8kweyMQrhrt6HaKGgromRiz37 +LQx4YIAcBi4Zd023mO/V7Rc2Chh18mWgLSmA6ng+j37ip6452zxtv0jHAz9pJolbKBpJzb 
+ZlPN45ZCTk9ck0fSVHRl6VRSSPQcpqi65XpRf+35zNOCGCc1mAOOTmw59Q2a6A3t8mL7r9 +1aM5q6QOQm219lctFM8O7HRJnDgmhGpnjRwE1LyKktWTbgFZ4SNWU2XqusUO07jKuSxzPu +mXBeU+JEtx0J1tqZwJlpGt2R+0qN7nKnPl2+hxAAAAAwEAAQAAAQAwMUSy1LUw+nElpYNc +ZDs7MNu17HtQMpTXuCt+6y7qIoBmKmNQiFGuE91d3tpLuvVmCOgoMsdrAtvflR741/dKKf +M8n5B0FjReWZ2ECvtjyOK4HvjNiIEXOBKYPcim/ndSwARnHTHRMWnL5KfewLBA/jbfVBiH +fyFPpWkeJ5v2mg3EDCkTCj7mBZwXYkX8uZ1IN6CZJ9kWNaPO3kloTlamgs6pd/5+OmMGWc +/vhfJQppaJjW58y7D7zCpncHg3Yf0HZsgWRTGJO93TxuyzDlAXITVGwqcz7InTVQZS1XTx +3FNmIpb0lDtVrKGxwvR/7gP6DpxMlKkzoCg3j1o8tHvBAAAAgQDuZCVAAqQFrY4ZH2TluP +SFulXuTiT4mgQivAwI6ysMxjpX1IGBTgDvHXJ0xyW4LN7pCvg8hRAhsPlaNBX24nNfOGmn +QMYp/qAZG5JP2vEJmDUKmEJ77Twwmk+k0zXfyZyfo7rgpF4c5W2EFnV7xiMtBTKbAj4HMn +qGPYDPGpySTwAAAIEA+w72mMctM2yd9Sxyg5b7ZlhuNyKW1oHcEvLoEpTtru0f8gh7C3HT +C0SiuTOth2xoHUWnbo4Yv5FV3gSoQ/rd1sWbkpEZMwbaPGsTA8bkCn2eItsjfrQx+6oY1U +HgZDrkjbByB3KQiq+VioKsrUmgfT/UgBq2tSnHqcYB56Eqj0sAAACBAPNkMvCstNJGS4FN +nSCGXghoYqKHivZN/IjWP33t/cr72lGp1yCY5S6FCn+JdNrojKYk2VXOSF5xc3fZllbr7W +hmhXRr/csQkymXMDkJHnsdhpMeoEZm7wBjUx+hE1+QbNF63kZMe9sjm5y/YRu7W7H6ngme +kb5FW97sspLYX8WzAAAAF2RhbmllbGt6YUBkYW5pZWwtcGMubGFuAQID +-----END OPENSSH PRIVATE KEY----- diff --git a/stacker/tests/fixtures/keypair/id_rsa.pub b/stacker/tests/fixtures/keypair/id_rsa.pub new file mode 100644 index 000000000..6bdddc029 --- /dev/null +++ b/stacker/tests/fixtures/keypair/id_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAklOUpkDHrfHY17SbrmTIpNLTGK9Tjom/BWDSUGPl+nafzlHDTYW7hdI4yZ5ew18JH4JW9jbhUFrviQzM7xlELEVf4h9lFX5QVkbPppSwg0cda3Pbv7kOdJ/MTyBlWXFCR+HAo3FXRitBqxiX1nKhXpHAZsMciLq8V6RjsNAQwdsdMFvSlVK/7XAt3FaoJoAsncM1Q9x5+3V0Ww68/eIFmb1zuUFljQJKprrX88XypNDvjYNby6vw/Pb0rwert/EnmZ+AW4OZPnTPI89ZPmVMLuayrD2cE86Z/il8b+gw3r3+1nKatmIkjn2so1d01QraTlMqVSsbxNrRFi9wrf+M7Q== diff --git a/stacker/tests/hooks/test_keypair.py b/stacker/tests/hooks/test_keypair.py index bc348dae0..49686c594 100644 --- a/stacker/tests/hooks/test_keypair.py +++ b/stacker/tests/hooks/test_keypair.py @@ -1,162 +1,216 
@@ from __future__ import print_function from __future__ import division from __future__ import absolute_import -import unittest -from mock import patch +import sys +from collections import namedtuple +from contextlib import contextmanager + +import mock +import pytest import boto3 -from moto import mock_ec2 -from testfixtures import LogCapture +from moto import mock_ec2, mock_ssm + +from stacker.hooks.keypair import ensure_keypair_exists +from ..factories import mock_context, mock_provider -from stacker.hooks.keypair import ensure_keypair_exists, find -from ..factories import ( - mock_context, - mock_provider, -) REGION = "us-east-1" KEY_PAIR_NAME = "FakeKey" +SSHKey = namedtuple('SSHKey', 'public_key private_key fingerprint') -class TestKeypairHooks(unittest.TestCase): - def setUp(self): - self.provider = mock_provider(region=REGION) - self.context = mock_context(namespace="fake") +@pytest.fixture(scope="module") +def ssh_key(stacker_fixture_dir): + base = stacker_fixture_dir.join('keypair') + return SSHKey( + private_key=base.join('id_rsa').read_binary(), + public_key=base.join('id_rsa.pub').read_binary(), + fingerprint=base.join('fingerprint').read_text('ascii').strip()) - @patch("stacker.hooks.keypair.input", create=True) - def test_keypair_missing_cancel_input(self, mocked_input): - mocked_input.side_effect = ["Cancel"] - with mock_ec2(): - logger = "stacker.hooks.keypair" - client = boto3.client("ec2", region_name=REGION) - response = client.describe_key_pairs() - - # initially no key pairs created - self.assertEqual(len(response["KeyPairs"]), 0) - with LogCapture(logger) as logs: - self.assertFalse(ensure_keypair_exists(provider=self.provider, - context=self.context, - keypair=KEY_PAIR_NAME)) - logs.check( - ( - logger, - "INFO", - "keypair: \"%s\" not found" % KEY_PAIR_NAME - ), - ( - logger, - "WARNING", - "no action to find keypair, failing" - ) - ) - - def test_keypair_exists(self): - with mock_ec2(): - logger = "stacker.hooks.keypair" - client = 
boto3.client("ec2", region_name=REGION) - client.create_key_pair(KeyName=KEY_PAIR_NAME) - response = client.describe_key_pairs() - - # check that one keypair was created - self.assertEqual(len(response["KeyPairs"]), 1) - keypair = find(response["KeyPairs"], "KeyName", KEY_PAIR_NAME) - with LogCapture(logger) as logs: - value = ensure_keypair_exists(provider=self.provider, - context=self.context, - keypair=KEY_PAIR_NAME) - message = "keypair: " + KEY_PAIR_NAME + \ - " (" + keypair["KeyFingerprint"] + ") exists" - logs.check( - ( - logger, - "INFO", - message - ) - ) - self.assertEqual(value["status"], "exists") - self.assertEqual(value["key_name"], KEY_PAIR_NAME) - self.assertEqual(value["fingerprint"], - keypair["KeyFingerprint"]) - - @patch("stacker.hooks.keypair.input", create=True) - def test_keypair_missing_create(self, mocked_input): - mocked_input.side_effect = ["create", "./"] - with mock_ec2(): - logger = "stacker.hooks.keypair" - client = boto3.client("ec2", region_name=REGION) - with LogCapture(logger) as logs: - value = ensure_keypair_exists(provider=self.provider, - context=self.context, - keypair=KEY_PAIR_NAME) - response = client.describe_key_pairs() - print(response) - keypair = find(response["KeyPairs"], "KeyName", KEY_PAIR_NAME) - message = "keypair: " + KEY_PAIR_NAME + \ - " (" + keypair["KeyFingerprint"] + ") created" - logs.check( - ( - logger, - "INFO", - "keypair: \"%s\" not found" % KEY_PAIR_NAME - ), - ( - logger, - "INFO", - message - ) - ) - tmp_file_path = "/home/circleci/project/" + KEY_PAIR_NAME + ".pem" - self.assertEqual(value["status"], "created") - self.assertEqual(value["key_name"], KEY_PAIR_NAME) - self.assertEqual(value["file_path"], tmp_file_path) - - @patch("stacker.hooks.keypair.input", create=True) - def test_keypair_missing_create_invalid_path(self, mocked_input): - mocked_input.side_effect = ["create", "$"] - with mock_ec2(): - logger = "stacker.hooks.keypair" - with LogCapture(logger) as logs: - value = 
ensure_keypair_exists(provider=self.provider, - context=self.context, - keypair=KEY_PAIR_NAME) - message = "\"/home/circleci/project/" + \ - "$" + "\" is not a valid directory" - logs.check( - ( - logger, - "INFO", - "keypair: \"%s\" not found" % KEY_PAIR_NAME - ), - ( - logger, - "ERROR", - message - ) - ) - self.assertFalse(value) - - @patch("stacker.hooks.keypair.input", create=True) - def test_keypair_missing_import_invalid_path(self, mocked_input): - mocked_input.side_effect = ["import", "$"] + +@pytest.fixture +def provider(): + return mock_provider(region=REGION) + + +@pytest.fixture +def context(): + return mock_context(namespace="fake") + + +@pytest.fixture(autouse=True) +def ec2(ssh_key): + # Force moto to generate a deterministic key pair on creation. + # Can be replaced by something more sensible when + # https://github.com/spulec/moto/pull/2108 is merged + + key_pair = {'fingerprint': ssh_key.fingerprint, + 'material': ssh_key.private_key.decode('ascii')} + with mock.patch('moto.ec2.models.random_key_pair', side_effect=[key_pair]): with mock_ec2(): - logger = "stacker.hooks.keypair" - with LogCapture(logger) as logs: - value = ensure_keypair_exists(provider=self.provider, - context=self.context, - keypair=KEY_PAIR_NAME) - er_message = "Failed to find keypair at path: " + \ - "/home/circleci/project/$" - logs.check( - ( - logger, - "INFO", - "keypair: \"%s\" not found" % KEY_PAIR_NAME - ), - ( - logger, - "ERROR", - er_message - ) - ) - self.assertFalse(value) + yield + + +@pytest.fixture(autouse=True) +def ssm(): + with mock_ssm(): + yield + + +@contextmanager +def mock_input(lines=(), isatty=True): + with mock.patch('stacker.hooks.keypair.get_raw_input', + side_effect=lines) as m: + with mock.patch.object(sys.stdin, 'isatty', return_value=isatty): + yield m + + +def assert_key_present(hook_result, key_name, fingerprint): + assert hook_result['key_name'] == key_name + assert hook_result['fingerprint'] == fingerprint + + ec2 = boto3.client('ec2') + 
response = ec2.describe_key_pairs(KeyNames=[key_name], DryRun=False) + key_pairs = response['KeyPairs'] + + assert len(key_pairs) == 1 + assert key_pairs[0]['KeyName'] == key_name + assert key_pairs[0]['KeyFingerprint'] == fingerprint + + +def test_param_validation(provider, context): + result = ensure_keypair_exists(provider, context, keypair=KEY_PAIR_NAME, + ssm_parameter_name='test', + public_key_path='test') + assert result is False + + +def test_keypair_exists(provider, context): + ec2 = boto3.client('ec2') + keypair = ec2.create_key_pair(KeyName=KEY_PAIR_NAME) + + result = ensure_keypair_exists(provider, context, keypair=KEY_PAIR_NAME) + expected = dict( + status='exists', + key_name=KEY_PAIR_NAME, + fingerprint=keypair['KeyFingerprint']) + assert result == expected + + +def test_import_file(tmpdir, provider, context, ssh_key): + pkey = tmpdir.join("id_rsa.pub") + pkey.write(ssh_key.public_key) + + result = ensure_keypair_exists(provider, context, keypair=KEY_PAIR_NAME, + public_key_path=str(pkey)) + assert_key_present(result, KEY_PAIR_NAME, ssh_key.fingerprint) + assert result['status'] == 'imported' + + +def test_import_bad_key_data(tmpdir, provider, context): + pkey = tmpdir.join("id_rsa.pub") + pkey.write('garbage') + + result = ensure_keypair_exists(provider, context, keypair=KEY_PAIR_NAME, + public_key_path=str(pkey)) + assert result is False + + +@pytest.mark.parametrize('ssm_key_id', (None, 'my-key')) +def test_create_in_ssm(provider, context, ssh_key, ssm_key_id): + result = ensure_keypair_exists(provider, context, keypair=KEY_PAIR_NAME, + ssm_parameter_name='param', + ssm_key_id=ssm_key_id) + + assert_key_present(result, KEY_PAIR_NAME, ssh_key.fingerprint) + assert result['status'] == 'created' + + ssm = boto3.client('ssm') + param = ssm.get_parameter(Name='param', WithDecryption=True)['Parameter'] + assert param['Value'] == ssh_key.private_key.decode('ascii') + assert param['Type'] == 'SecureString' + + params = 
ssm.describe_parameters()['Parameters'] + param_details = next(p for p in params if p['Name'] == 'param') + assert param_details['Description'] == \ + 'SSH private key for KeyPair "{}" (generated by Stacker)'.format( + KEY_PAIR_NAME) + assert param_details.get('KeyId') == ssm_key_id + + +def test_interactive_non_terminal_input(capsys, provider, context): + with mock_input(isatty=False) as input: + result = ensure_keypair_exists(provider, context, + keypair=KEY_PAIR_NAME) + input.assert_not_called() + assert result is False + + output = capsys.readouterr() + assert len(output.out) == 0 + assert len(output.err) == 0 + + +def test_interactive_retry_cancel(provider, context): + lines = ['garbage', 'cancel'] + with mock_input(lines) as input: + result = ensure_keypair_exists( + provider, context, keypair=KEY_PAIR_NAME) + assert input.call_count == 2 + + assert result is False + + +def test_interactive_import(tmpdir, provider, context, ssh_key): + key_file = tmpdir.join("id_rsa.pub") + key_file.write(ssh_key.public_key) + + lines = ['import', str(key_file)] + with mock_input(lines): + result = ensure_keypair_exists( + provider, context, keypair=KEY_PAIR_NAME) + + assert_key_present(result, KEY_PAIR_NAME, ssh_key.fingerprint) + assert result['status'] == 'imported' + + +def test_interactive_create(tmpdir, provider, context, ssh_key): + key_dir = tmpdir.join('keys') + key_dir.ensure_dir() + key_file = key_dir.join('{}.pem'.format(KEY_PAIR_NAME)) + + lines = ['create', str(key_dir)] + with mock_input(lines): + result = ensure_keypair_exists( + provider, context, keypair=KEY_PAIR_NAME) + + assert_key_present(result, KEY_PAIR_NAME, ssh_key.fingerprint) + assert result['status'] == 'created' + assert key_file.samefile(result['file_path']) + assert key_file.read_binary() == ssh_key.private_key + + +def test_interactive_create_bad_dir(tmpdir, provider, context): + key_dir = tmpdir.join('missing') + + lines = ['create', str(key_dir)] + with mock_input(lines): + result = 
ensure_keypair_exists( + provider, context, keypair=KEY_PAIR_NAME) + + assert result is False + + +def test_interactive_create_existing_file(tmpdir, provider, context): + key_dir = tmpdir.join('keys') + key_dir.ensure_dir() + key_file = key_dir.join('{}.pem'.format(KEY_PAIR_NAME)) + key_file.ensure() + + lines = ['create', str(key_dir)] + with mock_input(lines): + result = ensure_keypair_exists( + provider, context, keypair=KEY_PAIR_NAME) + + assert result is False From 337bbc21efaebcc7ce086a6a429c55748d1b6711 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Sun, 7 Apr 2019 18:20:30 -0700 Subject: [PATCH 36/74] Release 1.7.0 --- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- stacker/__init__.py | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e48fc3199..0024c839d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ ## Upcoming release +## 1.7.0 (2019-04-07) + +- Additional ECS unit tests [GH-696] +- Keypair unit tests [GH-700] +- Jinja2 templates in plain cloudformation templates [GH-701] +- Custom log output formats [GH-705] +- Python 3.7 unit tests in CircleCI [GH-711] +- Upload blueprint templates with bucket-owner-full-control ACL [GH-713] +- Change test runner from nose to py.test [GH-714] +- support for importing a local public key file with the keypair hook [GH-715] +- support for storing private keys in SSM parameter store with the keypair hook [GH-715] + ## 1.6.0 (2019-01-21) - New lookup format/syntax, making it more generic [GH-665] diff --git a/setup.py b/setup.py index 8aae81862..31d3e480f 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ import os from setuptools import setup, find_packages -VERSION = "1.6.0" +VERSION = "1.7.0" src_dir = os.path.dirname(__file__) diff --git a/stacker/__init__.py b/stacker/__init__.py index 213a08f64..f55f1a97f 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -2,4 +2,4 @@ from __future__ import division from __future__ import 
absolute_import -__version__ = "1.6.0" +__version__ = "1.7.0" From ad6013a03a560c46ba3c63c4d153336273e6da5d Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 8 Apr 2019 14:39:21 -0700 Subject: [PATCH 37/74] Moving handle_hooks to try and make #708 easier to review (#724) --- stacker/actions/build.py | 4 +- stacker/actions/destroy.py | 6 +-- stacker/hooks/utils.py | 75 +++++++++++++++++++++++++++++++++++ stacker/tests/test_context.py | 2 +- stacker/tests/test_util.py | 3 +- stacker/util.py | 69 -------------------------------- 6 files changed, 83 insertions(+), 76 deletions(-) diff --git a/stacker/actions/build.py b/stacker/actions/build.py index bd2b91714..55a0da9e7 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -7,7 +7,7 @@ from .base import STACK_POLL_TIME from ..providers.base import Template -from .. import util +from stacker.hooks import utils from ..exceptions import ( MissingParameterException, StackDidNotChange, @@ -196,7 +196,7 @@ def handle_hooks(stage, hooks, provider, context, dump, outline): """ if not outline and not dump and hooks: - util.handle_hooks( + utils.handle_hooks( stage=stage, hooks=hooks, provider=provider, diff --git a/stacker/actions/destroy.py b/stacker/actions/destroy.py index 4f26692ad..cb3baf627 100644 --- a/stacker/actions/destroy.py +++ b/stacker/actions/destroy.py @@ -6,7 +6,7 @@ from .base import BaseAction, plan, build_walker from .base import STACK_POLL_TIME from ..exceptions import StackDoesNotExist -from .. 
import util +from stacker.hooks.utils import handle_hooks from ..status import ( CompleteStatus, SubmittedStatus, @@ -82,7 +82,7 @@ def pre_run(self, outline=False, *args, **kwargs): """Any steps that need to be taken prior to running the action.""" pre_destroy = self.context.config.pre_destroy if not outline and pre_destroy: - util.handle_hooks( + handle_hooks( stage="pre_destroy", hooks=pre_destroy, provider=self.provider, @@ -106,7 +106,7 @@ def post_run(self, outline=False, *args, **kwargs): """Any steps that need to be taken after running the action.""" post_destroy = self.context.config.post_destroy if not outline and post_destroy: - util.handle_hooks( + handle_hooks( stage="post_destroy", hooks=post_destroy, provider=self.provider, diff --git a/stacker/hooks/utils.py b/stacker/hooks/utils.py index 350d7a6b4..718fda3a5 100644 --- a/stacker/hooks/utils.py +++ b/stacker/hooks/utils.py @@ -2,7 +2,82 @@ from __future__ import division from __future__ import absolute_import import os +import sys +import collections +import logging + +from stacker.util import load_object_from_string + +logger = logging.getLogger(__name__) def full_path(path): return os.path.abspath(os.path.expanduser(path)) + + +def handle_hooks(stage, hooks, provider, context): + """ Used to handle pre/post_build hooks. + + These are pieces of code that we want to run before/after the builder + builds the stacks. + + Args: + stage (string): The current stage (pre_run, post_run, etc). + hooks (list): A list of :class:`stacker.config.Hook` containing the + hooks to execute. + provider (:class:`stacker.provider.base.BaseProvider`): The provider + the current stack is using. + context (:class:`stacker.context.Context`): The current stacker + context. + """ + if not hooks: + logger.debug("No %s hooks defined.", stage) + return + + hook_paths = [] + for i, h in enumerate(hooks): + try: + hook_paths.append(h.path) + except KeyError: + raise ValueError("%s hook #%d missing path." 
% (stage, i)) + + logger.info("Executing %s hooks: %s", stage, ", ".join(hook_paths)) + for hook in hooks: + data_key = hook.data_key + required = hook.required + kwargs = hook.args or {} + enabled = hook.enabled + if not enabled: + logger.debug("hook with method %s is disabled, skipping", + hook.path) + continue + try: + method = load_object_from_string(hook.path) + except (AttributeError, ImportError): + logger.exception("Unable to load method at %s:", hook.path) + if required: + raise + continue + try: + result = method(context=context, provider=provider, **kwargs) + except Exception: + logger.exception("Method %s threw an exception:", hook.path) + if required: + raise + continue + if not result: + if required: + logger.error("Required hook %s failed. Return value: %s", + hook.path, result) + sys.exit(1) + logger.warning("Non-required hook %s failed. Return value: %s", + hook.path, result) + else: + if isinstance(result, collections.Mapping): + if data_key: + logger.debug("Adding result for hook %s to context in " + "data_key %s.", hook.path, data_key) + context.set_hook_data(data_key, result) + else: + logger.debug("Hook %s returned result data, but no data " + "key set, so ignoring.", hook.path) diff --git a/stacker/tests/test_context.py b/stacker/tests/test_context.py index 088fed5f0..0015aab39 100644 --- a/stacker/tests/test_context.py +++ b/stacker/tests/test_context.py @@ -5,7 +5,7 @@ from stacker.context import Context, get_fqn from stacker.config import load, Config -from stacker.util import handle_hooks +from stacker.hooks.utils import handle_hooks class TestContext(unittest.TestCase): diff --git a/stacker/tests/test_util.py b/stacker/tests/test_util.py index 9c4fa7635..0163ed4c8 100644 --- a/stacker/tests/test_util.py +++ b/stacker/tests/test_util.py @@ -19,7 +19,6 @@ cf_safe_name, load_object_from_string, camel_to_snake, - handle_hooks, merge_map, yaml_to_ordered_dict, get_client_region, @@ -33,6 +32,8 @@ SourceProcessor ) +from stacker.hooks.utils 
import handle_hooks + from .factories import ( mock_context, mock_provider, diff --git a/stacker/util.py b/stacker/util.py index 4f95a52f6..dc9e1394d 100644 --- a/stacker/util.py +++ b/stacker/util.py @@ -16,7 +16,6 @@ import tempfile import zipfile -import collections from collections import OrderedDict import botocore.client @@ -337,74 +336,6 @@ def cf_safe_name(name): return "".join([uppercase_first_letter(part) for part in parts]) -def handle_hooks(stage, hooks, provider, context): - """ Used to handle pre/post_build hooks. - - These are pieces of code that we want to run before/after the builder - builds the stacks. - - Args: - stage (string): The current stage (pre_run, post_run, etc). - hooks (list): A list of :class:`stacker.config.Hook` containing the - hooks to execute. - provider (:class:`stacker.provider.base.BaseProvider`): The provider - the current stack is using. - context (:class:`stacker.context.Context`): The current stacker - context. - """ - if not hooks: - logger.debug("No %s hooks defined.", stage) - return - - hook_paths = [] - for i, h in enumerate(hooks): - try: - hook_paths.append(h.path) - except KeyError: - raise ValueError("%s hook #%d missing path." % (stage, i)) - - logger.info("Executing %s hooks: %s", stage, ", ".join(hook_paths)) - for hook in hooks: - data_key = hook.data_key - required = hook.required - kwargs = hook.args or {} - enabled = hook.enabled - if not enabled: - logger.debug("hook with method %s is disabled, skipping", - hook.path) - continue - try: - method = load_object_from_string(hook.path) - except (AttributeError, ImportError): - logger.exception("Unable to load method at %s:", hook.path) - if required: - raise - continue - try: - result = method(context=context, provider=provider, **kwargs) - except Exception: - logger.exception("Method %s threw an exception:", hook.path) - if required: - raise - continue - if not result: - if required: - logger.error("Required hook %s failed. 
Return value: %s", - hook.path, result) - sys.exit(1) - logger.warning("Non-required hook %s failed. Return value: %s", - hook.path, result) - else: - if isinstance(result, collections.Mapping): - if data_key: - logger.debug("Adding result for hook %s to context in " - "data_key %s.", hook.path, data_key) - context.set_hook_data(data_key, result) - else: - logger.debug("Hook %s returned result data, but no data " - "key set, so ignoring.", hook.path) - - def get_config_directory(): """Return the directory the config file is located in. From c6628a313aebc321c4f029ccd6aebe459a4c1045 Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Thu, 9 May 2019 03:46:17 -0700 Subject: [PATCH 38/74] add botocore minimum version requirement (#729) Fixes #726 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 31d3e480f..3e6220c34 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ install_requires = [ "future", "troposphere>=1.9.0", - "botocore", + 'botocore>=1.12.111', # matching boto3 requirement "boto3>=1.9.111<2.0", "PyYAML>=3.13b1", "awacs>=0.6.0", From fde9bd49b8178097a401382a7f6e67ca6b83a542 Mon Sep 17 00:00:00 2001 From: Daniel Miranda Date: Mon, 13 May 2019 20:29:59 -0300 Subject: [PATCH 39/74] hooks: lambda: allow uploading pre-built payloads (#564) * hooks: lambda: rewrite tests using pytest * hooks: lambda: add support for prebuilt payloads * tests: hooks: lambda: fix Python 2.7 and 3.5 compat --- stacker/hooks/aws_lambda.py | 98 ++- stacker/tests/hooks/test_aws_lambda.py | 864 +++++++++++++------------ 2 files changed, 533 insertions(+), 429 deletions(-) diff --git a/stacker/hooks/aws_lambda.py b/stacker/hooks/aws_lambda.py index 4b388f40c..5832559e5 100644 --- a/stacker/hooks/aws_lambda.py +++ b/stacker/hooks/aws_lambda.py @@ -100,6 +100,18 @@ def _calculate_hash(files, root): return file_hash.hexdigest() +def _calculate_prebuilt_hash(f): + file_hash = hashlib.md5() + while True: + chunk = f.read(4096) + if not chunk: 
+ break + + file_hash.update(chunk) + + return file_hash.hexdigest() + + def _find_files(root, includes, excludes, follow_symlinks): """List files inside a directory based on include and exclude rules. @@ -272,6 +284,38 @@ def _check_pattern_list(patterns, key, default=None): 'list of strings'.format(key)) +def _upload_prebuilt_zip(s3_conn, bucket, prefix, name, options, path, + payload_acl): + logging.debug('lambda: using prebuilt ZIP %s', path) + + with open(path, 'rb') as zip_file: + # Default to the MD5 of the ZIP if no explicit version is provided + version = options.get('version') + if not version: + version = _calculate_prebuilt_hash(zip_file) + zip_file.seek(0) + + return _upload_code(s3_conn, bucket, prefix, name, zip_file, + version, payload_acl) + + +def _build_and_upload_zip(s3_conn, bucket, prefix, name, options, path, + follow_symlinks, payload_acl): + includes = _check_pattern_list(options.get('include'), 'include', + default=['**']) + excludes = _check_pattern_list(options.get('exclude'), 'exclude', + default=[]) + + # os.path.join will ignore other parameters if the right-most one is an + # absolute path, which is exactly what we want. + zip_contents, zip_version = _zip_from_file_patterns( + path, includes, excludes, follow_symlinks) + version = options.get('version') or zip_version + + return _upload_code(s3_conn, bucket, prefix, name, zip_contents, version, + payload_acl) + + def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks, payload_acl): """Builds a Lambda payload from user configuration and uploads it to S3. @@ -309,30 +353,27 @@ def _upload_function(s3_conn, bucket, prefix, name, options, follow_symlinks, through. 
""" try: - root = os.path.expanduser(options['path']) + path = os.path.expanduser(options['path']) except KeyError as e: raise ValueError( "missing required property '{}' in function '{}'".format( e.args[0], name)) - includes = _check_pattern_list(options.get('include'), 'include', - default=['**']) - excludes = _check_pattern_list(options.get('exclude'), 'exclude', - default=[]) + if not os.path.isabs(path): + path = os.path.abspath(os.path.join(get_config_directory(), path)) - logger.debug('lambda: processing function %s', name) + if path.endswith('.zip') and os.path.isfile(path): + logging.debug('lambda: using prebuilt zip: %s', path) - # os.path.join will ignore other parameters if the right-most one is an - # absolute path, which is exactly what we want. - if not os.path.isabs(root): - root = os.path.abspath(os.path.join(get_config_directory(), root)) - zip_contents, content_hash = _zip_from_file_patterns(root, - includes, - excludes, - follow_symlinks) + return _upload_prebuilt_zip(s3_conn, bucket, prefix, name, options, + path, payload_acl) + elif os.path.isdir(path): + logging.debug('lambda: building from directory: %s', path) - return _upload_code(s3_conn, bucket, prefix, name, zip_contents, - content_hash, payload_acl) + return _build_and_upload_zip(s3_conn, bucket, prefix, name, options, + path, follow_symlinks, payload_acl) + else: + raise ValueError('Path must be an existing ZIP file or directory') def select_bucket_region(custom_bucket, hook_region, stacker_bucket_region, @@ -400,14 +441,16 @@ def upload_lambda_functions(context, provider, **kwargs): * path (str): - Base directory of the Lambda function payload content. + Base directory or path of a ZIP file of the Lambda function + payload content. + If it not an absolute path, it will be considered relative to the directory containing the stacker configuration file in use. - Files in this directory will be added to the payload ZIP, - according to the include and exclude patterns. 
If not - patterns are provided, all files in this directory + When a directory, files contained will be added to the + payload ZIP, according to the include and exclude patterns. + If not patterns are provided, all files in the directory (respecting default exclusions) will be used. Files are stored in the archive with path names relative to @@ -415,6 +458,12 @@ def upload_lambda_functions(context, provider, **kwargs): directly under this directory will be added to the root of the ZIP file. + When a ZIP file, it will be uploaded directly to S3. + The hash of whole ZIP file will be used as the version key + by default, which may cause spurious rebuilds when building + the ZIP in different environments. To avoid that, + explicitly provide a `version` option. + * include(str or list[str], optional): Pattern or list of patterns of files to include in the @@ -433,6 +482,15 @@ def upload_lambda_functions(context, provider, **kwargs): such as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``, ``.gitignore``, etc. + * version(str, optional): + Value to use as the version for the current function, which + will be used to determine if a payload already exists in + S3. The value can be any string, such as a version number + or a git commit. + + Note that when setting this value, to re-build/re-upload a + payload you must change the version manually. + Examples: .. Hook configuration. .. 
code-block:: yaml diff --git a/stacker/tests/hooks/test_aws_lambda.py b/stacker/tests/hooks/test_aws_lambda.py index 67acc934d..6c2bc948f 100644 --- a/stacker/tests/hooks/test_aws_lambda.py +++ b/stacker/tests/hooks/test_aws_lambda.py @@ -6,7 +6,6 @@ from builtins import range import os.path import os -import unittest import mock import random from io import BytesIO as StringIO @@ -14,481 +13,528 @@ import boto3 import botocore -from troposphere.awslambda import Code +import pytest from moto import mock_s3 -from testfixtures import TempDirectory, ShouldRaise, compare +from troposphere.awslambda import Code -from stacker.context import Context -from stacker.config import Config from stacker.hooks.aws_lambda import ( - upload_lambda_functions, ZIP_PERMS_MASK, _calculate_hash, select_bucket_region, + upload_lambda_functions, ) -from ..factories import mock_provider +from ..factories import mock_context, mock_provider REGION = "us-east-1" -ALL_FILES = ( - 'f1/f1.py', - 'f1/f1.pyc', - 'f1/__init__.py', - 'f1/test/__init__.py', - 'f1/test/f1.py', - 'f1/test/f1.pyc', - 'f1/test2/test.txt', - 'f2/f2.js' -) -F1_FILES = [p[3:] for p in ALL_FILES if p.startswith('f1')] -F2_FILES = [p[3:] for p in ALL_FILES if p.startswith('f2')] - - -class TestLambdaHooks(unittest.TestCase): - @classmethod - def temp_directory_with_files(cls, files=ALL_FILES): - d = TempDirectory() - for f in files: - d.write(f, b'') - return d - - @property - def s3(self): - if not hasattr(self, '_s3'): - self._s3 = boto3.client('s3', region_name=REGION) - return self._s3 - - def assert_s3_zip_file_list(self, bucket, key, files): - object_info = self.s3.get_object(Bucket=bucket, Key=key) - zip_data = StringIO(object_info['Body'].read()) - - found_files = set() - with ZipFile(zip_data, 'r') as zip_file: - for zip_info in zip_file.infolist(): - perms = (zip_info.external_attr & ZIP_PERMS_MASK) >> 16 - self.assertIn(perms, (0o755, 0o644), - 'ZIP member permission must be 755 or 644') - 
found_files.add(zip_info.filename) - - compare(found_files, set(files)) - - def assert_s3_bucket(self, bucket, present=True): - try: - self.s3.head_bucket(Bucket=bucket) - if not present: - self.fail('s3: bucket {} should not exist'.format(bucket)) - except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] == '404': - if present: - self.fail('s3: bucket {} does not exist'.format(bucket)) - - def setUp(self): - self.context = Context( - config=Config({'namespace': 'test', 'stacker_bucket': 'test'})) - self.provider = mock_provider(region="us-east-1") - - def run_hook(self, **kwargs): - real_kwargs = { - 'context': self.context, - 'provider': self.provider, - } - real_kwargs.update(kwargs) - return upload_lambda_functions(**real_kwargs) - @mock_s3 - def test_bucket_default(self): - self.assertIsNotNone( - self.run_hook(functions={})) +@pytest.fixture +def all_files(tmpdir): + files = ( + 'f1/f1.py', + 'f1/f1.pyc', + 'f1/__init__.py', + 'f1/test/__init__.py', + 'f1/test/f1.py', + 'f1/test/f1.pyc', + 'f1/test2/test.txt', + 'f2/f2.js' + ) - self.assert_s3_bucket('test') + def create(): + for file in files: + f = tmpdir.join(file) + f.write(b'', ensure=True) + yield f - @mock_s3 - def test_bucket_custom(self): - self.assertIsNotNone( - self.run_hook(bucket='custom', functions={})) + return list(create()) - self.assert_s3_bucket('test', present=False) - self.assert_s3_bucket('custom') - @mock_s3 - def test_prefix(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(prefix='cloudformation-custom-resources/', - functions={ - 'MyFunction': { - 'path': d.path + '/f1' - } - }) +@pytest.fixture +def f1_files(tmpdir, all_files): + return [p for p in all_files if p.relto(tmpdir).startswith('f1')] - self.assertIsNotNone(results) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES) - self.assertTrue(code.S3Key.startswith( - 
'cloudformation-custom-resources/lambda-MyFunction-')) - - @mock_s3 - def test_prefix_missing(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(functions={ +@pytest.fixture +def f2_files(tmpdir, all_files): + return [p for p in all_files if p.relto(tmpdir).startswith('f2')] + + +@pytest.fixture(scope='package') +def prebuilt_zip(stacker_fixture_dir): + path = stacker_fixture_dir.join('test.zip') + content = path.read_binary() + md5 = 'c6fb602d9bde5a522856adabe9949f63' + return dict(path=path, md5=md5, contents=content) + + +@pytest.fixture(autouse=True) +def s3(): + with mock_s3(): + yield boto3.client('s3', region_name=REGION) + + +def assert_s3_zip_file_list(s3, bucket, key, files, root=None): + object_info = s3.get_object(Bucket=bucket, Key=key) + zip_data = StringIO(object_info['Body'].read()) + + expected_files = set() + for f in files: + rel_path = os.path.relpath(str(f), str(root)) if root else str(f) + expected_files.add(rel_path) + + found_files = set() + with ZipFile(zip_data, 'r') as zip_file: + for zip_info in zip_file.infolist(): + perms = (zip_info.external_attr & ZIP_PERMS_MASK) >> 16 + assert perms in (0o755, 0o644) + found_files.add(zip_info.filename) + + assert found_files == set(expected_files) + + +def assert_s3_zip_contents(s3, bucket, key, contents): + object_info = s3.get_object(Bucket=bucket, Key=key) + zip_data = object_info['Body'].read() + + assert zip_data == contents + + +def assert_s3_bucket(s3, bucket, present=True): + try: + s3.head_bucket(Bucket=bucket) + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == '404': + if present: + pytest.fail('s3: bucket {} does not exist'.format(bucket)) + else: + raise + else: + if not present: + pytest.fail('s3: bucket {} should not exist'.format(bucket)) + + +@pytest.fixture +def context(): + return mock_context() + + +@pytest.fixture +def provider(): + return mock_provider(region=REGION) + + +@pytest.fixture +def run_hook(context, 
provider): + def run(**kwargs): + return upload_lambda_functions(context=context, provider=provider, + **kwargs) + + return run + + +def test_bucket_default(s3, context, run_hook): + result = run_hook(functions={}) + assert result is not None + + assert_s3_bucket(s3, context.bucket_name, present=True) + + +def test_bucket_custom(s3, context, run_hook): + result = run_hook(bucket='custom', functions={}) + assert result is not None + + assert_s3_bucket(s3, context.bucket_name, present=False) + assert_s3_bucket(s3, 'custom', present=True) + + +def test_prefix(tmpdir, s3, all_files, f1_files, run_hook): + root = tmpdir.join('f1') + results = run_hook( + prefix='cloudformation-custom-resources/', + functions={ + 'MyFunction': { + 'path': str(root) + } + }) + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, f1_files, root=root) + assert code.S3Key.startswith( + 'cloudformation-custom-resources/lambda-MyFunction-') + + +def test_prefix_missing(tmpdir, s3, all_files, f1_files, run_hook): + root = tmpdir.join('f1') + results = run_hook( + functions={ + 'MyFunction': { + 'path': str(root) + } + } + ) + + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, f1_files, + root=root) + assert code.S3Key.startswith('lambda-MyFunction-') + + +def test_path_missing(run_hook): + msg = "missing required property 'path' in function 'MyFunction'" + with pytest.raises(ValueError, match=msg): + run_hook( + functions={ 'MyFunction': { - 'path': d.path + '/f1' } - }) + } + ) - self.assertIsNotNone(results) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES) - self.assertTrue(code.S3Key.startswith('lambda-MyFunction-')) - - @mock_s3 - def test_path_missing(self): - msg = "missing required property 'path' 
in function 'MyFunction'" - with ShouldRaise(ValueError(msg)): - self.run_hook(functions={ +def test_path_non_zip_non_dir(tmpdir, all_files, run_hook): + root = tmpdir + msg = 'Path must be an existing ZIP file or directory' + with pytest.raises(ValueError, match=msg): + run_hook( + functions={ 'MyFunction': { + 'path': str(root.join('test.txt')) } - }) + } + ) - @mock_s3 - def test_path_relative(self): - get_config_directory = 'stacker.hooks.aws_lambda.get_config_directory' - with self.temp_directory_with_files(['test/test.py']) as d, \ - mock.patch(get_config_directory) as m1: - m1.return_value = d.path - results = self.run_hook(functions={ +def test_path_relative(tmpdir, s3, run_hook): + root = tmpdir + root.join('test/test.py').write(b'', ensure=True) + + get_config_directory = 'stacker.hooks.aws_lambda.get_config_directory' + with mock.patch(get_config_directory, return_value=str(root)): + results = run_hook( + functions={ 'MyFunction': { 'path': 'test' } - }) + } + ) - self.assertIsNotNone(results) + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, ['test.py']) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ['test.py']) - @mock_s3 - def test_path_home_relative(self): - test_path = '~/test' +def test_path_home_relative(tmpdir, s3, run_hook): + root = tmpdir + test_path = '~/test' - orig_expanduser = os.path.expanduser - with self.temp_directory_with_files(['test.py']) as d, \ - mock.patch('os.path.expanduser') as m1: - m1.side_effect = lambda p: (d.path if p == test_path - else orig_expanduser(p)) + orig_expanduser = os.path.expanduser + tmpdir.join('test.py').write(b'') - results = self.run_hook(functions={ + def expanduser(path): + return str(root) if path == test_path else orig_expanduser(path) + + with mock.patch('os.path.expanduser', side_effect=expanduser): + results = 
run_hook( + functions={ 'MyFunction': { 'path': test_path } - }) + } + ) - self.assertIsNotNone(results) + assert results is not None - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ['test.py']) + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, ['test.py']) - @mock_s3 - def test_multiple_functions(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(functions={ - 'MyFunction': { - 'path': d.path + '/f1' - }, - 'OtherFunction': { - 'path': d.path + '/f2' - } - }) - self.assertIsNotNone(results) +def test_multiple_functions(tmpdir, s3, all_files, f1_files, f2_files, + run_hook): + root1 = tmpdir.join('f1') + root2 = tmpdir.join('f2') + results = run_hook( + functions={ + 'MyFunction': { + 'path': str(root1) + }, + 'OtherFunction': { + 'path': str(root2) + } + } + ) + + assert results is not None + + f1_code = results.get('MyFunction') + assert isinstance(f1_code, Code) + assert_s3_zip_file_list(s3, f1_code.S3Bucket, f1_code.S3Key, f1_files, + root=root1) - f1_code = results.get('MyFunction') - self.assertIsInstance(f1_code, Code) - self.assert_s3_zip_file_list(f1_code.S3Bucket, f1_code.S3Key, F1_FILES) + f2_code = results.get('OtherFunction') + assert isinstance(f2_code, Code) + assert_s3_zip_file_list(s3, f2_code.S3Bucket, f2_code.S3Key, f2_files, + root=root2) - f2_code = results.get('OtherFunction') - self.assertIsInstance(f2_code, Code) - self.assert_s3_zip_file_list(f2_code.S3Bucket, f2_code.S3Key, F2_FILES) - @mock_s3 - def test_patterns_invalid(self): - msg = ("Invalid file patterns in key 'include': must be a string or " - 'list of strings') +def test_patterns_invalid(tmpdir, run_hook): + root = tmpdir - with ShouldRaise(ValueError(msg)): - self.run_hook(functions={ + msg = ("Invalid file patterns in key 'include': must be a string or " + 'list of strings') + with 
pytest.raises(ValueError, match=msg): + run_hook( + functions={ 'MyFunction': { - 'path': 'test', + 'path': str(root), 'include': {'invalid': 'invalid'} } - }) + } + ) - @mock_s3 - def test_patterns_include(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(functions={ - 'MyFunction': { - 'path': d.path + '/f1', - 'include': ['*.py', 'test2/'] - } - }) - self.assertIsNotNone(results) +def test_patterns_include(tmpdir, s3, all_files, run_hook): + root = tmpdir.join('f1') + results = run_hook( + functions={ + 'MyFunction': { + 'path': str(root), + 'include': ['*.py', 'test2/'] + } + } + ) + + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [ + 'f1.py', + '__init__.py', + 'test/__init__.py', + 'test/f1.py', + 'test2/test.txt' + ]) + + +def test_patterns_exclude(tmpdir, s3, all_files, run_hook): + root = tmpdir.join('f1') + results = run_hook( + functions={ + 'MyFunction': { + 'path': str(root), + 'exclude': ['*.pyc', 'test/'] + } + } + ) + + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [ + 'f1.py', + '__init__.py', + 'test2/test.txt' + ]) + + +@mock_s3 +def test_patterns_include_exclude(tmpdir, s3, all_files, run_hook): + root = tmpdir.join('f1') + results = run_hook(functions={ + 'MyFunction': { + 'path': str(root), + 'include': '*.py', + 'exclude': 'test/' + } + }) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1.py', - '__init__.py', - 'test/__init__.py', - 'test/f1.py', - 'test2/test.txt' - ]) - - @mock_s3 - def test_patterns_exclude(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(functions={ - 'MyFunction': { - 'path': d.path + '/f1', - 'exclude': ['*.pyc', 'test/'] - } - }) + assert results is not None - 
self.assertIsNotNone(results) + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, [ + 'f1.py', + '__init__.py' + ]) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1.py', - '__init__.py', - 'test2/test.txt' - ]) - - @mock_s3 - def test_patterns_include_exclude(self): - with self.temp_directory_with_files() as d: - results = self.run_hook(functions={ + +def test_patterns_exclude_all(tmpdir, all_files, run_hook): + root = tmpdir.join('f1') + + msg = ('Empty list of files for Lambda payload. Check your ' + 'include/exclude options for errors.') + with pytest.raises(RuntimeError, match=msg): + run_hook( + functions={ 'MyFunction': { - 'path': d.path + '/f1', - 'include': '*.py', - 'exclude': 'test/' + 'path': str(root), + 'exclude': ['**'] } - }) + } + ) - self.assertIsNotNone(results) - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1.py', - '__init__.py' - ]) +def test_idempotence(tmpdir, s3, all_files, run_hook): + root = tmpdir.join('f1') - @mock_s3 - def test_patterns_exclude_all(self): - msg = ('Empty list of files for Lambda payload. 
Check your ' - 'include/exclude options for errors.') + bucket_name = 'test' + functions = { + 'MyFunction': { + 'path': str(root) + } + } - with self.temp_directory_with_files() as d, \ - ShouldRaise(RuntimeError(msg)): + s3.create_bucket(Bucket=bucket_name) - results = self.run_hook(functions={ - 'MyFunction': { - 'path': d.path + '/f1', - 'exclude': ['**'] - } - }) + previous = None + for i in range(2): + results = run_hook(bucket=bucket_name, functions=functions) + assert results is not None - self.assertIsNone(results) + code = results.get('MyFunction') + assert isinstance(code, Code) + + if not previous: + previous = code.S3Key + continue + + assert previous == code.S3Key + + +def test_calculate_hash(tmpdir, all_files, f1_files, f2_files): + root = tmpdir + + all_hash_1 = _calculate_hash(map(str, all_files), str(root)) + all_hash_2 = _calculate_hash(map(str, all_files), str(root)) + f1_hash = _calculate_hash(map(str, f1_files), str(root)) + f2_hash = _calculate_hash(map(str, f2_files), str(root)) + + assert all_hash_1 == all_hash_2 + assert f1_hash != all_hash_1 + assert f2_hash != all_hash_1 + assert f1_hash != f2_hash + + +def test_calculate_hash_diff_filename_same_contents(tmpdir, all_files): + root = tmpdir + + files = all_files[:2] + tmpdir.join(files[0]).write('data', ensure=True) + tmpdir.join(files[1]).write('data', ensure=True) + + hash1 = _calculate_hash([str(files[0])], str(root)) + hash2 = _calculate_hash([str(files[1])], str(root)) + + assert hash1 != hash2 + + +def test_calculate_hash_different_ordering(tmpdir, all_files): + root = tmpdir + + all_files_diff_order = random.sample(all_files, k=len(all_files)) + hash1 = _calculate_hash(map(str, all_files), str(root)) + hash2 = _calculate_hash(map(str, all_files_diff_order), str(root)) + assert hash1 == hash2 + + +@pytest.mark.parametrize( + 'case', + [ + dict( + custom_bucket="myBucket", + hook_region="us-east-1", + stacker_bucket_region="us-west-1", + provider_region="eu-west-1", + 
result="us-east-1" + ), + dict( + custom_bucket="myBucket", + hook_region=None, + stacker_bucket_region="us-west-1", + provider_region="eu-west-1", + result="eu-west-1"), + dict( + custom_bucket=None, + hook_region="us-east-1", + stacker_bucket_region="us-west-1", + provider_region="eu-west-1", + result="us-west-1"), + dict( + custom_bucket=None, + hook_region="us-east-1", + stacker_bucket_region=None, + provider_region="eu-west-1", + result="eu-west-1") + ] +) +def test_select_bucket_region(case): + result = case.pop('result') + assert select_bucket_region(**case) == result - @mock_s3 - def test_idempotence(self): - bucket_name = 'test' - with self.temp_directory_with_files() as d: - functions = { +def test_follow_symlink_nonbool(run_hook): + msg = "follow_symlinks option must be a boolean" + with pytest.raises(ValueError, match=msg): + run_hook( + follow_symlinks="raiseValueError", + functions={ 'MyFunction': { - 'path': d.path + '/f1' } } + ) - self.s3.create_bucket(Bucket=bucket_name) - - previous = None - for i in range(2): - results = self.run_hook(bucket=bucket_name, - functions=functions) - self.assertIsNotNone(results) - - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - - if not previous: - previous = code.S3Key - continue - - compare(previous, code.S3Key, - prefix="zipfile name should not be modified in " - "repeated runs.") - - def test_calculate_hash(self): - with self.temp_directory_with_files() as d1: - root = d1.path - hash1 = _calculate_hash(ALL_FILES, root) - - with self.temp_directory_with_files() as d2: - root = d2.path - hash2 = _calculate_hash(ALL_FILES, root) - - with self.temp_directory_with_files() as d3: - root = d3.path - with open(os.path.join(root, ALL_FILES[0]), "w") as fd: - fd.write("modified file data") - hash3 = _calculate_hash(ALL_FILES, root) - - self.assertEqual(hash1, hash2) - self.assertNotEqual(hash1, hash3) - self.assertNotEqual(hash2, hash3) - - def 
test_calculate_hash_diff_filename_same_contents(self): - files = ["file1.txt", "f2/file2.txt"] - file1, file2 = files - with TempDirectory() as d: - root = d.path - for fname in files: - d.write(fname, b"data") - hash1 = _calculate_hash([file1], root) - hash2 = _calculate_hash([file2], root) - self.assertNotEqual(hash1, hash2) - - def test_calculate_hash_different_ordering(self): - files1 = ALL_FILES - files2 = random.sample(ALL_FILES, k=len(ALL_FILES)) - with TempDirectory() as d1: - root1 = d1.path - for fname in files1: - d1.write(fname, b"") - with TempDirectory() as d2: - root2 = d2.path - for fname in files2: - d2.write(fname, b"") - hash1 = _calculate_hash(files1, root1) - hash2 = _calculate_hash(files2, root2) - self.assertEqual(hash1, hash2) - - def test_select_bucket_region(self): - tests = ( - (("myBucket", "us-east-1", "us-west-1", "eu-west-1"), "us-east-1"), - (("myBucket", None, "us-west-1", "eu-west-1"), "eu-west-1"), - ((None, "us-east-1", "us-west-1", "eu-west-1"), "us-west-1"), - ((None, "us-east-1", None, "eu-west-1"), "eu-west-1"), - ) +@pytest.fixture +def linked_dir(tmpdir): + linked_dir = tmpdir.join('linked') + linked_dir.mksymlinkto(tmpdir.join('f1')) + return linked_dir - for args, result in tests: - self.assertEqual(select_bucket_region(*args), result) - @mock_s3 - def test_follow_symlink_nonbool(self): - msg = "follow_symlinks option must be a boolean" - with ShouldRaise(ValueError(msg)): - self.run_hook(follow_symlinks="raiseValueError", functions={ - 'MyFunction': { - } - }) - - @mock_s3 - def test_follow_symlink_true(self): - # Testing if symlinks are followed - with self.temp_directory_with_files() as d1: - root1 = d1.path - with self.temp_directory_with_files() as d2: - root2 = d2.path - os.symlink(root1 + "/f1", root2 + "/f3") - results = self.run_hook(follow_symlinks=True, functions={ - 'MyFunction': { - 'path': root2} - }) - self.assertIsNotNone(results) - - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - 
self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1/f1.py', - 'f1/__init__.py', - 'f1/f1.pyc', - 'f1/test/__init__.py', - 'f1/test/f1.py', - 'f1/test/f1.pyc', - 'f1/test2/test.txt', - 'f2/f2.js', - 'f3/__init__.py', - 'f3/f1.py', - 'f3/f1.pyc', - 'f3/test/__init__.py', - 'f3/test/f1.py', - 'f3/test/f1.pyc', - 'f3/test2/test.txt' - ]) - - @mock_s3 - def test_follow_symlink_false(self): - # testing if syminks are present and not folllowed - with self.temp_directory_with_files() as d1: - root1 = d1.path - with self.temp_directory_with_files() as d2: - root2 = d2.path - os.symlink(root1 + "/f1", root2 + "/f3") - results = self.run_hook(follow_symlinks=False, functions={ - 'MyFunction': { - 'path': root2} - }) - self.assertIsNotNone(results) - - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1/f1.py', - 'f1/__init__.py', - 'f1/f1.pyc', - 'f1/test/__init__.py', - 'f1/test/f1.py', - 'f1/test/f1.pyc', - 'f1/test2/test.txt', - 'f2/f2.js', - ]) - - @mock_s3 - def test_follow_symlink_omitted(self): - # same as test_follow_symlink_false, but default behaivor - with self.temp_directory_with_files() as d1: - root1 = d1.path - with self.temp_directory_with_files() as d2: - root2 = d2.path - os.symlink(root1 + "/f1", root2 + "/f3") - results = self.run_hook(functions={ - 'MyFunction': { - 'path': root2} - }) - self.assertIsNotNone(results) - - code = results.get('MyFunction') - self.assertIsInstance(code, Code) - self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, [ - 'f1/f1.py', - 'f1/__init__.py', - 'f1/f1.pyc', - 'f1/test/__init__.py', - 'f1/test/f1.py', - 'f1/test/f1.pyc', - 'f1/test2/test.txt', - 'f2/f2.js', - ]) +def test_follow_symlink_true(tmpdir, s3, all_files, f1_files, run_hook, + linked_dir): + root = tmpdir + results = run_hook( + follow_symlinks=True, + functions={ + 'MyFunction': { + 'path': str(root) + } + } + ) + assert results is not None + + code = 
results.get('MyFunction') + assert isinstance(code, Code) + + linked_files = [p for p in linked_dir.visit() if p.check(file=1)] + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, + all_files + linked_files, root=tmpdir) + + +def test_follow_symlink_false(tmpdir, s3, all_files, run_hook, linked_dir): + root = tmpdir + results = run_hook( + follow_symlinks=False, + functions={ + 'MyFunction': { + 'path': str(root) + } + } + ) + assert results is not None + + code = results.get('MyFunction') + assert isinstance(code, Code) + assert_s3_zip_file_list(s3, code.S3Bucket, code.S3Key, all_files, + root=tmpdir) From 515494145ac5363be6dd70ec5326c1729cc8e32a Mon Sep 17 00:00:00 2001 From: Troy Ready Date: Thu, 16 May 2019 09:15:46 -0700 Subject: [PATCH 40/74] add CAPABILITY_AUTO_EXPAND capability for macros (#731) Without this capability, creating a stack with a macro results in the following error: An error occurred (InsufficientCapabilitiesException) when calling the CreateStack operation: Requires capabilities : [CAPABILITY_AUTO_EXPAND] --- stacker/providers/aws/default.py | 3 ++- stacker/tests/providers/aws/test_default.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index 808531346..aef3fd21b 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -56,7 +56,8 @@ MAX_TAIL_RETRIES = 15 TAIL_RETRY_SLEEP = 1 GET_EVENTS_SLEEP = 1 -DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", ] +DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", + "CAPABILITY_AUTO_EXPAND"] def get_cloudformation_client(session): diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 10dc5577c..7eba91947 100644 --- a/stacker/tests/providers/aws/test_default.py +++ b/stacker/tests/providers/aws/test_default.py @@ -105,6 +105,7 @@ def generate_change_set_response(status, execution_status="AVAILABLE", ], "Capabilities": [ 
"CAPABILITY_NAMED_IAM", + "CAPABILITY_AUTO_EXPAND" ], "Tags": [ { From 3c14aa4ffdf1334d2ea3f92977e8b1ec8129f401 Mon Sep 17 00:00:00 2001 From: mromaszewicz Date: Wed, 24 Jul 2019 21:57:21 -0700 Subject: [PATCH 41/74] Fix IAM test errors due to missing version (#741) Moto is requiring policy documents to be at least version 2012-10-17, so add that property. --- stacker/hooks/iam.py | 1 + 1 file changed, 1 insertion(+) diff --git a/stacker/hooks/iam.py b/stacker/hooks/iam.py index 009888157..f04b51f28 100644 --- a/stacker/hooks/iam.py +++ b/stacker/hooks/iam.py @@ -46,6 +46,7 @@ def create_ecs_service_role(provider, context, **kwargs): raise policy = Policy( + Version='2012-10-17', Statement=[ Statement( Effect=Allow, From 8278add4f5c989949f789771b38caf8750b45272 Mon Sep 17 00:00:00 2001 From: Adam McElwee Date: Thu, 15 Aug 2019 11:23:38 -0500 Subject: [PATCH 42/74] Ensure that base64 lookup codec encodes the bytes object as a string (#742) --- CHANGELOG.md | 2 ++ stacker/lookups/handlers/file.py | 2 +- stacker/tests/lookups/handlers/test_file.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0024c839d..a1b899499 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ ## Upcoming release +- Ensure that base64 lookup codec encodes the bytes object as a string [GH-742] + ## 1.7.0 (2019-04-07) - Additional ECS unit tests [GH-696] diff --git a/stacker/lookups/handlers/file.py b/stacker/lookups/handlers/file.py index 0eb87e74c..8c3e74eef 100644 --- a/stacker/lookups/handlers/file.py +++ b/stacker/lookups/handlers/file.py @@ -218,7 +218,7 @@ def json_codec(raw, parameterized=False): CODECS = { "plain": lambda x: x, - "base64": lambda x: base64.b64encode(x.encode('utf8')), + "base64": lambda x: base64.b64encode(x.encode('utf8')).decode('utf-8'), "parameterized": lambda x: parameterized_codec(x, False), "parameterized-b64": lambda x: parameterized_codec(x, True), "yaml": lambda x: yaml_codec(x, 
parameterized=False), diff --git a/stacker/tests/lookups/handlers/test_file.py b/stacker/tests/lookups/handlers/test_file.py index 5fb27b809..157aa122d 100644 --- a/stacker/tests/lookups/handlers/test_file.py +++ b/stacker/tests/lookups/handlers/test_file.py @@ -125,7 +125,7 @@ def test_handler_plain(self, _): @mock.patch('stacker.lookups.handlers.file.read_value_from_path') def test_handler_b64(self, content_mock): plain = u'Hello, world' - encoded = base64.b64encode(plain.encode('utf8')) + encoded = base64.b64encode(plain.encode('utf8')).decode('utf-8') content_mock.return_value = plain out = FileLookup.handle(u'base64:file://tmp/test') From 925341bf9c3a3fae50ecb47a29868896adfee252 Mon Sep 17 00:00:00 2001 From: Russell Ballestrini Date: Mon, 14 Oct 2019 08:16:58 -0400 Subject: [PATCH 43/74] Update README.rst --- README.rst | 50 +++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 9 deletions(-) diff --git a/README.rst b/README.rst index 87c5f8e4e..019fda6d6 100644 --- a/README.rst +++ b/README.rst @@ -40,7 +40,7 @@ most templates is to keep them as generic as possible and then use configuration to modify them. At Remind we use stacker to manage all of our Cloudformation stacks - -both in development, staging and production without any major issues. +both in development, staging, and production without any major issues. Requirements ============ @@ -51,15 +51,47 @@ Requirements Stacker Command =============== -The stacker command is built to have sub-commands, much like git. Currently the -commands are: +The ``stacker`` command has sub-commands, similar to git. -- ``build`` which handles taking your stack config and then launching or - updating stacks as necessary. 
-- ``destroy`` which tears down your stacks -- ``diff`` which compares your currently deployed stack templates to your - config files -- ``info`` which prints information about your currently deployed stacks +Here are some examples: + + ``build``: + handles taking your stack config and then launching or updating stacks as necessary. + + ``destroy``: + tears down your stacks + + ``diff``: + compares your currently deployed stack templates to your config files + + ``info``: + prints information about your currently deployed stacks + +We document these sub-commands in full along with others, in the documentation. + + +Getting Started +=============== + +``stacker_cookiecutter``: https://github.com/cloudtools/stacker_cookiecutter + + We recommend creating your base `stacker` project using ``stacker_cookiecutter``. + This tool will install all the needed dependencies as well as the directory + structures and files. The files it produces are well documented with comments + to help people new to ``stacker``. + +``stacker_blueprints``: https://github.com/cloudtools/stacker_blueprints + + This repository holds working examples of ``stacker`` blueprints. + Each blueprint works in isolation and may be referenced, extended, or + copied into your project files. The blueprints are written in Python + and use the troposphere_ library. + +``stacker reference documentation``: + + We document all functionality and features of stacker in our extensive + reference documentation located at readthedocs_. 
+ Docker ====== From 71ad3d44aa61e676574f4e8bdd86807031b3f2c5 Mon Sep 17 00:00:00 2001 From: Russell Ballestrini Date: Mon, 14 Oct 2019 08:23:19 -0400 Subject: [PATCH 44/74] Update README.rst --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 019fda6d6..b43a8fc95 100644 --- a/README.rst +++ b/README.rst @@ -76,9 +76,9 @@ Getting Started ``stacker_cookiecutter``: https://github.com/cloudtools/stacker_cookiecutter  We recommend creating your base `stacker` project using ``stacker_cookiecutter``. - This tool will install all the needed dependencies and created the project - directory structure and files. The resulting files are well documented - with comments to explain their purpose and examples on how to extend. + This tool will install all the needed dependencies and create the project + directory structure and files. The resulting files are well documented + with comments to explain their purpose and examples of how to extend them. ``stacker_blueprints``: https://github.com/cloudtools/stacker_blueprints From 638db4795caa77912d11f7ea0eb42ee0d8fa7f59 Mon Sep 17 00:00:00 2001 From: "Eric J. Holmes" Date: Fri, 15 Nov 2019 15:52:30 -0800 Subject: [PATCH 45/74] Add link to AWS OSS Blog post --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index b43a8fc95..ae3e2e97d 100644 --- a/README.rst +++ b/README.rst @@ -91,6 +91,10 @@ Getting Started  We document all functionality and features of stacker in our extensive reference documentation located at readthedocs_. + +``AWS OSS Blog``: https://aws.amazon.com/blogs/opensource/using-aws-codepipeline-and-open-source-tools-for-at-scale-infrastructure-deployment/ + + The AWS OSS Blog has a getting started guide using stacker with AWS CodePipeline. 
Docker From 7fc9648bcc01284715876e815d04fcf9f63ca0d9 Mon Sep 17 00:00:00 2001 From: mromaszewicz Date: Tue, 19 Nov 2019 20:51:27 -0800 Subject: [PATCH 46/74] Locked stacks still have requirements (#746) This fixes 745. Locked stacks still have dependencies, since there is no difference between a locked stack and an unlocked stack at creation time. --- stacker/stack.py | 6 ------ stacker/tests/test_plan.py | 24 ++++++++++++++++++++++++ stacker/tests/test_stack.py | 26 -------------------------- 3 files changed, 24 insertions(+), 32 deletions(-) diff --git a/stacker/stack.py b/stacker/stack.py index aa5ab81b4..950fcd548 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -85,12 +85,6 @@ def required_by(self): @property def requires(self): - # By definition, a locked stack has no dependencies, because we won't - # be performing an update operation on the stack. This means, resolving - # outputs from dependencies is unnecessary. - if self.locked and not self.force: - return [] - requires = set(self.definition.requires or []) # Add any dependencies based on output lookups diff --git a/stacker/tests/test_plan.py b/stacker/tests/test_plan.py index a88c5e460..dda72569b 100644 --- a/stacker/tests/test_plan.py +++ b/stacker/tests/test_plan.py @@ -115,6 +115,30 @@ def fn(stack, status=None): self.assertEquals(calls, ['namespace-vpc.1', 'namespace-bastion.1']) + def test_execute_plan_locked(self): + # Locked stacks still need to have their requires evaluated when + # they're being created. 
+ vpc = Stack( + definition=generate_definition('vpc', 1), + context=self.context) + bastion = Stack( + definition=generate_definition('bastion', 1, requires=[vpc.name]), + locked=True, + context=self.context) + + calls = [] + + def fn(stack, status=None): + calls.append(stack.fqn) + return COMPLETE + + graph = build_graph([Step(vpc, fn), Step(bastion, fn)]) + plan = build_plan( + description="Test", graph=graph) + plan.execute(walk) + + self.assertEquals(calls, ['namespace-vpc.1', 'namespace-bastion.1']) + def test_execute_plan_filtered(self): vpc = Stack( definition=generate_definition('vpc', 1), diff --git a/stacker/tests/test_stack.py b/stacker/tests/test_stack.py index c1bba0156..ccdab6622 100644 --- a/stacker/tests/test_stack.py +++ b/stacker/tests/test_stack.py @@ -49,32 +49,6 @@ def test_stack_requires(self): stack.requires, ) - def test_stack_requires_when_locked(self): - definition = generate_definition( - base_name="vpc", - stack_id=1, - variables={ - "Var1": "${noop fakeStack3::FakeOutput}", - "Var2": ( - "some.template.value:${output fakeStack2::FakeOutput}:" - "${output fakeStack::FakeOutput}" - ), - "Var3": "${output fakeStack::FakeOutput}," - "${output fakeStack2::FakeOutput}", - }, - requires=["fakeStack"], - ) - stack = Stack(definition=definition, context=self.context) - - stack.locked = True - self.assertEqual(len(stack.requires), 0) - - # TODO(ejholmes): When the stack is in `--force`, it's not really - # locked. Maybe it would be better if `stack.locked` were false when - # the stack is in `--force`. 
- stack.force = True - self.assertEqual(len(stack.requires), 2) - def test_stack_requires_circular_ref(self): definition = generate_definition( base_name="vpc", From 106ddf31675618308aeceeb9826ad0d93e53d323 Mon Sep 17 00:00:00 2001 From: Kyle Finley Date: Sun, 9 Feb 2020 13:25:42 -0800 Subject: [PATCH 47/74] change diff to use CFN change sets instead of comparing template dicts (#744) * provider: add method to create, print, and delete a changeset for diff * refactor diff command to use stack changeset rather than diff text * provider: cleanup changeset temp stack, improve full output * update providers/aws/test_default - remove invalidated diff tests - fixing aws provider tests - add aws provider test for my changes - linting fix * blueprints: add method for retrieving output definitions * add output handling for dependent stacks * move most of logic to provider, fix issues with blueprints and rxref * update tests for additions * update docs * update changelog --- CHANGELOG.md | 1 + docs/commands.rst | 17 +- stacker/actions/diff.py | 115 +--------- stacker/blueprints/base.py | 12 + stacker/blueprints/raw.py | 11 + stacker/providers/aws/default.py | 192 ++++++++++++++-- stacker/tests/actions/test_diff.py | 76 ------ stacker/tests/providers/aws/test_default.py | 241 +++++++++++++++++++- 8 files changed, 451 insertions(+), 214 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1b899499..7711f3765 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ ## Upcoming release - Ensure that base64 lookup codec encodes the bytes object as a string [GH-742] +- Use CloudFormation Change Sets for `stacker diff` ## 1.7.0 (2019-04-07) diff --git a/docs/commands.rst b/docs/commands.rst index 9c906223b..b90ff40b4 100644 --- a/docs/commands.rst +++ b/docs/commands.rst @@ -103,7 +103,7 @@ already been destroyed). config The config file where stack configuration is located. Must be in yaml format. If `-` is provided, then the config will be read from stdin. 
- + optional arguments: -h, --help show this help message and exit -e ENV=VALUE, --env ENV=VALUE @@ -182,10 +182,17 @@ config. Diff ---- -Diff attempts to show the differences between what stacker expects to push up -into CloudFormation, and what already exists in CloudFormation. This command -is not perfect, as following things like *Ref* and *GetAtt* are not currently -possible, but it should give a good idea if anything has changed. +Diff creates a CloudFormation Change Set for each stack and displays the +resulting changes. This works for stacks that already exist and new stacks. + +For stacks that are dependent on outputs from other stacks in the same file, +stacker will infer that an update was made to the "parent" stack and invalidate +outputs from resources that were changed and replace their value with +````. This is done to +illustrate the potential blast radius of a change and assist in tracking down +why subsequent stacks could change. This inference is not perfect but takes a +"best effort" approach to showing potential change between stacks that rely on +each others outputs. :: diff --git a/stacker/actions/diff.py b/stacker/actions/diff.py index 97801ae7d..64ac74de8 100644 --- a/stacker/actions/diff.py +++ b/stacker/actions/diff.py @@ -3,16 +3,12 @@ from __future__ import absolute_import from builtins import str from builtins import object -import difflib -import json import logging from operator import attrgetter from .base import plan, build_walker from . import build -from ..ui import ui from .. import exceptions -from ..util import parse_cloudformation_template from ..status import ( NotSubmittedStatus, NotUpdatedStatus, @@ -148,49 +144,6 @@ def diff_parameters(old_params, new_params): return diff -def normalize_json(template): - """Normalize our template for diffing. 
- - Args: - template(str): string representing the template - - Returns: - list: json representation of the parameters - """ - obj = parse_cloudformation_template(template) - json_str = json.dumps( - obj, sort_keys=True, indent=4, default=str, separators=(',', ': '), - ) - result = [] - lines = json_str.split("\n") - for line in lines: - result.append(line + "\n") - return result - - -def build_stack_changes(stack_name, new_stack, old_stack, new_params, - old_params): - """Builds a list of strings to represent the the parameters (if changed) - and stack diff""" - from_file = "old_%s" % (stack_name,) - to_file = "new_%s" % (stack_name,) - lines = difflib.context_diff( - old_stack, new_stack, - fromfile=from_file, tofile=to_file, - n=7) # ensure at least a few lines of context are displayed afterward - - template_changes = list(lines) - log_lines = [] - if not template_changes: - log_lines.append("*** No changes to template ***") - param_diffs = diff_parameters(old_params, new_params) - if param_diffs: - log_lines.append(format_params_diff(param_diffs)) - if template_changes: - log_lines.append("".join(template_changes)) - return log_lines - - class Action(build.Action): """ Responsible for diff'ing CF stacks in AWS and on disk @@ -198,25 +151,10 @@ class Action(build.Action): are determined automatically based on references to output values from other stacks). - The plan is then used to pull the current CloudFormation template from - AWS and compare it to the generated templated based on the current - config. + The plan is then used to create a changeset for a stack using a + generated template based on the current config. 
""" - def _build_new_template(self, stack, parameters): - """Constructs the parameters & contents of a new stack and returns a - list(str) representation to be output to the user - """ - log_lines = ["New template parameters:"] - for param in sorted(parameters, - key=lambda param: param['ParameterKey']): - log_lines.append("%s = %s" % (param['ParameterKey'], - param['ParameterValue'])) - - log_lines.append("\nNew template contents:") - log_lines.append("".join(stack)) - return log_lines - def _diff_stack(self, stack, **kwargs): """Handles the diffing a stack in CloudFormation vs our config""" if self.cancel.wait(0): @@ -229,51 +167,18 @@ def _diff_stack(self, stack, **kwargs): return NotUpdatedStatus() provider = self.build_provider(stack) - - provider_stack = provider.get_stack(stack.fqn) - - # get the current stack template & params from AWS - try: - [old_template, old_params] = provider.get_stack_info( - provider_stack) - except exceptions.StackDoesNotExist: - old_template = None - old_params = {} + tags = build.build_stack_tags(stack) stack.resolve(self.context, provider) - # generate our own template & params parameters = self.build_parameters(stack) - new_params = dict() - for p in parameters: - new_params[p['ParameterKey']] = p['ParameterValue'] - new_template = stack.blueprint.rendered - new_stack = normalize_json(new_template) - - output = ["============== Stack: %s ==============" % (stack.name,)] - # If this is a completely new template dump our params & stack - if not old_template: - output.extend(self._build_new_template(new_stack, parameters)) - else: - # Diff our old & new stack/parameters - old_template = parse_cloudformation_template(old_template) - if isinstance(old_template, str): - # YAML templates returned from CFN need parsing again - # "AWSTemplateFormatVersion: \"2010-09-09\"\nParam..." 
- # -> - # AWSTemplateFormatVersion: "2010-09-09" - old_template = parse_cloudformation_template(old_template) - old_stack = normalize_json( - json.dumps(old_template, - sort_keys=True, - indent=4, - default=str) - ) - output.extend(build_stack_changes(stack.name, new_stack, old_stack, - new_params, old_params)) - ui.info('\n' + '\n'.join(output)) - stack.set_outputs( - provider.get_output_dict(provider_stack)) + try: + outputs = provider.get_stack_changes( + stack, self._template(stack.blueprint), parameters, tags + ) + stack.set_outputs(outputs) + except exceptions.StackDidNotChange: + logger.info('No changes: %s', stack.fqn) return COMPLETE diff --git a/stacker/blueprints/base.py b/stacker/blueprints/base.py index 1c32acc88..ec1cd52a7 100644 --- a/stacker/blueprints/base.py +++ b/stacker/blueprints/base.py @@ -339,6 +339,18 @@ def get_parameter_definitions(self): output[var_name] = cfn_attrs return output + def get_output_definitions(self): + """Gets the output definitions. + + Returns: + dict: output definitions. Keys are output names, the values + are dicts containing key/values for various output + properties. + + """ + return {k: output.to_dict() for k, output in + self.template.outputs.items()} + def get_required_parameter_definitions(self): """Returns all template parameters that do not have a default value. diff --git a/stacker/blueprints/raw.py b/stacker/blueprints/raw.py index 2a5f1c444..28c1c31ae 100644 --- a/stacker/blueprints/raw.py +++ b/stacker/blueprints/raw.py @@ -137,6 +137,17 @@ def get_parameter_definitions(self): """ return get_template_params(self.to_dict()) + def get_output_definitions(self): + """Gets the output definitions. + + Returns: + dict: output definitions. Keys are output names, the values + are dicts containing key/values for various output + properties. + + """ + return self.to_dict().get('Outputs', {}) + def resolve_variables(self, provided_variables): """Resolve the values of the blueprint variables. 
diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index aef3fd21b..cbd49814e 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -21,6 +21,7 @@ from ..base import BaseProvider from ... import exceptions from ...ui import ui +from ...util import parse_cloudformation_template from stacker.session_cache import get_session from ...actions.diff import ( @@ -144,8 +145,49 @@ def requires_replacement(changeset): "Replacement", False) == "True"] +def output_full_changeset(full_changeset=None, params_diff=None, + answer=None, fqn=None): + """Optionally output full changeset. + + Args: + full_changeset (list, optional): A list of the full changeset that will + be output if the user specifies verbose. + params_diff (list, optional): A list of DictValue detailing the + differences between two parameters returned by + :func:`stacker.actions.diff.diff_dictionaries` + answer (str, optional): predetermined answer to the prompt if it has + already been answered or inferred. + fqn (str, optional): fully qualified name of the stack. + + """ + if not answer: + answer = ui.ask('Show full change set? [y/n] ').lower() + if answer == 'n': + return + if answer in ['y', 'v']: + if fqn: + msg = '%s full changeset' % (fqn) + else: + msg = 'Full changeset' + if params_diff: + logger.info( + "%s:\n\n%s\n%s", + msg, + format_params_diff(params_diff), + yaml.safe_dump(full_changeset), + ) + else: + logger.info( + "%s:\n%s", + msg, + yaml.safe_dump(full_changeset), + ) + return + raise exceptions.CancelExecution + + def ask_for_approval(full_changeset=None, params_diff=None, - include_verbose=False): + include_verbose=False, fqn=None): """Prompt the user for approval to execute a change set. 
Args: @@ -155,7 +197,8 @@ def ask_for_approval(full_changeset=None, params_diff=None, differences between two parameters returned by :func:`stacker.actions.diff.diff_dictionaries` include_verbose (bool, optional): Boolean for whether or not to include - the verbose option + the verbose option. + fqn (str): fully qualified name of the stack. """ approval_options = ['y', 'n'] @@ -166,18 +209,9 @@ def ask_for_approval(full_changeset=None, params_diff=None, '/'.join(approval_options))).lower() if include_verbose and approve == "v": - if params_diff: - logger.info( - "Full changeset:\n\n%s\n%s", - format_params_diff(params_diff), - yaml.safe_dump(full_changeset), - ) - else: - logger.info( - "Full changeset:\n%s", - yaml.safe_dump(full_changeset), - ) - return ask_for_approval() + output_full_changeset(full_changeset=full_changeset, + params_diff=params_diff, answer=approve, fqn=fqn) + return ask_for_approval(fqn=fqn) elif approve != "y": raise exceptions.CancelExecution @@ -544,9 +578,11 @@ class Provider(BaseProvider): RECREATION_STATUSES = ( "CREATE_FAILED", "ROLLBACK_FAILED", - "ROLLBACK_COMPLETE", + "ROLLBACK_COMPLETE" ) + REVIEW_STATUS = "REVIEW_IN_PROGRESS" + def __init__(self, session, region=None, interactive=False, replacements_only=False, recreate_failed=False, service_role=None, **kwargs): @@ -589,6 +625,9 @@ def is_stack_rolling_back(self, stack, **kwargs): def is_stack_failed(self, stack, **kwargs): return self.get_stack_status(stack) in self.FAILED_STATUSES + def is_stack_in_review(self, stack, **kwargs): + return self.get_stack_status(stack) == self.REVIEW_STATUS + def tail_stack(self, stack, cancel, log_func=None, **kwargs): def _log_func(e): event_args = [e['ResourceStatus'], e['ResourceType'], @@ -829,7 +868,7 @@ def prepare_stack_for_update(self, stack, tags): 'Proceed carefully!\n\n' % (stack_name, stack_status)) sys.stdout.flush() - ask_for_approval(include_verbose=False) + ask_for_approval(include_verbose=False, fqn=stack_name) 
logger.warn('Destroying stack \"%s\" for re-creation', stack_name) self.destroy_stack(stack) @@ -939,6 +978,7 @@ def interactive_update_stack(self, fqn, template, old_parameters, full_changeset=full_changeset, params_diff=params_diff, include_verbose=True, + fqn=fqn, ) finally: ui.unlock() @@ -1059,8 +1099,128 @@ def get_stack_info(self, stack): parameters = self.params_as_dict(stack.get('Parameters', [])) + if isinstance(template, str): # handle yaml templates + template = parse_cloudformation_template(template) + return [json.dumps(template), parameters] + def get_stack_changes(self, stack, template, parameters, + tags, **kwargs): + """Get the changes from a ChangeSet. + + Args: + stack (:class:`stacker.stack.Stack`): the stack to get changes + template (:class:`stacker.providers.base.Template`): A Template + object to compaired to. + parameters (list): A list of dictionaries that defines the + parameter list to be applied to the Cloudformation stack. + tags (list): A list of dictionaries that defines the tags + that should be applied to the Cloudformation stack. + + Returns: + dict: Stack outputs with inferred changes. 
+ + """ + try: + stack_details = self.get_stack(stack.fqn) + # handling for orphaned changeset temp stacks + if self.get_stack_status( + stack_details) == self.REVIEW_STATUS: + raise exceptions.StackDoesNotExist(stack.fqn) + _old_template, old_params = self.get_stack_info( + stack_details + ) + old_template = parse_cloudformation_template(_old_template) + change_type = 'UPDATE' + except exceptions.StackDoesNotExist: + old_params = {} + old_template = {} + change_type = 'CREATE' + + changes, change_set_id = create_change_set( + self.cloudformation, stack.fqn, template, parameters, tags, + change_type, service_role=self.service_role, **kwargs + ) + new_parameters_as_dict = self.params_as_dict( + [x + if 'ParameterValue' in x + else {'ParameterKey': x['ParameterKey'], + 'ParameterValue': old_params[x['ParameterKey']]} + for x in parameters] + ) + params_diff = diff_parameters(old_params, new_parameters_as_dict) + + if changes or params_diff: + ui.lock() + try: + if self.interactive: + output_summary(stack.fqn, 'changes', changes, + params_diff, + replacements_only=self.replacements_only) + output_full_changeset(full_changeset=changes, + params_diff=params_diff, + fqn=stack.fqn) + else: + output_full_changeset(full_changeset=changes, + params_diff=params_diff, + answer='y', fqn=stack.fqn) + finally: + ui.unlock() + + self.cloudformation.delete_change_set( + ChangeSetName=change_set_id + ) + + # ensure current stack outputs are loaded + self.get_outputs(stack.fqn) + + # infer which outputs may have changed + refs_to_invalidate = [] + for change in changes: + resc_change = change.get('ResourceChange', {}) + if resc_change.get('Type') == 'Add': + continue # we don't care about anything new + # scope of changes that can invalidate a change + if resc_change and (resc_change.get('Replacement') == 'True' or + 'Properties' in resc_change['Scope']): + logger.debug('%s added to invalidation list for %s', + resc_change['LogicalResourceId'], stack.fqn) + 
refs_to_invalidate.append(resc_change['LogicalResourceId']) + + # invalidate cached outputs with inferred changes + for output, props in old_template.get('Outputs', {}).items(): + if any(r in str(props['Value']) for r in refs_to_invalidate): + self._outputs[stack.fqn].pop(output) + logger.debug('Removed %s from the outputs of %s', + output, stack.fqn) + + # push values for new + invalidated outputs to outputs + for output_name, output_params in \ + stack.blueprint.get_output_definitions().items(): + if output_name not in self._outputs[stack.fqn]: + self._outputs[stack.fqn][output_name] = ( + ''.format( + stack.fqn, output_name, + str(output_params['Value']) + ) + ) + + # when creating a changeset for a new stack, CFN creates a temporary + # stack with a status of REVIEW_IN_PROGRESS. this is only removed if + # the changeset is executed or it is manually deleted. + if change_type == 'CREATE': + try: + temp_stack = self.get_stack(stack.fqn) + if self.is_stack_in_review(temp_stack): + logger.debug('Removing temporary stack that is created ' + 'with a ChangeSet of type "CREATE"') + self.destroy_stack(temp_stack) + except exceptions.StackDoesNotExist: + # not an issue if the stack was already cleaned up + logger.debug('Stack does not exist: %s', stack.fqn) + + return self.get_outputs(stack.fqn) + @staticmethod def params_as_dict(parameters_list): parameters = dict() diff --git a/stacker/tests/actions/test_diff.py b/stacker/tests/actions/test_diff.py index dc1e8245a..10963a8bf 100644 --- a/stacker/tests/actions/test_diff.py +++ b/stacker/tests/actions/test_diff.py @@ -1,14 +1,12 @@ from __future__ import print_function from __future__ import division from __future__ import absolute_import -import os import unittest from operator import attrgetter from stacker.actions.diff import ( diff_dictionaries, diff_parameters, - normalize_json, DictValue ) @@ -86,77 +84,3 @@ def test_diff_parameters_no_changes(self): param_diffs = diff_parameters(old_params, new_params) 
self.assertEquals(param_diffs, []) - - -class TestDiffFunctions(unittest.TestCase): - """Test functions in diff.""" - - def test_normalize_json(self): - """Ensure normalize_json parses yaml correctly.""" - with open(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), # noqa - 'fixtures', - 'cfn_template.yaml'), 'r') as yamlfile: - template = yamlfile.read() - normalized_template = [ - '{\n', - ' "AWSTemplateFormatVersion": "2010-09-09",\n', - ' "Description": "TestTemplate",\n', - ' "Outputs": {\n', - ' "DummyId": {\n', - ' "Value": "dummy-1234"\n', - ' }\n', - ' },\n', - ' "Parameters": {\n', - ' "Param1": {\n', - ' "Type": "String"\n', - ' },\n', - ' "Param2": {\n', - ' "Default": "default",\n', - ' "Type": "CommaDelimitedList"\n', - ' }\n', - ' },\n', - ' "Resources": {\n', - ' "Bucket": {\n', - ' "Properties": {\n', - ' "BucketName": {\n', - ' "Fn::Join": [\n', - ' "-",\n', - ' [\n', - ' {\n', - ' "Ref": "AWS::StackName"\n', - ' },\n', - ' {\n', - ' "Ref": "AWS::Region"\n', - ' }\n', - ' ]\n', - ' ]\n', - ' }\n', - ' },\n', - ' "Type": "AWS::S3::Bucket"\n', - ' },\n', - ' "Dummy": {\n', - ' "Type": "AWS::CloudFormation::WaitConditionHandle"\n', - ' }\n', - ' }\n', - '}\n' - ] - self.assertEquals(normalized_template, normalize_json(template)) - - def test_normalize_json_date(self): - """Ensure normalize_json handles objects loaded as datetime objects""" - - template = """ -AWSTemplateFormatVersion: '2010-09-09' -Description: ECS Cluster Application -Resources: - ECSTaskRoleDefault: - Type: AWS::IAM::Role - Properties: - AssumeRolePolicyDocument: - Version: 2012-10-17 # datetime.date(2012, 10, 17) - Statement: - - Effect: Allow - Principal: - Service: ecs-tasks.amazonaws.com - Action: sts:AssumeRole""" - self.assertTrue(normalize_json(template)) diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 7eba91947..83a54a8f8 100644 --- a/stacker/tests/providers/aws/test_default.py +++ 
b/stacker/tests/providers/aws/test_default.py @@ -4,6 +4,7 @@ from builtins import range import copy from datetime import datetime +import os.path import random import string import threading @@ -31,6 +32,7 @@ create_change_set, summarize_params_diff, generate_cloudformation_args, + output_full_changeset ) from stacker import exceptions @@ -59,12 +61,40 @@ def generate_describe_stacks_stack(stack_name, tags = tags or [] return { "StackName": stack_name, + "StackId": stack_name, "CreationTime": creation_time or datetime(2015, 1, 1), "StackStatus": stack_status, "Tags": tags } +def generate_get_template(file_name='cfn_template.json', + stages_available=['Original']): + fixture_dir = os.path.join(os.path.dirname(__file__), '../../fixtures') + with open(os.path.join(fixture_dir, file_name), 'r') as f: + return { + "StagesAvailable": stages_available, + "TemplateBody": f.read() + } + + +def generate_stack_object(stack_name, outputs=None): + mock_stack = MagicMock(['name', 'fqn', 'blueprint']) + if not outputs: + outputs = { + "FakeOutput": { + "Value": {"Ref": "FakeResource"} + } + } + mock_stack.name = stack_name + mock_stack.fqn = stack_name + mock_stack.blueprint = MagicMock(['get_output_definitions']) + mock_stack.blueprint.get_output_definitions = MagicMock( + return_value=outputs + ) + return mock_stack + + def generate_resource_change(replacement=True): resource_change = { "Action": "Modify", @@ -195,8 +225,7 @@ def test_summarize_params_diff(self): self.assertEqual(summarize_params_diff(only_removed_params_diff), "Parameters Removed: ParamD\n") - @patch("stacker.providers.aws.default.format_params_diff") - def test_ask_for_approval(self, patched_format): + def test_ask_for_approval(self): get_input_path = "stacker.ui.get_raw_input" with patch(get_input_path, return_value="y"): self.assertIsNone(ask_for_approval([], [], None)) @@ -207,16 +236,15 @@ def test_ask_for_approval(self, patched_format): ask_for_approval([], []) with patch(get_input_path, 
side_effect=["v", "n"]) as mock_get_input: - with patch("yaml.safe_dump") as mock_safe_dump: + with patch( + "stacker.providers.aws.default.output_full_changeset" + ) as mock_full_changeset: with self.assertRaises(exceptions.CancelExecution): ask_for_approval([], [], True) - self.assertEqual(mock_safe_dump.call_count, 1) + self.assertEqual(mock_full_changeset.call_count, 1) self.assertEqual(mock_get_input.call_count, 2) - self.assertEqual(patched_format.call_count, 0) - - @patch("stacker.providers.aws.default.format_params_diff") - def test_ask_for_approval_with_params_diff(self, patched_format): + def test_ask_for_approval_with_params_diff(self): get_input_path = "stacker.ui.get_raw_input" params_diff = [ DictValue('ParamA', None, 'new-param-value'), @@ -231,12 +259,47 @@ def test_ask_for_approval_with_params_diff(self, patched_format): ask_for_approval([], params_diff) with patch(get_input_path, side_effect=["v", "n"]) as mock_get_input: - with patch("yaml.safe_dump") as mock_safe_dump: + with patch( + "stacker.providers.aws.default.output_full_changeset" + ) as mock_full_changeset: with self.assertRaises(exceptions.CancelExecution): ask_for_approval([], params_diff, True) - self.assertEqual(mock_safe_dump.call_count, 1) + self.assertEqual(mock_full_changeset.call_count, 1) self.assertEqual(mock_get_input.call_count, 2) + @patch("stacker.providers.aws.default.format_params_diff") + @patch('stacker.providers.aws.default.yaml.safe_dump') + def test_output_full_changeset(self, mock_safe_dump, patched_format): + get_input_path = "stacker.ui.get_raw_input" + + safe_dump_counter = 0 + + for v in ['y', 'v', 'Y', 'V']: + with patch(get_input_path, return_value=v) as prompt: + self.assertIsNone(output_full_changeset(full_changeset=[], + params_diff=[], + fqn=None)) + self.assertEqual(prompt.call_count, 1) + safe_dump_counter += 1 + self.assertEqual(mock_safe_dump.call_count, safe_dump_counter) + self.assertEqual(patched_format.call_count, 0) + + for v in ['n', 'N']: + 
with patch(get_input_path, return_value=v) as prompt: + output_full_changeset(full_changeset=[], params_diff=[], + answer=None, fqn=None) + self.assertEqual(prompt.call_count, 1) + self.assertEqual(mock_safe_dump.call_count, safe_dump_counter) + self.assertEqual(patched_format.call_count, 0) + + with self.assertRaises(exceptions.CancelExecution): + output_full_changeset(full_changeset=[], params_diff=[], + answer='x', fqn=None) + + output_full_changeset(full_changeset=[], params_diff=['mock'], + answer='y', fqn=None) + safe_dump_counter += 1 + self.assertEqual(mock_safe_dump.call_count, safe_dump_counter) self.assertEqual(patched_format.call_count, 1) def test_wait_till_change_set_complete_success(self): @@ -566,6 +629,110 @@ def test_noninteractive_changeset_update_with_stack_policy(self): parameters=[], stack_policy=Template(body="{}"), tags=[], ) + @patch('stacker.providers.aws.default.output_full_changeset') + def test_get_stack_changes_update(self, mock_output_full_cs): + stack_name = "MockStack" + mock_stack = generate_stack_object(stack_name) + + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack(stack_name)]} + ) + self.stubber.add_response( + 'get_template', + generate_get_template('cfn_template.yaml') + ) + self.stubber.add_response( + "create_change_set", + {'Id': 'CHANGESETID', 'StackId': stack_name} + ) + changes = [] + changes.append(generate_change()) + + self.stubber.add_response( + "describe_change_set", + generate_change_set_response( + status="CREATE_COMPLETE", execution_status="AVAILABLE", + changes=changes, + ) + ) + self.stubber.add_response("delete_change_set", {}) + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack(stack_name)]} + ) + + with self.stubber: + result = self.provider.get_stack_changes( + stack=mock_stack, template=Template( + url="http://fake.template.url.com/" + ), parameters=[], tags=[]) + + 
mock_output_full_cs.assert_called_with(full_changeset=changes, + params_diff=[], + fqn=stack_name, + answer='y') + expected_outputs = { + 'FakeOutput': ''.format( + str({"Ref": "FakeResource"}) + ) + } + self.assertEqual(self.provider.get_outputs(stack_name), + expected_outputs) + self.assertEqual(result, expected_outputs) + + @patch('stacker.providers.aws.default.output_full_changeset') + def test_get_stack_changes_create(self, mock_output_full_cs): + stack_name = "MockStack" + mock_stack = generate_stack_object(stack_name) + + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack( + stack_name, stack_status='REVIEW_IN_PROGRESS' + )]} + ) + self.stubber.add_response( + "create_change_set", + {'Id': 'CHANGESETID', 'StackId': stack_name} + ) + changes = [] + changes.append(generate_change()) + + self.stubber.add_response( + "describe_change_set", + generate_change_set_response( + status="CREATE_COMPLETE", execution_status="AVAILABLE", + changes=changes, + ) + ) + self.stubber.add_response("delete_change_set", {}) + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack( + stack_name, stack_status='REVIEW_IN_PROGRESS' + )]} + ) + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack( + stack_name, stack_status='REVIEW_IN_PROGRESS' + )]} + ) + + self.stubber.add_response("delete_stack", {}) + + with self.stubber: + self.provider.get_stack_changes( + stack=mock_stack, template=Template( + url="http://fake.template.url.com/" + ), parameters=[], tags=[]) + + mock_output_full_cs.assert_called_with(full_changeset=changes, + params_diff=[], + fqn=stack_name, + answer='y') + def test_tail_stack_retry_on_missing_stack(self): stack_name = "SlowToCreateStack" stack = MagicMock(spec=Stack) @@ -698,7 +865,8 @@ def test_update_stack_execute_success_no_stack_policy(self, patched_approval.assert_called_with(full_changeset=changes, params_diff=[], - 
include_verbose=True) + include_verbose=True, + fqn=stack_name) self.assertEqual(patched_approval.call_count, 1) @@ -737,7 +905,8 @@ def test_update_stack_execute_success_with_stack_policy(self, patched_approval.assert_called_with(full_changeset=changes, params_diff=[], - include_verbose=True) + include_verbose=True, + fqn=stack_name) self.assertEqual(patched_approval.call_count, 1) @@ -758,3 +927,51 @@ def test_select_update_method(self): self.provider.select_update_method(**i[0]), i[1] ) + + @patch('stacker.providers.aws.default.output_full_changeset') + @patch('stacker.providers.aws.default.output_summary') + def test_get_stack_changes_interactive(self, mock_output_summary, + mock_output_full_cs): + stack_name = "MockStack" + mock_stack = generate_stack_object(stack_name) + + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack(stack_name)]} + ) + self.stubber.add_response( + 'get_template', + generate_get_template('cfn_template.yaml') + ) + self.stubber.add_response( + "create_change_set", + {'Id': 'CHANGESETID', 'StackId': stack_name} + ) + changes = [] + changes.append(generate_change()) + + self.stubber.add_response( + "describe_change_set", + generate_change_set_response( + status="CREATE_COMPLETE", execution_status="AVAILABLE", + changes=changes, + ) + ) + self.stubber.add_response("delete_change_set", {}) + self.stubber.add_response( + 'describe_stacks', + {'Stacks': [generate_describe_stacks_stack(stack_name)]} + ) + + with self.stubber: + self.provider.get_stack_changes( + stack=mock_stack, template=Template( + url="http://fake.template.url.com/" + ), parameters=[], tags=[]) + + mock_output_summary.assert_called_with(stack_name, 'changes', + changes, [], + replacements_only=False) + mock_output_full_cs.assert_called_with(full_changeset=changes, + params_diff=[], + fqn=stack_name) From 46bb7fef3c307e14ce6d8c18051af6f5df93e51f Mon Sep 17 00:00:00 2001 From: fbattistella 
<53009741+fbattistella@users.noreply.github.com> Date: Mon, 17 Feb 2020 01:15:45 -0500 Subject: [PATCH 48/74] Doc Update: Sturdy -> Onica (#739) * sturdy -> onica sturdy joined onica a good while ago ;) * Update organizations_using_stacker.rst --- docs/organizations_using_stacker.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/organizations_using_stacker.rst b/docs/organizations_using_stacker.rst index df625fb80..0faec3ca5 100644 --- a/docs/organizations_using_stacker.rst +++ b/docs/organizations_using_stacker.rst @@ -18,16 +18,16 @@ Remind_ .. _Remind: https://www.remind.com/ -`Sturdy Networks`_ +`Onica`_ - At Sturdy we design, architect, deploy, support custom SaaS applications, and - offer custom solutions for the Internet of Things in AWS. We are an Amazon - Web Services Advanced Consulting Partner that migrates new AWS customers from - on-prem to the cloud as well as offers guidance and Managed DevOps services - to customers already in AWS. - + Onica is a global technology consulting company at the forefront of + cloud computing. Through collaboration with Amazon Web Services, + we help customers embrace a broad spectrum of innovative solutions. + From migration strategy to operational excellence, cloud native + development, and immersive transformation. Onica is a full spectrum + AWS integrator. -.. _`Sturdy Networks`: https://sturdy.cloud +.. _`Onica`: https://www.onica.com AltoStack_ From 471cc28bbfea4049f51c83a7dd3b2f0c7e011904 Mon Sep 17 00:00:00 2001 From: mromaszewicz Date: Sun, 16 Feb 2020 22:20:00 -0800 Subject: [PATCH 49/74] Add YAML environment file support (#740) * Add YAML environment file support Environment files ending in .yaml or .yml will be rendered using a different strategy. The env file is treated as a dict, with the value for each key being a parsed YAML object. After parsing the config, we look for things that look like variable references, and replace the references with objects. 
* Fix lint errors * Fix more lint errors Now with the correct version of flake8 * Fix IAM test errors due to missing version Moto is requiring policy documents to be at least version 2012-10-17, so add that property. * Fix python 2.7 issues Python 2.7 strings and Python 3 strings work differently, and regular expression matching is different too. Rewrite in a way which works with both language versions. * Remove debug printout * Fix IAM test errors due to missing version (#741) Moto is requiring policy documents to be at least version 2012-10-17, so add that property. * Use case insensitive file match When checking for yaml suffixes on environment files, ignore case. * Add YAML environment file support Environment files ending in .yaml or .yml will be rendered using a different strategy. The env file is treated as a dict, with the value for each key being a parsed YAML object. After parsing the config, we look for things that look like variable references, and replace the references with objects. * Fix lint errors * Fix more lint errors Now with the correct version of flake8 * Fix python 2.7 issues Python 2.7 strings and Python 3 strings work differently, and regular expression matching is different too. Rewrite in a way which works with both language versions. * Remove debug printout * Use case insensitive file match When checking for yaml suffixes on environment files, ignore case. 
* Small documentation update Yaml config example should use .yml file suffix, not .env Co-authored-by: Marcin Romaszewicz <47459980+deepmap-marcinr@users.noreply.github.com> --- docs/environments.rst | 62 ++++++++++++- stacker/commands/stacker/base.py | 30 ++++-- stacker/config/__init__.py | 146 ++++++++++++++++++++++++++---- stacker/environment.py | 30 +++++- stacker/exceptions.py | 8 ++ stacker/tests/test_config.py | 84 ++++++++++++++++- stacker/tests/test_environment.py | 7 +- 7 files changed, 336 insertions(+), 31 deletions(-) diff --git a/docs/environments.rst b/docs/environments.rst index 420dd20a2..77ce1b87b 100644 --- a/docs/environments.rst +++ b/docs/environments.rst @@ -3,7 +3,15 @@ Environments ============ When running stacker, you can optionally provide an "environment" file. The -stacker config file will be interpolated as a `string.Template +environment file defines values, which can then be referred to by name from +your stack config file. The environment file is interpreted as YAML if it +ends in `.yaml` or `.yml`, otherwise it's interpreted as simple key/value +pairs. + +Key/Value environments +---------------------- + +The stacker config file will be interpolated as a `string.Template `_ using the key/value pairs from the environment file. The format of the file is a single key/value per line, separated by a colon (**:**), like this:: @@ -43,6 +51,58 @@ files in your config. For example:: variables: InstanceType: ${web_instance_type} +YAML environments +----------------- + +YAML environments allow for more complex environment configuration rather +than simple text substitution, and support YAML features like anchors and +references. 
To build on the example above, let's define a stack that's +a little more complex:: + + stacks: + - name: webservers + class_path: stacker_blueprints.asg.AutoscalingGroup + variables: + InstanceType: ${web_instance_type} + IngressCIDRsByPort: ${ingress_cidrs_by_port} + +We've defined a stack which expects a list of ingress CIDR's allowed access to +each port. Our environment files would look like this:: + + # in the file: stage.yml + web_instance_type: m3.medium + ingress_cidrs_by_port: + 80: + - 192.168.1.0/8 + 8080: + - 0.0.0.0/0 + + # in the file: prod.env + web_instance_type: c4.xlarge + ingress_cidrs_by_port: + 80: + - 192.168.1.0/8 + 443: + - 10.0.0.0/16 + - 10.1.0.0/16 + +The YAML format allows for specifying lists, maps, and supports all `pyyaml` +functionality allowed in `safe_load()` function. + +Variable substitution in the YAML case is a bit more complex than in the +`string.Template` case. Objects can only be substituted for variables in the +case where we perform a full substitution, such as this:: + + vpcID: ${vpc_variable} + +We can not substitute an object in a sub-string, such as this:: + + vpcID: prefix-${vpc_variable} + +It makes no sense to substitute a complex object in this case, and we will raise +an error if that happens. You can still perform this substitution with +primitives; numbers, strings, but not dicts or lists. + .. note:: Namespace defined in the environment file has been deprecated in favor of defining the namespace in the config and will be removed in a future release. 
diff --git a/stacker/commands/stacker/base.py b/stacker/commands/stacker/base.py index c3f2084d7..f49aa64b4 100644 --- a/stacker/commands/stacker/base.py +++ b/stacker/commands/stacker/base.py @@ -7,8 +7,13 @@ import signal from collections import Mapping import logging +import os.path -from ...environment import parse_environment +from ...environment import ( + DictWithSourceType, + parse_environment, + parse_yaml_environment +) logger = logging.getLogger(__name__) @@ -63,8 +68,14 @@ def key_value_arg(string): def environment_file(input_file): """Reads a stacker environment file and returns the resulting data.""" + + is_yaml = os.path.splitext(input_file)[1].lower() in ['.yaml', '.yml'] + with open(input_file) as fd: - return parse_environment(fd.read()) + if is_yaml: + return parse_yaml_environment(fd.read()) + else: + return parse_environment(fd.read()) class BaseCommand(object): @@ -158,12 +169,17 @@ def add_arguments(self, parser): "-v", "--verbose", action="count", default=0, help="Increase output verbosity. May be specified up to twice.") parser.add_argument( - "environment", type=environment_file, nargs='?', default={}, - help="Path to a simple `key: value` pair environment file. The " - "values in the environment file can be used in the stack " - "config as if it were a string.Template type: " + "environment", type=environment_file, nargs='?', + default=DictWithSourceType('simple'), + help="Path to an environment file. The file can be a simple " + "`key: value` pair environment file, or a YAML file ending in" + ".yaml or .yml. In the simple key:value case, values in the " + "environment file can be used in the stack config as if it " + "were a string.Template type: " "https://docs.python.org/2/library/" - "string.html#template-strings.") + "string.html#template-strings. 
In the YAML case, variable" + "references in the stack config are replaced with the objects" + "in the environment after parsing") parser.add_argument( "config", type=argparse.FileType(), help="The config file where stack configuration is located. Must " diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index 5fdde4162..4c2192b28 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -3,10 +3,12 @@ from __future__ import absolute_import from future import standard_library standard_library.install_aliases() +from past.types import basestring from builtins import str import copy import sys import logging +import re from string import Template from io import StringIO @@ -32,6 +34,7 @@ from ..lookups import register_lookup_handler from ..util import merge_map, yaml_to_ordered_dict, SourceProcessor from .. import exceptions +from ..environment import DictWithSourceType # register translators (yaml constructors) from .translators import * # NOQA @@ -83,33 +86,138 @@ def render(raw_config, environment=None): Args: raw_config (str): the raw stacker configuration string. - environment (dict, optional): any environment values that should be - passed to the config + environment (DictWithSourceType, optional): any environment values that + should be passed to the config Returns: str: the stacker configuration populated with any values passed from the environment """ - - t = Template(raw_config) - buff = StringIO() if not environment: environment = {} - try: - substituted = t.substitute(environment) - except KeyError as e: - raise exceptions.MissingEnvironment(e.args[0]) - except ValueError: - # Support "invalid" placeholders for lookup placeholders. 
- substituted = t.safe_substitute(environment) - - if not isinstance(substituted, str): - substituted = substituted.decode('utf-8') - - buff.write(substituted) - buff.seek(0) - return buff.read() + # If we have a naked dict, we got here through the old non-YAML path, so + # we can't have a YAML config file. + is_yaml = False + if type(environment) == DictWithSourceType: + is_yaml = environment.source_type == 'yaml' + + if is_yaml: + # First, read the config as yaml + config = yaml.safe_load(raw_config) + + # Next, we need to walk the yaml structure, and find all things which + # look like variable references. This regular expression is copied from + # string.template to match variable references identically as the + # simple configuration case below. We've got two cases of this pattern, + # since python 2.7 doesn't support re.fullmatch(), so we have to add + # the end of line anchor to the inner patterns. + idpattern = r'[_a-z][_a-z0-9]*' + pattern = r""" + %(delim)s(?: + (?P%(id)s) | # delimiter and a Python identifier + {(?P%(id)s)} # delimiter and a braced identifier + ) + """ % {'delim': re.escape('$'), + 'id': idpattern, + } + full_pattern = r""" + %(delim)s(?: + (?P%(id)s)$ | # delimiter and a Python identifier + {(?P%(id)s)}$ # delimiter and a braced identifier + ) + """ % {'delim': re.escape('$'), + 'id': idpattern, + } + exp = re.compile(pattern, re.IGNORECASE | re.VERBOSE) + full_exp = re.compile(full_pattern, re.IGNORECASE | re.VERBOSE) + new_config = substitute_references(config, environment, exp, full_exp) + # Now, re-encode the whole thing as YAML and return that. + return yaml.safe_dump(new_config) + else: + t = Template(raw_config) + buff = StringIO() + + try: + substituted = t.substitute(environment) + except KeyError as e: + raise exceptions.MissingEnvironment(e.args[0]) + except ValueError: + # Support "invalid" placeholders for lookup placeholders. 
+ substituted = t.safe_substitute(environment) + + if not isinstance(substituted, str): + substituted = substituted.decode('utf-8') + + buff.write(substituted) + buff.seek(0) + return buff.read() + + +def substitute_references(root, environment, exp, full_exp): + # We need to check for something being a string in both python 2.7 and + # 3+. The aliases in the future package don't work for yaml sourced + # strings, so we have to spin our own. + def isstr(s): + try: + return isinstance(s, basestring) + except NameError: + return isinstance(s, str) + + if isinstance(root, list): + result = [] + for x in root: + result.append(substitute_references(x, environment, exp, full_exp)) + return result + elif isinstance(root, dict): + result = {} + for k, v in root.items(): + result[k] = substitute_references(v, environment, exp, full_exp) + return result + elif isstr(root): + # Strings are the special type where all substitutions happen. If we + # encounter a string object in the expression tree, we need to perform + # one of two different kinds of matches on it. First, if the entire + # string is a variable, we can replace it with an arbitrary object; + # dict, list, primitive. If the string contains variables within it, + # then we have to do string substitution. + match_obj = full_exp.match(root.strip()) + if match_obj: + matches = match_obj.groupdict() + var_name = matches['named'] or matches['braced'] + if var_name is not None: + value = environment.get(var_name) + if value is None: + raise exceptions.MissingEnvironment(var_name) + return value + + # Returns if an object is a basic type. Once again, the future package + # overrides don't work for string here, so we have to special case it + def is_basic_type(o): + if isstr(o): + return True + basic_types = [int, bool, float] + for t in basic_types: + if isinstance(o, t): + return True + return False + + # If we got here, then we didn't have any full matches, now perform + # partial substitutions within a string. 
+ def replace(mo): + name = mo.groupdict()['braced'] or mo.groupdict()['named'] + if not name: + return root[mo.start():mo.end()] + val = environment.get(name) + if val is None: + raise exceptions.MissingEnvironment(name) + if not is_basic_type(val): + raise exceptions.WrongEnvironmentType(name) + return str(val) + value = exp.sub(replace, root) + return value + # In all other unhandled cases, return a copy of the input + return copy.copy(root) def parse(raw_config): diff --git a/stacker/environment.py b/stacker/environment.py index 4ac753611..e4a2be174 100644 --- a/stacker/environment.py +++ b/stacker/environment.py @@ -2,9 +2,27 @@ from __future__ import division from __future__ import absolute_import +import yaml + + +class DictWithSourceType(dict): + """An environment dict which keeps track of its source. + + Environment files may be loaded from simple key/value files, or from + structured YAML files, and we need to render them using a different + strategy based on their source. This class adds a source_type property + to a dict which keeps track of whether the source for the dict is + yaml or simple. 
+ """ + def __init__(self, source_type, *args): + dict.__init__(self, args) + if source_type not in ['yaml', 'simple']: + raise ValueError('source_type must be yaml or simple') + self.source_type = source_type + def parse_environment(raw_environment): - environment = {} + environment = DictWithSourceType('simple') for line in raw_environment.split('\n'): line = line.strip() if not line: @@ -20,3 +38,13 @@ def parse_environment(raw_environment): environment[key] = value.strip() return environment + + +def parse_yaml_environment(raw_environment): + environment = DictWithSourceType('yaml') + parsed_env = yaml.safe_load(raw_environment) + + if type(parsed_env) != dict: + raise ValueError('Environment must be valid YAML') + environment.update(parsed_env) + return environment diff --git a/stacker/exceptions.py b/stacker/exceptions.py index e1ae8339f..e4b5f7939 100644 --- a/stacker/exceptions.py +++ b/stacker/exceptions.py @@ -158,6 +158,14 @@ def __init__(self, key, *args, **kwargs): super(MissingEnvironment, self).__init__(message, *args, **kwargs) +class WrongEnvironmentType(Exception): + + def __init__(self, key, *args, **kwargs): + self.key = key + message = "Environment key %s can't be merged into a string" % (key,) + super(WrongEnvironmentType, self).__init__(message, *args, **kwargs) + + class ImproperlyConfigured(Exception): def __init__(self, cls, error, *args, **kwargs): diff --git a/stacker/tests/test_config.py b/stacker/tests/test_config.py index 87876c0d2..9795784a2 100644 --- a/stacker/tests/test_config.py +++ b/stacker/tests/test_config.py @@ -4,6 +4,7 @@ from builtins import next import sys import unittest +import yaml from stacker.config import ( render_parse_load, @@ -14,7 +15,10 @@ process_remote_sources ) from stacker.config import Config, Stack -from stacker.environment import parse_environment +from stacker.environment import ( + parse_environment, + parse_yaml_environment +) from stacker import exceptions from stacker.lookups.registry import 
LOOKUP_HANDLERS @@ -49,6 +53,84 @@ def test_render_blank_env_values(self): c = render(conf, e) self.assertEqual("namespace: !!str", c) + def test_render_yaml(self): + conf = """ + namespace: ${namespace} + list_var: ${env_list} + dict_var: ${env_dict} + str_var: ${env_str} + nested_list: + - ${list_1} + - ${dict_1} + - ${str_1} + nested_dict: + a: ${list_1} + b: ${dict_1} + c: ${str_1} + empty: ${empty_string} + substr: prefix-${str_1}-suffix + multiple: ${str_1}-${str_2} + dont_match_this: ${output something} + """ + env = """ + namespace: test + env_list: &listAnchor + - a + - b + - c + env_dict: &dictAnchor + a: 1 + b: 2 + c: 3 + env_str: Hello World! + list_1: *listAnchor + dict_1: *dictAnchor + str_1: another str + str_2: hello + empty_string: "" + """ + e = parse_yaml_environment(env) + c = render(conf, e) + + # Parse the YAML again, so that we can check structure + pc = yaml.safe_load(c) + + exp_dict = {'a': 1, 'b': 2, 'c': 3} + exp_list = ['a', 'b', 'c'] + + self.assertEquals(pc['namespace'], 'test') + self.assertEquals(pc['list_var'], exp_list) + self.assertEquals(pc['dict_var'], exp_dict) + self.assertEquals(pc['str_var'], 'Hello World!') + self.assertEquals(pc['nested_list'][0], exp_list) + self.assertEquals(pc['nested_list'][1], exp_dict) + self.assertEquals(pc['nested_list'][2], 'another str') + self.assertEquals(pc['nested_dict']['a'], exp_list) + self.assertEquals(pc['nested_dict']['b'], exp_dict) + self.assertEquals(pc['nested_dict']['c'], 'another str') + self.assertEquals(pc['empty'], '') + self.assertEquals(pc['substr'], 'prefix-another str-suffix') + self.assertEquals(pc['multiple'], 'another str-hello') + self.assertEquals(pc['dont_match_this'], '${output something}') + + def test_render_yaml_errors(self): + # We shouldn't be able to substitute an object into a string + conf = "something: prefix-${var_name}" + env = """ + var_name: + foo: bar + """ + e = parse_yaml_environment(env) + with self.assertRaises(exceptions.WrongEnvironmentType): + 
render(conf, e) + + # Missing keys need to raise errors too + conf = "something: ${variable}" + env = "some_other_variable: 5" + e = parse_yaml_environment(env) + with self.assertRaises(exceptions.MissingEnvironment): + render(conf, e) + def test_config_validate_missing_stack_source(self): config = Config({ "namespace": "prod", diff --git a/stacker/tests/test_environment.py b/stacker/tests/test_environment.py index 1d8acd979..bed424333 100644 --- a/stacker/tests/test_environment.py +++ b/stacker/tests/test_environment.py @@ -3,7 +3,10 @@ from __future__ import absolute_import import unittest -from stacker.environment import parse_environment +from stacker.environment import ( + DictWithSourceType, + parse_environment +) test_env = """key1: value1 # some: comment @@ -31,7 +34,7 @@ class TestEnvironment(unittest.TestCase): def test_simple_key_value_parsing(self): parsed_env = parse_environment(test_env) - self.assertTrue(isinstance(parsed_env, dict)) + self.assertTrue(isinstance(parsed_env, DictWithSourceType)) self.assertEqual(parsed_env["key1"], "value1") self.assertEqual(parsed_env["key2"], "value2") self.assertEqual(parsed_env["key3"], "some:complex::value") From b6d311846f4ac04f8e7e0dfc100ae741e1098d90 Mon Sep 17 00:00:00 2001 From: Joseph Johansson Date: Sun, 16 Feb 2020 23:22:11 -0700 Subject: [PATCH 50/74] Fix ami lookup 'Name' key error (#734) * Fix ami lookup Name key error * updating changelog --- CHANGELOG.md | 1 + stacker/lookups/handlers/ami.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7711f3765..49d4ee270 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ ## Upcoming release +- Fixing AMI lookup Key error on 'Name' - Ensure that base64 lookup codec encodes the bytes object as a string [GH-742] - Use CloudFormation Change Sets for `stacker diff` diff --git a/stacker/lookups/handlers/ami.py b/stacker/lookups/handlers/ami.py index 8d51c0619..fa4e660c8 100644 --- 
a/stacker/lookups/handlers/ami.py +++ b/stacker/lookups/handlers/ami.py @@ -93,7 +93,7 @@ def handle(cls, value, provider, **kwargs): key=operator.itemgetter('CreationDate'), reverse=True) for image in images: - if re.match("^%s$" % name_regex, image['Name']): + if re.match("^%s$" % name_regex, image.get('Name', '')): return image['ImageId'] raise ImageNotFound(value) From 9cf93c325a3bc513ad826a170af2b1bd10b9740a Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 00:35:43 -0800 Subject: [PATCH 51/74] Fix jinja req and keypair tests --- setup.py | 2 +- stacker/tests/hooks/test_keypair.py | 2 ++ stacker/tests/lookups/handlers/test_kms.py | 36 ---------------------- 3 files changed, 3 insertions(+), 37 deletions(-) delete mode 100644 stacker/tests/lookups/handlers/test_kms.py diff --git a/setup.py b/setup.py index 3e6220c34..bf652531c 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ "PyYAML>=3.13b1", "awacs>=0.6.0", "gitpython>=2.0,<3.0", - "jinja2>=2.7,<3.0", + "jinja2>=2.7,<3.0a", "schematics>=2.0.1,<2.1.0", "formic2", "python-dateutil>=2.0,<3.0", diff --git a/stacker/tests/hooks/test_keypair.py b/stacker/tests/hooks/test_keypair.py index 49686c594..7858d0748 100644 --- a/stacker/tests/hooks/test_keypair.py +++ b/stacker/tests/hooks/test_keypair.py @@ -137,6 +137,8 @@ def test_create_in_ssm(provider, context, ssh_key, ssm_key_id): assert param_details['Description'] == \ 'SSH private key for KeyPair "{}" (generated by Stacker)'.format( KEY_PAIR_NAME) + # The default ssm key id + ssm_key_id = ssm_key_id or "alias/aws/ssm" assert param_details.get('KeyId') == ssm_key_id diff --git a/stacker/tests/lookups/handlers/test_kms.py b/stacker/tests/lookups/handlers/test_kms.py deleted file mode 100644 index bb199a639..000000000 --- a/stacker/tests/lookups/handlers/test_kms.py +++ /dev/null @@ -1,36 +0,0 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -import codecs -import unittest - 
-from moto import mock_kms - -import boto3 - -from stacker.lookups.handlers.kms import KmsLookup - - -class TestKMSHandler(unittest.TestCase): - def setUp(self): - self.plain = b"my secret" - with mock_kms(): - kms = boto3.client("kms", region_name="us-east-1") - self.secret = kms.encrypt( - KeyId="alias/stacker", - Plaintext=codecs.encode(self.plain, 'base64').decode('utf-8'), - )["CiphertextBlob"] - if isinstance(self.secret, bytes): - self.secret = self.secret.decode() - - def test_kms_handler(self): - with mock_kms(): - decrypted = KmsLookup.handle(self.secret) - self.assertEqual(decrypted, self.plain) - - def test_kms_handler_with_region(self): - region = "us-east-1" - value = "%s@%s" % (region, self.secret) - with mock_kms(): - decrypted = KmsLookup.handle(value) - self.assertEqual(decrypted, self.plain) From 98a1cbfc884bda4796999ab8d09a893504413e2c Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 00:53:49 -0800 Subject: [PATCH 52/74] fix diff functional tests --- .../18_stacker_diff-simple_diff_with_output_lookups.bats | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_suite/18_stacker_diff-simple_diff_with_output_lookups.bats b/tests/test_suite/18_stacker_diff-simple_diff_with_output_lookups.bats index 65814a1ee..0bc9b0db6 100644 --- a/tests/test_suite/18_stacker_diff-simple_diff_with_output_lookups.bats +++ b/tests/test_suite/18_stacker_diff-simple_diff_with_output_lookups.bats @@ -41,6 +41,6 @@ EOF assert "$status" -eq 0 assert_has_line "\-InstanceType = m3.large" assert_has_line "+InstanceType = m3.xlarge" - assert_has_line "+ \"VPC1\": {" - assert_has_line "+ \"Type\": \"AWS::CloudFormation::WaitConditionHandle\"" + assert_has_line "LogicalResourceId: VPC1" + assert_has_line "ResourceType: AWS::CloudFormation::WaitConditionHandle" } From 722ba88bfa24c61f360d7ef21fc3b232960af16b Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 01:34:57 -0800 Subject: [PATCH 53/74] Move template to an 
actual resource This fixes an issue with diffing based on changesets. Before we auto-diffed the parameters, even if it wouldn't result in a change in the actual stack. Now we require the stack to change before the diff actually triggers. This fixes the diff test for the raw template. --- stacker/tests/fixtures/cfn_template.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stacker/tests/fixtures/cfn_template.json b/stacker/tests/fixtures/cfn_template.json index 623cbd662..ec30eced8 100644 --- a/stacker/tests/fixtures/cfn_template.json +++ b/stacker/tests/fixtures/cfn_template.json @@ -12,7 +12,10 @@ }, "Resources": { "Dummy": { - "Type": "AWS::CloudFormation::WaitConditionHandle" + "Type": "AWS::SNS::Topic", + "Properties": { + "DisplayName": {"Ref" : "Param1"} + } } }, "Outputs": { From 44a98ef39810ab6ae60291a81a383f2ccbd60099 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 01:47:32 -0800 Subject: [PATCH 54/74] Fix raw unit tests --- stacker/tests/blueprints/test_raw.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stacker/tests/blueprints/test_raw.py b/stacker/tests/blueprints/test_raw.py index 9f93f7c82..4c18bb113 100644 --- a/stacker/tests/blueprints/test_raw.py +++ b/stacker/tests/blueprints/test_raw.py @@ -101,7 +101,10 @@ def test_to_json(self): }, "Resources": { "Dummy": { - "Type": "AWS::CloudFormation::WaitConditionHandle" + "Type": "AWS::SNS::Topic", + "Properties": { + "DisplayName": {"Ref": "Param1"} + } } }, "Outputs": { From c113f640e67fff6433c26f458c945080df1745ad Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 11:18:06 -0800 Subject: [PATCH 55/74] Fix new SNS based functional tests --- stacker/tests/fixtures/mock_blueprints.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/stacker/tests/fixtures/mock_blueprints.py b/stacker/tests/fixtures/mock_blueprints.py index adfbd0f4c..519cde208 100644 --- a/stacker/tests/fixtures/mock_blueprints.py 
+++ b/stacker/tests/fixtures/mock_blueprints.py @@ -11,6 +11,7 @@ import awacs.cloudformation import awacs.iam import awacs.sts +import awacs.sns from troposphere.cloudformation import WaitCondition, WaitConditionHandle @@ -51,6 +52,9 @@ def create_template(self): cloudformation_scope = Sub( "arn:aws:cloudformation:*:${AWS::AccountId}:" "stack/${StackerNamespace}-*") + sns_scope = Sub( + "arn:aws:sns:*:${AWS::AccountId}:" + "${StackerNamespace}-*") changeset_scope = "*" # This represents the precise IAM permissions that stacker itself @@ -119,7 +123,17 @@ def create_template(self): awacs.cloudformation.DescribeStacks, awacs.cloudformation.DescribeStackEvents ] + ), + Statement( + Effect="Allow", + Resource=[sns_scope], + Action=[ + awacs.sns.CreateTopic, + awacs.sns.DeleteTopic, + awacs.sns.GetTopicAttributes + ] ) + ] ) ) From 5215e6a57aad90b0ce3c5f019bcd5fc2f329d1e5 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 15:13:28 -0800 Subject: [PATCH 56/74] Sleep for 20 seconds between tests to avoid rate limiting --- tests/test_helper.bash | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_helper.bash b/tests/test_helper.bash index 1d0d52194..26ec8f146 100644 --- a/tests/test_helper.bash +++ b/tests/test_helper.bash @@ -1,5 +1,8 @@ #!/usr/bin/env bash +# used to make sure we sleep for 20 seconds between tests to avoid rate limiting +sleep 20 + # To make the tests run faster, we don't wait between calls to DescribeStacks # to check on the status of Create/Update. 
export STACKER_STACK_POLL_TIME=0 From 13ec7a2eb03caf5c71cb127ad0b1ab8a59f60826 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 17 Feb 2020 16:43:56 -0800 Subject: [PATCH 57/74] Twiddle with times to try and fix timeouts --- tests/test_helper.bash | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/test_helper.bash b/tests/test_helper.bash index 26ec8f146..fb9ff858d 100644 --- a/tests/test_helper.bash +++ b/tests/test_helper.bash @@ -1,11 +1,8 @@ #!/usr/bin/env bash -# used to make sure we sleep for 20 seconds between tests to avoid rate limiting -sleep 20 - # To make the tests run faster, we don't wait between calls to DescribeStacks # to check on the status of Create/Update. -export STACKER_STACK_POLL_TIME=0 +export STACKER_STACK_POLL_TIME=2 if [ -z "$STACKER_NAMESPACE" ]; then >&2 echo "To run these tests, you must set a STACKER_NAMESPACE environment variable" @@ -46,6 +43,8 @@ assert_has_line() { # information. If you need to execute the stacker binary _without_ calling # "run", you can use "command stacker". stacker() { + # Sleep between runs of stacker to try and avoid rate limiting issues. 
+ sleep 2 echo "$ stacker $@" run command stacker "$@" echo "$output" From 9bbb391edc5f0866431a22b04d7ab309d7a27dad Mon Sep 17 00:00:00 2001 From: Dashiel Lopez Mendez Date: Wed, 26 Feb 2020 18:29:11 -0600 Subject: [PATCH 58/74] Add the import-related stack statuses (#752) --- stacker/providers/aws/default.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index cbd49814e..b6dfeb94d 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -546,6 +546,7 @@ class Provider(BaseProvider): IN_PROGRESS_STATUSES = ( "CREATE_IN_PROGRESS", + "IMPORT_IN_PROGRESS", "UPDATE_IN_PROGRESS", "DELETE_IN_PROGRESS", "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS", @@ -553,6 +554,7 @@ class Provider(BaseProvider): ROLLING_BACK_STATUSES = ( "ROLLBACK_IN_PROGRESS", + "IMPORT_ROLLBACK_IN_PROGRESS", "UPDATE_ROLLBACK_IN_PROGRESS" ) @@ -561,6 +563,7 @@ class Provider(BaseProvider): "ROLLBACK_FAILED", "ROLLBACK_COMPLETE", "DELETE_FAILED", + "IMPORT_ROLLBACK_FAILED", "UPDATE_ROLLBACK_FAILED", # Note: UPDATE_ROLLBACK_COMPLETE is in both the FAILED and COMPLETE # sets, because we need to wait for it when a rollback is triggered, @@ -571,7 +574,9 @@ class Provider(BaseProvider): COMPLETE_STATUSES = ( "CREATE_COMPLETE", "DELETE_COMPLETE", + "IMPORT_COMPLETE", "UPDATE_COMPLETE", + "IMPORT_ROLLBACK_COMPLETE", "UPDATE_ROLLBACK_COMPLETE", ) From 6221107483fb4ff08e91856e824fd5e0e510988f Mon Sep 17 00:00:00 2001 From: Kyle Finley Date: Tue, 21 Apr 2020 08:21:15 -0700 Subject: [PATCH 59/74] fix stack.set_outputs not being called by diff if stack did not change (#754) * fix stack.set_outputs not being called if stack did not change resolves cloudtools/stacker#753 * set_outputs for locked stacks --- stacker/actions/diff.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stacker/actions/diff.py b/stacker/actions/diff.py index 64ac74de8..157ff0b18 100644 --- a/stacker/actions/diff.py +++ 
b/stacker/actions/diff.py @@ -163,10 +163,12 @@ def _diff_stack(self, stack, **kwargs): if not build.should_submit(stack): return NotSubmittedStatus() + provider = self.build_provider(stack) + if not build.should_update(stack): + stack.set_outputs(provider.get_outputs(stack.fqn)) return NotUpdatedStatus() - provider = self.build_provider(stack) tags = build.build_stack_tags(stack) stack.resolve(self.context, provider) @@ -179,6 +181,7 @@ def _diff_stack(self, stack, **kwargs): stack.set_outputs(outputs) except exceptions.StackDidNotChange: logger.info('No changes: %s', stack.fqn) + stack.set_outputs(provider.get_outputs(stack.fqn)) return COMPLETE From 0f8c233f3fbf756387288b9d7c304d22c6b05dd3 Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Tue, 21 Apr 2020 08:58:02 -0700 Subject: [PATCH 60/74] Fix python 2.7/3.5 dependency issue --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index bf652531c..1f8737bd4 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ "schematics>=2.0.1,<2.1.0", "formic2", "python-dateutil>=2.0,<3.0", + "MarkupSafe<2.0", # 2.0 dropped python 2.7, 3.5 support - temporary ] setup_requires = ['pytest-runner'] From 06a4beed9a586f6f975c0896449229724c3795b1 Mon Sep 17 00:00:00 2001 From: Russell Ballestrini Date: Fri, 29 May 2020 16:27:22 -0400 Subject: [PATCH 61/74] add cf notification arns (#756) * add cf notification arns parameter * fixed lint errors * added NotificationARNs arg to test * adjustments based on feedback, added test modified: stacker/providers/aws/default.py modified: stacker/stack.py modified: stacker/tests/providers/aws/test_default.py * docs * pin more-itertools to version which supports py27 modified: setup.py Co-authored-by: Tim Kukhmazov --- docs/config.rst | 3 ++ setup.py | 1 + stacker/actions/build.py | 4 +- stacker/config/__init__.py | 3 ++ stacker/context.py | 1 + stacker/providers/aws/default.py | 57 ++++++++++++++++----- stacker/stack.py | 18 +++++-- 
stacker/tests/providers/aws/test_default.py | 42 +++++++++++++-- 8 files changed, 106 insertions(+), 23 deletions(-) diff --git a/docs/config.rst b/docs/config.rst index a6804926b..7e8421ee7 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -410,6 +410,9 @@ A stack has the following keys: an exception if the stack is in an `IN_PROGRESS` state. You can set this option to `wait` and stacker will wait for the previous update to complete before attempting to update the stack. +**notification_arns**: + (optional): If provided, accepts a list of None or many AWS SNS Topic ARNs + which will be notified of this stack's CloudFormation state changes. Stacks Example ~~~~~~~~~~~~~~ diff --git a/setup.py b/setup.py index 1f8737bd4..4bff71360 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ "formic2", "python-dateutil>=2.0,<3.0", "MarkupSafe<2.0", # 2.0 dropped python 2.7, 3.5 support - temporary + "more-itertools<6.0.0", # 6.0.0 dropped python 2.7 support - temporary ] setup_requires = ['pytest-runner'] diff --git a/stacker/actions/build.py b/stacker/actions/build.py index 55a0da9e7..63cb729eb 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -341,7 +341,8 @@ def _launch_stack(self, stack, **kwargs): logger.debug("Creating new stack: %s", stack.fqn) provider.create_stack(stack.fqn, template, parameters, tags, force_change_set, - stack_policy=stack_policy) + stack_policy=stack_policy, + notification_arns=stack.notification_arns) return SubmittedStatus("creating new stack") try: @@ -359,6 +360,7 @@ def _launch_stack(self, stack, **kwargs): force_interactive=stack.protected, force_change_set=force_change_set, stack_policy=stack_policy, + notification_arns=stack.notification_arns ) logger.debug("Updating existing stack: %s", stack.fqn) diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index 4c2192b28..29ec0cf90 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -442,6 +442,9 @@ class Stack(Model): 
in_progress_behavior = StringType(serialize_when_none=False) + notification_arns = ListType( + StringType, serialize_when_none=False, default=[]) + def validate_class_path(self, data, value): if value and data["template_path"]: raise ValidationError( diff --git a/stacker/context.py b/stacker/context.py index 0eac9236f..242bb0cd6 100644 --- a/stacker/context.py +++ b/stacker/context.py @@ -161,6 +161,7 @@ def get_stacks(self): locked=stack_def.locked, enabled=stack_def.enabled, protected=stack_def.protected, + notification_arns=stack_def.notification_arns ) stacks.append(stack) self._stacks = stacks diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index b6dfeb94d..c45f46119 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -334,9 +334,17 @@ def wait_till_change_set_complete(cfn_client, change_set_id, try_count=25, return response -def create_change_set(cfn_client, fqn, template, parameters, tags, - change_set_type='UPDATE', replacements_only=False, - service_role=None): +def create_change_set( + cfn_client, + fqn, + template, + parameters, + tags, + change_set_type='UPDATE', + replacements_only=False, + service_role=None, + notification_arns=None +): logger.debug("Attempting to create change set of type %s for stack: %s.", change_set_type, fqn) @@ -344,7 +352,8 @@ def create_change_set(cfn_client, fqn, template, parameters, tags, fqn, parameters, tags, template, change_set_type=change_set_type, service_role=service_role, - change_set_name=get_change_set_name() + change_set_name=get_change_set_name(), + notification_arns=notification_arns ) try: response = cfn_client.create_change_set(**args) @@ -414,12 +423,18 @@ def check_tags_contain(actual, expected): return actual_set >= expected_set -def generate_cloudformation_args(stack_name, parameters, tags, template, - capabilities=DEFAULT_CAPABILITIES, - change_set_type=None, - service_role=None, - stack_policy=None, - change_set_name=None): +def 
generate_cloudformation_args( + stack_name, + parameters, + tags, + template, + capabilities=DEFAULT_CAPABILITIES, + change_set_type=None, + service_role=None, + stack_policy=None, + change_set_name=None, + notification_arns=None, +): """Used to generate the args for common cloudformation API interactions. This is used for create_stack/update_stack/create_change_set calls in @@ -443,6 +458,8 @@ def generate_cloudformation_args(stack_name, parameters, tags, template, object representing a stack policy. change_set_name (str, optional): An optional change set name to use with create_change_set. + notification_arns (list, optional): An optional list of SNS topic ARNs + to send CloudFormation Events to. Returns: dict: A dictionary of arguments to be used in the Cloudformation API @@ -461,6 +478,9 @@ def generate_cloudformation_args(stack_name, parameters, tags, template, if change_set_name: args["ChangeSetName"] = change_set_name + if notification_arns: + args["NotificationARNs"] = notification_arns + if change_set_type: args["ChangeSetType"] = change_set_type @@ -738,9 +758,13 @@ def destroy_stack(self, stack, **kwargs): self.cloudformation.delete_stack(**args) return True - def create_stack(self, fqn, template, parameters, tags, - force_change_set=False, stack_policy=None, - **kwargs): + def create_stack( + self, fqn, template, parameters, tags, + force_change_set=False, + stack_policy=None, + notification_arns=None, + **kwargs + ): """Create a new Cloudformation stack. Args: @@ -754,6 +778,8 @@ def create_stack(self, fqn, template, parameters, tags, force_change_set (bool): Whether or not to force change set use. stack_policy (:class:`stacker.providers.base.Template`): A template object representing a stack policy. + notification_arns (list, optional): An optional list of SNS topic + ARNs to send CloudFormation Events to. 
""" logger.debug("Attempting to create stack %s:.", fqn) @@ -780,6 +806,7 @@ def create_stack(self, fqn, template, parameters, tags, fqn, parameters, tags, template, service_role=self.service_role, stack_policy=stack_policy, + notification_arns=notification_arns ) try: @@ -1029,7 +1056,8 @@ def noninteractive_changeset_update(self, fqn, template, old_parameters, ) def default_update_stack(self, fqn, template, old_parameters, parameters, - tags, stack_policy=None, **kwargs): + tags, stack_policy=None, + notification_arns=[], **kwargs): """Update a Cloudformation stack in default mode. Args: @@ -1051,6 +1079,7 @@ def default_update_stack(self, fqn, template, old_parameters, parameters, fqn, parameters, tags, template, service_role=self.service_role, stack_policy=stack_policy, + notification_arns=notification_arns ) try: diff --git a/stacker/stack.py b/stacker/stack.py index 950fcd548..08b24335e 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -54,12 +54,23 @@ class Stack(object): blueprint. locked (bool, optional): Whether or not the stack is locked. force (bool, optional): Whether to force updates on this stack. - enabled (bool, optional): Whether this stack is enabled + enabled (bool, optional): Whether this stack is enabled. + protected (boot, optional): Whether this stack is protected. + notification_arns (list, optional): An optional list of SNS topic ARNs + to send CloudFormation Events to. 
""" - def __init__(self, definition, context, variables=None, mappings=None, - locked=False, force=False, enabled=True, protected=False): + def __init__( + self, definition, context, + variables=None, + mappings=None, + locked=False, + force=False, + enabled=True, + protected=False, + notification_arns=None, + ): self.logging = True self.name = definition.name self.fqn = context.get_fqn(definition.stack_name or self.name) @@ -75,6 +86,7 @@ def __init__(self, definition, context, variables=None, mappings=None, self.context = context self.outputs = None self.in_progress_behavior = definition.in_progress_behavior + self.notification_arns = notification_arns def __repr__(self): return self.fqn diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 83a54a8f8..92e24b7b5 100644 --- a/stacker/tests/providers/aws/test_default.py +++ b/stacker/tests/providers/aws/test_default.py @@ -403,11 +403,18 @@ def test_generate_cloudformation_args(self): template_url = "http://fake.s3url.com/blah.json" template_body = '{"fake_body": "woot"}' std_args = { - "stack_name": stack_name, "parameters": [], "tags": [], - "template": Template(url=template_url)} - std_return = {"StackName": stack_name, "Parameters": [], "Tags": [], - "Capabilities": DEFAULT_CAPABILITIES, - "TemplateURL": template_url} + "stack_name": stack_name, + "parameters": [], + "tags": [], + "template": Template(url=template_url) + } + std_return = { + "StackName": stack_name, + "Parameters": [], + "Tags": [], + "Capabilities": DEFAULT_CAPABILITIES, + "TemplateURL": template_url, + } result = generate_cloudformation_args(**std_args) self.assertEqual(result, std_return) @@ -439,6 +446,31 @@ def test_generate_cloudformation_args(self): result = generate_cloudformation_args(**std_args) self.assertEqual(result, template_body_result) + def test_generate_cloudformation_args_with_notification_arns(self): + stack_name = "mystack" + template_url = 
"http://fake.s3url.com/blah.json" + std_args = { + "stack_name": stack_name, + "parameters": [], + "tags": [], + "template": Template(url=template_url), + "notification_arns": [ + "arn:aws:sns:us-east-1:1234567890:test-cf-deploy-notify-sns-topic-CfDeployNotify" # noqa + ] + } + std_return = { + "StackName": stack_name, + "Parameters": [], + "Tags": [], + "Capabilities": DEFAULT_CAPABILITIES, + "TemplateURL": template_url, + "NotificationARNs": [ + "arn:aws:sns:us-east-1:1234567890:test-cf-deploy-notify-sns-topic-CfDeployNotify" # noqa + ] + } + result = generate_cloudformation_args(**std_args) + self.assertEqual(result, std_return) + class TestProviderDefaultMode(unittest.TestCase): def setUp(self): From 72eb1b0fc8afd3456ea48e8a140fcfca56a4036d Mon Sep 17 00:00:00 2001 From: TomRitserveldt Date: Sat, 22 Aug 2020 14:52:21 +0200 Subject: [PATCH 62/74] specify dependency for boto3 in the correct way for multiple version specifiers (#761) see https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies Co-authored-by: Tom Ritserveldt --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4bff71360..77fc4878a 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ "future", "troposphere>=1.9.0", 'botocore>=1.12.111', # matching boto3 requirement - "boto3>=1.9.111<2.0", + "boto3>=1.9.111,<2.0", "PyYAML>=3.13b1", "awacs>=0.6.0", "gitpython>=2.0,<3.0", From 4e32f25eabdd75025ad2623990464d690d40654f Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Sat, 22 Aug 2020 09:48:16 -0700 Subject: [PATCH 63/74] Release 1.7.1 (#760) * Release 1.7.1 * pin "rsa" to a version which supports Python 2.7 modified: setup.py * pin python-jose<3.2.0 because it dropped python 2.7 Co-authored-by: russellballestrini --- CHANGELOG.md | 10 +++++++++- setup.py | 4 +++- stacker/__init__.py | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 49d4ee270..fca00ee81 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,16 @@ ## Upcoming release -- Fixing AMI lookup Key error on 'Name' +## 1.7.1 (2020-08-17) +- Fixing AMI lookup Key error on 'Name' +- hooks: lambda: allow uploading pre-built payloads [GH-#564] - Ensure that base64 lookup codec encodes the bytes object as a string [GH-742] - Use CloudFormation Change Sets for `stacker diff` +- Locked stacks still have requirements [GH-746] +- change diff to use CFN change sets instead of comparing template dicts [GH-744] +- Add YAML environment file support [GH-740] +- fix `stack.set_outputs` not being called by diff if stack did not change [GH-754] +- Fix python 2.7/3.5 dependency issue +- add cf notification arns [GH-756] ## 1.7.0 (2019-04-07) diff --git a/setup.py b/setup.py index 77fc4878a..31943d821 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ import os from setuptools import setup, find_packages -VERSION = "1.7.0" +VERSION = "1.7.1" src_dir = os.path.dirname(__file__) @@ -19,6 +19,8 @@ "python-dateutil>=2.0,<3.0", "MarkupSafe<2.0", # 2.0 dropped python 2.7, 3.5 support - temporary "more-itertools<6.0.0", # 6.0.0 dropped python 2.7 support - temporary + "rsa==4.5", # 4.6 dropped python 2.7 support - temporary + "python-jose<3.2.0", # 3.2.0 dropped python 2.7 support - temporary ] setup_requires = ['pytest-runner'] diff --git a/stacker/__init__.py b/stacker/__init__.py index f55f1a97f..41ab7ac2b 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -2,4 +2,4 @@ from __future__ import division from __future__ import absolute_import -__version__ = "1.7.0" +__version__ = "1.7.1" From eda3a520055d63c2ee0d41c75246a71764451908 Mon Sep 17 00:00:00 2001 From: matt beary <1661988+hauntingEcho@users.noreply.github.com> Date: Mon, 14 Sep 2020 12:20:47 -0500 Subject: [PATCH 64/74] address breaking moto change to awslambda (#763) Moto changed awslambda to be an extra, as of v1.3.15: https://github.com/spulec/moto/pull/3281/files --- setup.py | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/setup.py b/setup.py index 31943d821..dcf8173c9 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ "pytest~=4.3", "pytest-cov~=2.6", "mock~=2.0", - "moto~=1.3.7", + "moto[awslambda]~=1.3.16", "testfixtures~=4.10.0", "flake8-future-import", ] From c022c2f22cf9cafcc2a0c51e07d254a2049b2e97 Mon Sep 17 00:00:00 2001 From: Bruno Coelho Date: Thu, 29 Oct 2020 17:25:33 +0000 Subject: [PATCH 65/74] Added Python version validation before update kms decrypt output (#765) Co-authored-by: cfpipeline --- stacker/lookups/handlers/kms.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/stacker/lookups/handlers/kms.py b/stacker/lookups/handlers/kms.py index ba80d2779..1a516f3d9 100644 --- a/stacker/lookups/handlers/kms.py +++ b/stacker/lookups/handlers/kms.py @@ -2,6 +2,7 @@ from __future__ import division from __future__ import absolute_import import codecs +import sys from stacker.session_cache import get_session from . import LookupHandler @@ -63,5 +64,12 @@ def handle(cls, value, **kwargs): # get raw but still encrypted value from base64 version. decoded = codecs.decode(value, 'base64') + # check python version in your system + python3_or_later = sys.version_info[0] >= 3 + # decrypt and return the plain text raw value. 
- return kms.decrypt(CiphertextBlob=decoded)["Plaintext"] + if python3_or_later: + return kms.decrypt(CiphertextBlob=decoded)["Plaintext"]\ + .decode('utf-8') + else: + return kms.decrypt(CiphertextBlob=decoded)["Plaintext"] From f563a6f5a23550c7a668a1500bcea2b4e94f5bbf Mon Sep 17 00:00:00 2001 From: Michael Barrett Date: Mon, 9 Nov 2020 17:24:53 -0800 Subject: [PATCH 66/74] Release 1.7.2 (#767) --- CHANGELOG.md | 6 +++++- setup.py | 2 +- stacker/__init__.py | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fca00ee81..7f0aaacec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,12 @@ ## Upcoming release +## 1.7.2 (2020-11-09) +- address breaking moto change to awslambda [GH-763] +- Added Python version validation before update kms decrypt output [GH-765] + ## 1.7.1 (2020-08-17) - Fixing AMI lookup Key error on 'Name' -- hooks: lambda: allow uploading pre-built payloads [GH-#564] +- hooks: lambda: allow uploading pre-built payloads [GH-564] - Ensure that base64 lookup codec encodes the bytes object as a string [GH-742] - Use CloudFormation Change Sets for `stacker diff` - Locked stacks still have requirements [GH-746] diff --git a/setup.py b/setup.py index dcf8173c9..546875195 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ import os from setuptools import setup, find_packages -VERSION = "1.7.1" +VERSION = "1.7.2" src_dir = os.path.dirname(__file__) diff --git a/stacker/__init__.py b/stacker/__init__.py index 41ab7ac2b..9d4c630b4 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -2,4 +2,4 @@ from __future__ import division from __future__ import absolute_import -__version__ = "1.7.1" +__version__ = "1.7.2" From 13d64cdfbca3dac6943f9e1d8dc097dc511f1fff Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Thu, 3 Feb 2022 23:43:52 +0100 Subject: [PATCH 67/74] Remove Python 2 backport imports from future and builtins --- stacker/__init__.py | 3 --- stacker/actions/base.py | 4 ---- 
stacker/actions/build.py | 3 --- stacker/actions/destroy.py | 3 --- stacker/actions/diff.py | 5 ----- stacker/actions/graph.py | 3 --- stacker/actions/info.py | 3 --- stacker/awscli_yamlhelper.py | 3 --- stacker/blueprints/base.py | 5 ----- stacker/blueprints/raw.py | 3 --- stacker/blueprints/testutil.py | 3 --- stacker/blueprints/variables/types.py | 4 ---- stacker/commands/__init__.py | 3 --- stacker/commands/stacker/__init__.py | 3 --- stacker/commands/stacker/base.py | 4 ---- stacker/commands/stacker/build.py | 3 --- stacker/commands/stacker/destroy.py | 3 --- stacker/commands/stacker/diff.py | 3 --- stacker/commands/stacker/graph.py | 3 --- stacker/commands/stacker/info.py | 3 --- stacker/config/__init__.py | 6 ------ stacker/config/translators/__init__.py | 3 --- stacker/config/translators/kms.py | 3 --- stacker/context.py | 4 ---- stacker/dag/__init__.py | 6 ------ stacker/environment.py | 3 --- stacker/exceptions.py | 3 --- stacker/hooks/aws_lambda.py | 5 ----- stacker/hooks/command.py | 3 --- stacker/hooks/ecs.py | 3 --- stacker/hooks/iam.py | 4 ---- stacker/hooks/keypair.py | 3 --- stacker/hooks/route53.py | 3 --- stacker/hooks/utils.py | 3 --- stacker/logger/__init__.py | 3 --- stacker/lookups/__init__.py | 3 --- stacker/lookups/handlers/__init__.py | 3 --- stacker/lookups/handlers/ami.py | 3 --- stacker/lookups/handlers/default.py | 3 --- stacker/lookups/handlers/dynamodb.py | 4 ---- stacker/lookups/handlers/envvar.py | 3 --- stacker/lookups/handlers/file.py | 4 ---- stacker/lookups/handlers/hook_data.py | 3 --- stacker/lookups/handlers/kms.py | 3 --- stacker/lookups/handlers/output.py | 3 --- stacker/lookups/handlers/rxref.py | 3 --- stacker/lookups/handlers/split.py | 3 --- stacker/lookups/handlers/ssmstore.py | 4 ---- stacker/lookups/handlers/xref.py | 3 --- stacker/lookups/registry.py | 3 --- stacker/plan.py | 4 ---- stacker/providers/aws/default.py | 7 ------- stacker/providers/base.py | 4 ---- stacker/session_cache.py | 3 --- stacker/stack.py | 4 
---- stacker/status.py | 4 ---- stacker/target.py | 3 --- stacker/tests/actions/test_base.py | 5 ----- stacker/tests/actions/test_build.py | 4 ---- stacker/tests/actions/test_destroy.py | 4 ---- stacker/tests/actions/test_diff.py | 3 --- stacker/tests/blueprints/test_base.py | 3 --- stacker/tests/blueprints/test_raw.py | 3 --- stacker/tests/blueprints/test_testutil.py | 3 --- stacker/tests/conftest.py | 1 - stacker/tests/factories.py | 4 ---- stacker/tests/fixtures/mock_blueprints.py | 4 ---- stacker/tests/fixtures/mock_hooks.py | 3 --- stacker/tests/fixtures/mock_lookups.py | 3 --- stacker/tests/hooks/test_aws_lambda.py | 6 ------ stacker/tests/hooks/test_command.py | 3 --- stacker/tests/hooks/test_ecs.py | 3 --- stacker/tests/hooks/test_iam.py | 3 --- stacker/tests/hooks/test_keypair.py | 3 --- stacker/tests/lookups/handlers/test_ami.py | 3 --- stacker/tests/lookups/handlers/test_default.py | 3 --- stacker/tests/lookups/handlers/test_dynamodb.py | 3 --- stacker/tests/lookups/handlers/test_envvar.py | 3 --- stacker/tests/lookups/handlers/test_file.py | 3 --- stacker/tests/lookups/handlers/test_hook_data.py | 3 --- stacker/tests/lookups/handlers/test_output.py | 3 --- stacker/tests/lookups/handlers/test_rxref.py | 3 --- stacker/tests/lookups/handlers/test_split.py | 3 --- stacker/tests/lookups/handlers/test_ssmstore.py | 4 ---- stacker/tests/lookups/handlers/test_xref.py | 3 --- stacker/tests/lookups/test_registry.py | 3 --- stacker/tests/providers/aws/test_default.py | 4 ---- stacker/tests/test_config.py | 4 ---- stacker/tests/test_context.py | 3 --- stacker/tests/test_dag.py | 3 --- stacker/tests/test_environment.py | 3 --- stacker/tests/test_lookups.py | 3 --- stacker/tests/test_parse_user_data.py | 3 --- stacker/tests/test_plan.py | 4 ---- stacker/tests/test_stack.py | 3 --- stacker/tests/test_stacker.py | 3 --- stacker/tests/test_util.py | 5 ----- stacker/tests/test_variables.py | 3 --- stacker/tokenize_userdata.py | 3 --- stacker/ui.py | 5 ----- 
stacker/util.py | 5 ----- stacker/variables.py | 4 ---- 102 files changed, 352 deletions(-) diff --git a/stacker/__init__.py b/stacker/__init__.py index 9d4c630b4..5613493a5 100644 --- a/stacker/__init__.py +++ b/stacker/__init__.py @@ -1,5 +1,2 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import __version__ = "1.7.2" diff --git a/stacker/actions/base.py b/stacker/actions/base.py index 0763e5245..aab5386eb 100644 --- a/stacker/actions/base.py +++ b/stacker/actions/base.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import os import sys import logging diff --git a/stacker/actions/build.py b/stacker/actions/build.py index 63cb729eb..39714adb7 100644 --- a/stacker/actions/build.py +++ b/stacker/actions/build.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging from .base import BaseAction, plan, build_walker diff --git a/stacker/actions/destroy.py b/stacker/actions/destroy.py index cb3baf627..23addfe47 100644 --- a/stacker/actions/destroy.py +++ b/stacker/actions/destroy.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging from .base import BaseAction, plan, build_walker diff --git a/stacker/actions/diff.py b/stacker/actions/diff.py index 157ff0b18..d2e773141 100644 --- a/stacker/actions/diff.py +++ b/stacker/actions/diff.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str -from builtins import object import logging from operator import attrgetter diff --git a/stacker/actions/graph.py b/stacker/actions/graph.py index 1f069a68d..84a8a1f3a 100644 --- a/stacker/actions/graph.py +++ b/stacker/actions/graph.py @@ 
-1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging import sys import json diff --git a/stacker/actions/info.py b/stacker/actions/info.py index 1508de2f0..c2b8262bc 100644 --- a/stacker/actions/info.py +++ b/stacker/actions/info.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging from .base import BaseAction diff --git a/stacker/awscli_yamlhelper.py b/stacker/awscli_yamlhelper.py index f8e18fbf6..b0a24e487 100644 --- a/stacker/awscli_yamlhelper.py +++ b/stacker/awscli_yamlhelper.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import # Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You diff --git a/stacker/blueprints/base.py b/stacker/blueprints/base.py index ec1cd52a7..fe26b303d 100644 --- a/stacker/blueprints/base.py +++ b/stacker/blueprints/base.py @@ -1,9 +1,4 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str from past.builtins import basestring -from builtins import object import copy import hashlib import logging diff --git a/stacker/blueprints/raw.py b/stacker/blueprints/raw.py index 28c1c31ae..1cb819a60 100644 --- a/stacker/blueprints/raw.py +++ b/stacker/blueprints/raw.py @@ -1,7 +1,4 @@ """Blueprint representing raw template module.""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import hashlib import json diff --git a/stacker/blueprints/testutil.py b/stacker/blueprints/testutil.py index 2824c52c8..3c0bd6242 100644 --- a/stacker/blueprints/testutil.py +++ b/stacker/blueprints/testutil.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from 
__future__ import division -from __future__ import absolute_import import difflib import json import unittest diff --git a/stacker/blueprints/variables/types.py b/stacker/blueprints/variables/types.py index 5bfa77108..53e20a9b8 100644 --- a/stacker/blueprints/variables/types.py +++ b/stacker/blueprints/variables/types.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object class TroposphereType(object): diff --git a/stacker/commands/__init__.py b/stacker/commands/__init__.py index d783c17fd..cecf98c04 100644 --- a/stacker/commands/__init__.py +++ b/stacker/commands/__init__.py @@ -1,4 +1 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .stacker import Stacker # NOQA diff --git a/stacker/commands/stacker/__init__.py b/stacker/commands/stacker/__init__.py index 6c96ac286..9cec3f2cc 100644 --- a/stacker/commands/stacker/__init__.py +++ b/stacker/commands/stacker/__init__.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging from .build import Build diff --git a/stacker/commands/stacker/base.py b/stacker/commands/stacker/base.py index f49aa64b4..52e06b4ee 100644 --- a/stacker/commands/stacker/base.py +++ b/stacker/commands/stacker/base.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import argparse import threading import signal diff --git a/stacker/commands/stacker/build.py b/stacker/commands/stacker/build.py index 501bc3560..9c3d1a737 100644 --- a/stacker/commands/stacker/build.py +++ b/stacker/commands/stacker/build.py @@ -5,9 +5,6 @@ skip executing anything against the stack. 
""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .base import BaseCommand, cancel from ...actions import build diff --git a/stacker/commands/stacker/destroy.py b/stacker/commands/stacker/destroy.py index 333631744..af1632890 100644 --- a/stacker/commands/stacker/destroy.py +++ b/stacker/commands/stacker/destroy.py @@ -5,9 +5,6 @@ stacks. """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .base import BaseCommand, cancel from ...actions import destroy diff --git a/stacker/commands/stacker/diff.py b/stacker/commands/stacker/diff.py index 27ddf0818..0b9eb8409 100644 --- a/stacker/commands/stacker/diff.py +++ b/stacker/commands/stacker/diff.py @@ -3,9 +3,6 @@ Sometimes small changes can have big impacts. Run "stacker diff" before "stacker build" to detect bad things(tm) from happening in advance! """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .base import BaseCommand from ...actions import diff diff --git a/stacker/commands/stacker/graph.py b/stacker/commands/stacker/graph.py index c1360633d..7cf11a083 100644 --- a/stacker/commands/stacker/graph.py +++ b/stacker/commands/stacker/graph.py @@ -1,9 +1,6 @@ """Prints the the relationships between steps as a graph. 
""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .base import BaseCommand from ...actions import graph diff --git a/stacker/commands/stacker/info.py b/stacker/commands/stacker/info.py index ac847bbec..29fcd88f1 100644 --- a/stacker/commands/stacker/info.py +++ b/stacker/commands/stacker/info.py @@ -1,7 +1,4 @@ """Gets information on the CloudFormation stacks based on the given config.""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from .base import BaseCommand from ...actions import info diff --git a/stacker/config/__init__.py b/stacker/config/__init__.py index 29ec0cf90..d14ec49ce 100644 --- a/stacker/config/__init__.py +++ b/stacker/config/__init__.py @@ -1,10 +1,4 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() from past.types import basestring -from builtins import str import copy import sys import logging diff --git a/stacker/config/translators/__init__.py b/stacker/config/translators/__init__.py index 001824432..810211484 100644 --- a/stacker/config/translators/__init__.py +++ b/stacker/config/translators/__init__.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import yaml from .kms import kms_simple_constructor diff --git a/stacker/config/translators/kms.py b/stacker/config/translators/kms.py index 9c2e1fe4d..8fa9af0a6 100644 --- a/stacker/config/translators/kms.py +++ b/stacker/config/translators/kms.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import # NOTE: The translator is going to be deprecated in favor of the lookup from ...lookups.handlers.kms import KmsLookup diff --git a/stacker/context.py b/stacker/context.py 
index 242bb0cd6..c441be1c8 100644 --- a/stacker/context.py +++ b/stacker/context.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import collections import logging diff --git a/stacker/dag/__init__.py b/stacker/dag/__init__.py index ca12f20bf..47415ebfa 100644 --- a/stacker/dag/__init__.py +++ b/stacker/dag/__init__.py @@ -1,9 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() -from builtins import object import collections import logging from threading import Thread diff --git a/stacker/environment.py b/stacker/environment.py index e4a2be174..fa38f66c8 100644 --- a/stacker/environment.py +++ b/stacker/environment.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import yaml diff --git a/stacker/exceptions.py b/stacker/exceptions.py index e4b5f7939..1d05083fc 100644 --- a/stacker/exceptions.py +++ b/stacker/exceptions.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import class InvalidConfig(Exception): diff --git a/stacker/hooks/aws_lambda.py b/stacker/hooks/aws_lambda.py index 5832559e5..71c38f0fc 100644 --- a/stacker/hooks/aws_lambda.py +++ b/stacker/hooks/aws_lambda.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() from past.builtins import basestring import os import os.path diff --git a/stacker/hooks/command.py b/stacker/hooks/command.py index 2539753d8..7a6ade53d 100644 --- a/stacker/hooks/command.py +++ b/stacker/hooks/command.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ 
import division -from __future__ import absolute_import import logging import os diff --git a/stacker/hooks/ecs.py b/stacker/hooks/ecs.py index 308c2eccc..ef3f52abc 100644 --- a/stacker/hooks/ecs.py +++ b/stacker/hooks/ecs.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import # A lot of this code exists to deal w/ the broken ECS connect_to_region # function, and will be removed once this pull request is accepted: # https://github.com/boto/boto/pull/3143 diff --git a/stacker/hooks/iam.py b/stacker/hooks/iam.py index f04b51f28..07ec7b68c 100644 --- a/stacker/hooks/iam.py +++ b/stacker/hooks/iam.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import input import copy import logging diff --git a/stacker/hooks/keypair.py b/stacker/hooks/keypair.py index 3114729cd..bf0487792 100644 --- a/stacker/hooks/keypair.py +++ b/stacker/hooks/keypair.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging import os diff --git a/stacker/hooks/route53.py b/stacker/hooks/route53.py index c163e091d..a3cca1f23 100644 --- a/stacker/hooks/route53.py +++ b/stacker/hooks/route53.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging from stacker.session_cache import get_session diff --git a/stacker/hooks/utils.py b/stacker/hooks/utils.py index 718fda3a5..bb8d9924f 100644 --- a/stacker/hooks/utils.py +++ b/stacker/hooks/utils.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import os import sys import collections diff --git a/stacker/logger/__init__.py b/stacker/logger/__init__.py index 72c7efa1d..b077f0007 100644 --- a/stacker/logger/__init__.py +++ 
b/stacker/logger/__init__.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import sys import logging diff --git a/stacker/lookups/__init__.py b/stacker/lookups/__init__.py index 4db0bb04f..e8513a560 100644 --- a/stacker/lookups/__init__.py +++ b/stacker/lookups/__init__.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from past.builtins import basestring from collections import namedtuple import re diff --git a/stacker/lookups/handlers/__init__.py b/stacker/lookups/handlers/__init__.py index 6b18bed59..d11b04c4b 100644 --- a/stacker/lookups/handlers/__init__.py +++ b/stacker/lookups/handlers/__init__.py @@ -1,6 +1,3 @@ -from __future__ import absolute_import -from __future__ import print_function -from __future__ import division class LookupHandler(object): diff --git a/stacker/lookups/handlers/ami.py b/stacker/lookups/handlers/ami.py index fa4e660c8..1b30ef032 100644 --- a/stacker/lookups/handlers/ami.py +++ b/stacker/lookups/handlers/ami.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from stacker.session_cache import get_session import re import operator diff --git a/stacker/lookups/handlers/default.py b/stacker/lookups/handlers/default.py index fc2b5c845..ed9ac5f84 100644 --- a/stacker/lookups/handlers/default.py +++ b/stacker/lookups/handlers/default.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from . 
import LookupHandler diff --git a/stacker/lookups/handlers/dynamodb.py b/stacker/lookups/handlers/dynamodb.py index 9dcd97ce8..f6f965be1 100644 --- a/stacker/lookups/handlers/dynamodb.py +++ b/stacker/lookups/handlers/dynamodb.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str from botocore.exceptions import ClientError import re from stacker.session_cache import get_session diff --git a/stacker/lookups/handlers/envvar.py b/stacker/lookups/handlers/envvar.py index a1d9ed5fd..37e38fe6d 100644 --- a/stacker/lookups/handlers/envvar.py +++ b/stacker/lookups/handlers/envvar.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import os from . import LookupHandler diff --git a/stacker/lookups/handlers/file.py b/stacker/lookups/handlers/file.py index 8c3e74eef..9e7f0d2a0 100644 --- a/stacker/lookups/handlers/file.py +++ b/stacker/lookups/handlers/file.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import bytes, str import base64 import json diff --git a/stacker/lookups/handlers/hook_data.py b/stacker/lookups/handlers/hook_data.py index c27f65b93..e2c1c7828 100644 --- a/stacker/lookups/handlers/hook_data.py +++ b/stacker/lookups/handlers/hook_data.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from . 
import LookupHandler diff --git a/stacker/lookups/handlers/kms.py b/stacker/lookups/handlers/kms.py index 1a516f3d9..40e4dede7 100644 --- a/stacker/lookups/handlers/kms.py +++ b/stacker/lookups/handlers/kms.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import codecs import sys from stacker.session_cache import get_session diff --git a/stacker/lookups/handlers/output.py b/stacker/lookups/handlers/output.py index a40ba0fb3..10d37de0f 100644 --- a/stacker/lookups/handlers/output.py +++ b/stacker/lookups/handlers/output.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import re from collections import namedtuple diff --git a/stacker/lookups/handlers/rxref.py b/stacker/lookups/handlers/rxref.py index 858a13a3d..546f242d4 100644 --- a/stacker/lookups/handlers/rxref.py +++ b/stacker/lookups/handlers/rxref.py @@ -11,9 +11,6 @@ some-relative-fully-qualified-stack-name::SomeOutputName} """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from . import LookupHandler from .output import deconstruct diff --git a/stacker/lookups/handlers/split.py b/stacker/lookups/handlers/split.py index 8908c7002..28fad2fbd 100644 --- a/stacker/lookups/handlers/split.py +++ b/stacker/lookups/handlers/split.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from . 
import LookupHandler TYPE_NAME = "split" diff --git a/stacker/lookups/handlers/ssmstore.py b/stacker/lookups/handlers/ssmstore.py index 2da724d30..1bff97c4f 100644 --- a/stacker/lookups/handlers/ssmstore.py +++ b/stacker/lookups/handlers/ssmstore.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str from stacker.session_cache import get_session diff --git a/stacker/lookups/handlers/xref.py b/stacker/lookups/handlers/xref.py index a318d252b..484171be5 100644 --- a/stacker/lookups/handlers/xref.py +++ b/stacker/lookups/handlers/xref.py @@ -10,9 +10,6 @@ conf_value: ${xref some-fully-qualified-stack-name::SomeOutputName} """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from . import LookupHandler from .output import deconstruct diff --git a/stacker/lookups/registry.py b/stacker/lookups/registry.py index 7d0fab46d..aaaf7bffb 100644 --- a/stacker/lookups/registry.py +++ b/stacker/lookups/registry.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import logging import warnings diff --git a/stacker/plan.py b/stacker/plan.py index 24b415e04..2318b7cb3 100644 --- a/stacker/plan.py +++ b/stacker/plan.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import os import logging import time diff --git a/stacker/providers/aws/default.py b/stacker/providers/aws/default.py index c45f46119..4328e4880 100644 --- a/stacker/providers/aws/default.py +++ b/stacker/providers/aws/default.py @@ -1,10 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() -from builtins import range -from builtins 
import object import json import yaml import logging diff --git a/stacker/providers/base.py b/stacker/providers/base.py index c48291f13..1257fd863 100644 --- a/stacker/providers/base.py +++ b/stacker/providers/base.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object def not_implemented(method): diff --git a/stacker/session_cache.py b/stacker/session_cache.py index 9442d4726..fa67d1da8 100644 --- a/stacker/session_cache.py +++ b/stacker/session_cache.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import boto3 import logging from .ui import ui diff --git a/stacker/stack.py b/stacker/stack.py index 08b24335e..ddbf51547 100644 --- a/stacker/stack.py +++ b/stacker/stack.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import copy from . 
import util diff --git a/stacker/status.py b/stacker/status.py index 395d575d4..42753a39d 100644 --- a/stacker/status.py +++ b/stacker/status.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import operator diff --git a/stacker/target.py b/stacker/target.py index b57b3e672..a422947bf 100644 --- a/stacker/target.py +++ b/stacker/target.py @@ -1,6 +1,3 @@ -from __future__ import division -from __future__ import absolute_import -from __future__ import print_function class Target(object): diff --git a/stacker/tests/actions/test_base.py b/stacker/tests/actions/test_base.py index 320740b1b..3223f605e 100644 --- a/stacker/tests/actions/test_base.py +++ b/stacker/tests/actions/test_base.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() import unittest diff --git a/stacker/tests/actions/test_build.py b/stacker/tests/actions/test_build.py index 018101401..34deb8329 100644 --- a/stacker/tests/actions/test_build.py +++ b/stacker/tests/actions/test_build.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str import unittest from collections import namedtuple diff --git a/stacker/tests/actions/test_destroy.py b/stacker/tests/actions/test_destroy.py index 697afd660..059358422 100644 --- a/stacker/tests/actions/test_destroy.py +++ b/stacker/tests/actions/test_destroy.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import object import unittest import mock diff --git a/stacker/tests/actions/test_diff.py b/stacker/tests/actions/test_diff.py index 10963a8bf..08277b638 100644 --- a/stacker/tests/actions/test_diff.py +++ 
b/stacker/tests/actions/test_diff.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from operator import attrgetter diff --git a/stacker/tests/blueprints/test_base.py b/stacker/tests/blueprints/test_base.py index 52187aaa6..6863b7e56 100644 --- a/stacker/tests/blueprints/test_base.py +++ b/stacker/tests/blueprints/test_base.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import sys from mock import patch diff --git a/stacker/tests/blueprints/test_raw.py b/stacker/tests/blueprints/test_raw.py index 4c18bb113..60b6714b2 100644 --- a/stacker/tests/blueprints/test_raw.py +++ b/stacker/tests/blueprints/test_raw.py @@ -1,7 +1,4 @@ """Test module for blueprint-from-raw-template module.""" -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import json import unittest diff --git a/stacker/tests/blueprints/test_testutil.py b/stacker/tests/blueprints/test_testutil.py index c741af659..da7be3fa8 100644 --- a/stacker/tests/blueprints/test_testutil.py +++ b/stacker/tests/blueprints/test_testutil.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from troposphere import ecr diff --git a/stacker/tests/conftest.py b/stacker/tests/conftest.py index 6597ebc81..310688038 100644 --- a/stacker/tests/conftest.py +++ b/stacker/tests/conftest.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import, division, print_function import logging import os diff --git a/stacker/tests/factories.py b/stacker/tests/factories.py index f930c5177..2115a511a 100644 --- a/stacker/tests/factories.py +++ b/stacker/tests/factories.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import 
-from builtins import object from mock import MagicMock from stacker.context import Context diff --git a/stacker/tests/fixtures/mock_blueprints.py b/stacker/tests/fixtures/mock_blueprints.py index 519cde208..a26ad5a2f 100644 --- a/stacker/tests/fixtures/mock_blueprints.py +++ b/stacker/tests/fixtures/mock_blueprints.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import range from troposphere import GetAtt, Output, Sub, Ref from troposphere import iam diff --git a/stacker/tests/fixtures/mock_hooks.py b/stacker/tests/fixtures/mock_hooks.py index 67a5d3ecb..2f6c6b451 100644 --- a/stacker/tests/fixtures/mock_hooks.py +++ b/stacker/tests/fixtures/mock_hooks.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import def mock_hook(provider, context, **kwargs): diff --git a/stacker/tests/fixtures/mock_lookups.py b/stacker/tests/fixtures/mock_lookups.py index 36d3caf4a..07c471564 100644 --- a/stacker/tests/fixtures/mock_lookups.py +++ b/stacker/tests/fixtures/mock_lookups.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import TYPE_NAME = "mock" diff --git a/stacker/tests/hooks/test_aws_lambda.py b/stacker/tests/hooks/test_aws_lambda.py index 6c2bc948f..cf76d5187 100644 --- a/stacker/tests/hooks/test_aws_lambda.py +++ b/stacker/tests/hooks/test_aws_lambda.py @@ -1,9 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() -from builtins import range import os.path import os import mock diff --git a/stacker/tests/hooks/test_command.py b/stacker/tests/hooks/test_command.py index f0d8ef455..fdb470a4f 100644 --- a/stacker/tests/hooks/test_command.py +++ b/stacker/tests/hooks/test_command.py @@ -1,6 +1,3 
@@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import os import unittest diff --git a/stacker/tests/hooks/test_ecs.py b/stacker/tests/hooks/test_ecs.py index 12998590f..5623401b1 100644 --- a/stacker/tests/hooks/test_ecs.py +++ b/stacker/tests/hooks/test_ecs.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import boto3 diff --git a/stacker/tests/hooks/test_iam.py b/stacker/tests/hooks/test_iam.py index d194f4f06..197f7d6a9 100644 --- a/stacker/tests/hooks/test_iam.py +++ b/stacker/tests/hooks/test_iam.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import boto3 diff --git a/stacker/tests/hooks/test_keypair.py b/stacker/tests/hooks/test_keypair.py index 7858d0748..0d9b4c8b6 100644 --- a/stacker/tests/hooks/test_keypair.py +++ b/stacker/tests/hooks/test_keypair.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import sys from collections import namedtuple from contextlib import contextmanager diff --git a/stacker/tests/lookups/handlers/test_ami.py b/stacker/tests/lookups/handlers/test_ami.py index 0e34b7b47..b3eb78634 100644 --- a/stacker/tests/lookups/handlers/test_ami.py +++ b/stacker/tests/lookups/handlers/test_ami.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import mock from botocore.stub import Stubber diff --git a/stacker/tests/lookups/handlers/test_default.py b/stacker/tests/lookups/handlers/test_default.py index a59ccd6d8..e9ce36a11 100644 --- a/stacker/tests/lookups/handlers/test_default.py +++ b/stacker/tests/lookups/handlers/test_default.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ 
import division -from __future__ import absolute_import from mock import MagicMock import unittest diff --git a/stacker/tests/lookups/handlers/test_dynamodb.py b/stacker/tests/lookups/handlers/test_dynamodb.py index 44b6cc693..17a84831b 100644 --- a/stacker/tests/lookups/handlers/test_dynamodb.py +++ b/stacker/tests/lookups/handlers/test_dynamodb.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import mock from botocore.stub import Stubber diff --git a/stacker/tests/lookups/handlers/test_envvar.py b/stacker/tests/lookups/handlers/test_envvar.py index 71c9bf8a5..19cfc1b12 100644 --- a/stacker/tests/lookups/handlers/test_envvar.py +++ b/stacker/tests/lookups/handlers/test_envvar.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.lookups.handlers.envvar import EnvvarLookup import os diff --git a/stacker/tests/lookups/handlers/test_file.py b/stacker/tests/lookups/handlers/test_file.py index 157aa122d..1fcb9ea5d 100644 --- a/stacker/tests/lookups/handlers/test_file.py +++ b/stacker/tests/lookups/handlers/test_file.py @@ -1,8 +1,5 @@ # encoding: utf-8 -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import mock diff --git a/stacker/tests/lookups/handlers/test_hook_data.py b/stacker/tests/lookups/handlers/test_hook_data.py index 6dc0014d1..f24e740bd 100644 --- a/stacker/tests/lookups/handlers/test_hook_data.py +++ b/stacker/tests/lookups/handlers/test_hook_data.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest diff --git a/stacker/tests/lookups/handlers/test_output.py b/stacker/tests/lookups/handlers/test_output.py index 3891dfe25..46c1b00cf 100644 --- 
a/stacker/tests/lookups/handlers/test_output.py +++ b/stacker/tests/lookups/handlers/test_output.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from mock import MagicMock import unittest diff --git a/stacker/tests/lookups/handlers/test_rxref.py b/stacker/tests/lookups/handlers/test_rxref.py index b5e7cb828..18fcc8b50 100644 --- a/stacker/tests/lookups/handlers/test_rxref.py +++ b/stacker/tests/lookups/handlers/test_rxref.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from mock import MagicMock import unittest diff --git a/stacker/tests/lookups/handlers/test_split.py b/stacker/tests/lookups/handlers/test_split.py index 990799bb2..b9b534c87 100644 --- a/stacker/tests/lookups/handlers/test_split.py +++ b/stacker/tests/lookups/handlers/test_split.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.lookups.handlers.split import SplitLookup diff --git a/stacker/tests/lookups/handlers/test_ssmstore.py b/stacker/tests/lookups/handlers/test_ssmstore.py index daff2444d..0a3903934 100644 --- a/stacker/tests/lookups/handlers/test_ssmstore.py +++ b/stacker/tests/lookups/handlers/test_ssmstore.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str import unittest import mock from botocore.stub import Stubber diff --git a/stacker/tests/lookups/handlers/test_xref.py b/stacker/tests/lookups/handlers/test_xref.py index cb611ed65..7936b7897 100644 --- a/stacker/tests/lookups/handlers/test_xref.py +++ b/stacker/tests/lookups/handlers/test_xref.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from mock import MagicMock import unittest 
diff --git a/stacker/tests/lookups/test_registry.py b/stacker/tests/lookups/test_registry.py index 1dc0b41f1..2807cbf98 100644 --- a/stacker/tests/lookups/test_registry.py +++ b/stacker/tests/lookups/test_registry.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from mock import MagicMock diff --git a/stacker/tests/providers/aws/test_default.py b/stacker/tests/providers/aws/test_default.py index 92e24b7b5..ea67a7270 100644 --- a/stacker/tests/providers/aws/test_default.py +++ b/stacker/tests/providers/aws/test_default.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import range import copy from datetime import datetime import os.path diff --git a/stacker/tests/test_config.py b/stacker/tests/test_config.py index 9795784a2..44b172be5 100644 --- a/stacker/tests/test_config.py +++ b/stacker/tests/test_config.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import next import sys import unittest import yaml diff --git a/stacker/tests/test_context.py b/stacker/tests/test_context.py index 0015aab39..c1436fea2 100644 --- a/stacker/tests/test_context.py +++ b/stacker/tests/test_context.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.context import Context, get_fqn diff --git a/stacker/tests/test_dag.py b/stacker/tests/test_dag.py index ab83d39a4..94bcd584e 100644 --- a/stacker/tests/test_dag.py +++ b/stacker/tests/test_dag.py @@ -1,7 +1,4 @@ """ Tests on the DAG implementation """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import threading import pytest diff --git a/stacker/tests/test_environment.py 
b/stacker/tests/test_environment.py index bed424333..05f4e291f 100644 --- a/stacker/tests/test_environment.py +++ b/stacker/tests/test_environment.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.environment import ( diff --git a/stacker/tests/test_lookups.py b/stacker/tests/test_lookups.py index 34bd9a7d6..444f776c7 100644 --- a/stacker/tests/test_lookups.py +++ b/stacker/tests/test_lookups.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.lookups import extract_lookups, extract_lookups_from_string diff --git a/stacker/tests/test_parse_user_data.py b/stacker/tests/test_parse_user_data.py index 6395bf9e4..3ab688461 100644 --- a/stacker/tests/test_parse_user_data.py +++ b/stacker/tests/test_parse_user_data.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest import yaml diff --git a/stacker/tests/test_plan.py b/stacker/tests/test_plan.py index dda72569b..422c217a3 100644 --- a/stacker/tests/test_plan.py +++ b/stacker/tests/test_plan.py @@ -1,7 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import range import os import shutil import tempfile diff --git a/stacker/tests/test_stack.py b/stacker/tests/test_stack.py index ccdab6622..c29c1bb36 100644 --- a/stacker/tests/test_stack.py +++ b/stacker/tests/test_stack.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import from mock import MagicMock import unittest diff --git a/stacker/tests/test_stacker.py b/stacker/tests/test_stacker.py index 237b9628f..6ce58d58b 100644 --- a/stacker/tests/test_stacker.py +++ b/stacker/tests/test_stacker.py @@ -1,6 +1,3 
@@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest from stacker.commands import Stacker diff --git a/stacker/tests/test_util.py b/stacker/tests/test_util.py index 0163ed4c8..22c8836a3 100644 --- a/stacker/tests/test_util.py +++ b/stacker/tests/test_util.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library -standard_library.install_aliases() import unittest diff --git a/stacker/tests/test_variables.py b/stacker/tests/test_variables.py index 2b1acbc55..e58a128a9 100644 --- a/stacker/tests/test_variables.py +++ b/stacker/tests/test_variables.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import unittest diff --git a/stacker/tokenize_userdata.py b/stacker/tokenize_userdata.py index 2d8cd1448..dcc68ce27 100644 --- a/stacker/tokenize_userdata.py +++ b/stacker/tokenize_userdata.py @@ -1,6 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import re from troposphere import Ref, GetAtt diff --git a/stacker/ui.py b/stacker/ui.py index a3ffec388..074b759dd 100644 --- a/stacker/ui.py +++ b/stacker/ui.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import input -from builtins import object import threading import logging from getpass import getpass diff --git a/stacker/util.py b/stacker/util.py index dc9e1394d..e1fc53962 100644 --- a/stacker/util.py +++ b/stacker/util.py @@ -1,8 +1,3 @@ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from builtins import str -from builtins import object import copy import uuid import importlib diff --git a/stacker/variables.py 
b/stacker/variables.py index 5f49b2074..b54193a01 100644 --- a/stacker/variables.py +++ b/stacker/variables.py @@ -1,11 +1,7 @@ -from __future__ import absolute_import -from __future__ import print_function -from __future__ import division import re from past.builtins import basestring -from builtins import object from string import Template from .exceptions import InvalidLookupCombination, UnresolvedVariable, \ From 1284b3772d76234dc4ba6a0a636a8c4f0c177d46 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Thu, 3 Feb 2022 23:49:52 +0100 Subject: [PATCH 68/74] Relax requirements that were pinned down because they no longer supported python 2 --- requirements.in | 15 +++++++++++++++ setup.py | 38 ++++++++++---------------------------- test-requirements.in | 6 ++++++ 3 files changed, 31 insertions(+), 28 deletions(-) create mode 100644 requirements.in create mode 100644 test-requirements.in diff --git a/requirements.in b/requirements.in new file mode 100644 index 000000000..25c1d330c --- /dev/null +++ b/requirements.in @@ -0,0 +1,15 @@ +troposphere>=1.9.0 +botocore>=1.12.111 +boto3>=1.9.111,<2.0 +PyYAML>=3.13b1 +awacs>=0.6.0 +gitpython>=3.0 +jinja2>=2.7 +schematics>=2.0.1,<2.1.0 +formic2 +python-dateutil>=2.0,<3.0 +MarkupSafe>=2 +more-itertools +rsa>=4.7 +python-jose +future diff --git a/setup.py b/setup.py index 546875195..9805a40d9 100644 --- a/setup.py +++ b/setup.py @@ -5,34 +5,15 @@ src_dir = os.path.dirname(__file__) -install_requires = [ - "future", - "troposphere>=1.9.0", - 'botocore>=1.12.111', # matching boto3 requirement - "boto3>=1.9.111,<2.0", - "PyYAML>=3.13b1", - "awacs>=0.6.0", - "gitpython>=2.0,<3.0", - "jinja2>=2.7,<3.0a", - "schematics>=2.0.1,<2.1.0", - "formic2", - "python-dateutil>=2.0,<3.0", - "MarkupSafe<2.0", # 2.0 dropped python 2.7, 3.5 support - temporary - "more-itertools<6.0.0", # 6.0.0 dropped python 2.7 support - temporary - "rsa==4.5", # 4.6 dropped python 2.7 support - temporary - "python-jose<3.2.0", # 3.2.0 dropped python 2.7 
support - temporary -] +def get_install_requirements(path): + content = open(os.path.join(os.path.dirname(__file__), path)).read() + return [req for req in content.split("\n") if req != "" and not req.startswith("#")] + +install_requires = get_install_requirements("requirements.in") setup_requires = ['pytest-runner'] -tests_require = [ - "pytest~=4.3", - "pytest-cov~=2.6", - "mock~=2.0", - "moto[awslambda]~=1.3.16", - "testfixtures~=4.10.0", - "flake8-future-import", -] +tests_require = get_install_requirements("test-requirements.in") scripts = [ "scripts/compare_env", @@ -68,8 +49,9 @@ def read(filename): "Development Status :: 5 - Production/Stable", "Environment :: Console", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", ], ) diff --git a/test-requirements.in b/test-requirements.in new file mode 100644 index 000000000..91876602e --- /dev/null +++ b/test-requirements.in @@ -0,0 +1,6 @@ +pytest~=4.3 +pytest-cov~=2.6 +mock~=2.0 +moto[awslambda,ec2]~=3.0.0 +testfixtures~=4.10.0 +flake8 From 52f50de81761da799aedc925fe8bfebcc5e171f9 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Thu, 3 Feb 2022 23:52:06 +0100 Subject: [PATCH 69/74] Migrate base blueptint to breaking changes in troposphere 3.0.0 --- Makefile | 4 ++-- requirements.in | 2 +- stacker/blueprints/base.py | 2 +- stacker/tests/blueprints/test_base.py | 8 ++++---- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Makefile b/Makefile index 2af72f238..7c2a504ba 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,8 @@ build: docker build -t remind101/stacker . 
lint: - flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,W503,W504,W605 --exclude stacker/tests/ stacker - flake8 --require-code --min-version=2.7 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming + flake8 --ignore FI50,FI51,FI53,FI14,E402,W503,W504,W605 --exclude stacker/tests/ stacker + flake8 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean python setup.py test diff --git a/requirements.in b/requirements.in index 25c1d330c..02ee345f0 100644 --- a/requirements.in +++ b/requirements.in @@ -1,4 +1,4 @@ -troposphere>=1.9.0 +troposphere>=3.0.0 botocore>=1.12.111 boto3>=1.9.111,<2.0 PyYAML>=3.13b1 diff --git a/stacker/blueprints/base.py b/stacker/blueprints/base.py index fe26b303d..b6a456ac4 100644 --- a/stacker/blueprints/base.py +++ b/stacker/blueprints/base.py @@ -534,7 +534,7 @@ def set_template_description(self, description): template. """ - self.template.add_description(description) + self.template.set_description(description) def add_output(self, name, value): """Simple helper for adding outputs. 
diff --git a/stacker/tests/blueprints/test_base.py b/stacker/tests/blueprints/test_base.py index 6863b7e56..2eb39d1c7 100644 --- a/stacker/tests/blueprints/test_base.py +++ b/stacker/tests/blueprints/test_base.py @@ -70,8 +70,8 @@ class TestBlueprint(Blueprint): } def create_template(self): - self.template.add_version('2010-09-09') - self.template.add_description('TestBlueprint') + self.template.set_version('2010-09-09') + self.template.set_description('TestBlueprint') expected_json = """{ "AWSTemplateFormatVersion": "2010-09-09", @@ -105,8 +105,8 @@ class TestBlueprint(Blueprint): VARIABLES = {} def create_template(self): - self.template.add_version('2010-09-09') - self.template.add_description('TestBlueprint') + self.template.set_version('2010-09-09') + self.template.set_description('TestBlueprint') self.add_output(output_name, output_value) bp = TestBlueprint(name="test", context=mock_context()) From 72b59a05df6d2b8c69686e2d9082cb64c48c5409 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Thu, 3 Feb 2022 23:52:32 +0100 Subject: [PATCH 70/74] Update README.rst about supported python versions --- README.rst | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/README.rst b/README.rst index ae3e2e97d..d07f6d40d 100644 --- a/README.rst +++ b/README.rst @@ -45,8 +45,7 @@ both in development, staging, and production without any major issues. Requirements ============ -* Python 2.7 -* Python 3.5+ +* Python 3.7+ Stacker Command =============== @@ -60,7 +59,7 @@ Here are some examples: ``destroy``: tears down your stacks - + ``diff``: compares your currently deployed stack templates to your config files @@ -76,26 +75,26 @@ Getting Started ``stacker_cookiecutter``: https://github.com/cloudtools/stacker_cookiecutter We recommend creating your base `stacker` project using ``stacker_cookiecutter``. 
- This tool will install all the needed dependencies and created the project + This tool will install all the needed dependencies and created the project directory structure and files. The resulting files are well documented with comments to explain their purpose and examples on how to extend. - + ``stacker_blueprints``: https://github.com/cloudtools/stacker_blueprints This repository holds working examples of ``stacker`` blueprints. - Each blueprint works in isolation and may be referenced, extended, or + Each blueprint works in isolation and may be referenced, extended, or copied into your project files. The blueprints are written in Python and use the troposphere_ library. - + ``stacker reference documentation``: - + We document all functionality and features of stacker in our extensive reference documentation located at readthedocs_. ``AWS OSS Blog``: https://aws.amazon.com/blogs/opensource/using-aws-codepipeline-and-open-source-tools-for-at-scale-infrastructure-deployment/ The AWS OSS Blog has a getting started guide using stacker with AWS CodePipeline. 
- + Docker ====== From 91c18957dde7697d8851b77e08da420b9004c74f Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Thu, 3 Feb 2022 23:53:36 +0100 Subject: [PATCH 71/74] Change CI Tests to test on offically supported python version range 3.7-3.10 --- .circleci/config.yml | 72 ++++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b8915e1d7..1286b1d52 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -5,44 +5,44 @@ workflows: test-all: jobs: - lint - - unit-test-27: + - unit-test-37: requires: - lint - - functional-test-27: + - functional-test-37: requires: - - unit-test-27 - - unit-test-35: + - unit-test-37 + - unit-test-38: requires: - lint - - functional-test-35: + - functional-test-38: requires: - - unit-test-35 - - functional-test-27 - - unit-test-36: + - unit-test-38 + - functional-test-37 + - unit-test-39: requires: - lint - - functional-test-36: + - functional-test-39: requires: - - unit-test-36 - - functional-test-35 - - unit-test-37: + - unit-test-39 + - functional-test-38 + - unit-test-310: requires: - lint - - functional-test-37: + - functional-test-310: requires: - - unit-test-37 - - functional-test-36 + - unit-test-310 + - functional-test-39 - cleanup-functional-buckets: requires: - - functional-test-27 - - functional-test-35 - - functional-test-36 - functional-test-37 + - functional-test-38 + - functional-test-39 + - functional-test-310 jobs: lint: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.7 steps: - checkout - run: sudo pip install flake8 codecov pep8-naming flake8-future-import @@ -50,32 +50,32 @@ jobs: - run: flake8 --version - run: sudo make lint - unit-test-27: + unit-test-37: docker: - - image: circleci/python:2.7 + - image: circleci/python:3.7 steps: &unit_test_steps - checkout - run: sudo python setup.py install - run: sudo make test-unit - unit-test-35: + unit-test-38: docker: - - image: 
circleci/python:3.5 + - image: circleci/python:3.8 steps: *unit_test_steps - unit-test-36: + unit-test-39: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.9 steps: *unit_test_steps - unit-test-37: + unit-test-310: docker: - - image: circleci/python:3.7 + - image: circleci/python:3.10 steps: *unit_test_steps - functional-test-27: + functional-test-37: docker: - - image: circleci/python:2.7 + - image: circleci/python:3.7 steps: &functional_test_steps - checkout - run: @@ -94,24 +94,24 @@ jobs: export STACKER_ROLE=arn:aws:iam::459170252436:role/cloudtools-functional-tests-sta-FunctionalTestRole-1M9HFJ9VQVMFX sudo -E make test-functional - functional-test-35: + functional-test-38: docker: - - image: circleci/python:3.5 + - image: circleci/python:3.8 steps: *functional_test_steps - functional-test-36: + functional-test-39: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.9 steps: *functional_test_steps - functional-test-37: + functional-test-310: docker: - - image: circleci/python:3.7 + - image: circleci/python:3.10 steps: *functional_test_steps cleanup-functional-buckets: docker: - - image: circleci/python:2.7 + - image: circleci/python:3.7 steps: - checkout - run: From 15016d28122dd2a17448bb742495f30175995465 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Fri, 4 Feb 2022 01:11:57 +0100 Subject: [PATCH 72/74] remove flake8-future-import plugin from lint checks on circlcie --- .circleci/config.yml | 2 +- Makefile | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1286b1d52..2d28263c4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -45,7 +45,7 @@ jobs: - image: circleci/python:3.7 steps: - checkout - - run: sudo pip install flake8 codecov pep8-naming flake8-future-import + - run: sudo pip install flake8 codecov pep8-naming - run: sudo python setup.py install - run: flake8 --version - run: sudo make lint diff --git a/Makefile b/Makefile index 
7c2a504ba..01f48c3d5 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,8 @@ build: docker build -t remind101/stacker . lint: - flake8 --ignore FI50,FI51,FI53,FI14,E402,W503,W504,W605 --exclude stacker/tests/ stacker - flake8 --ignore FI50,FI51,FI53,FI14,E402,N802,W605 stacker/tests # ignore setUp naming + flake8 --ignore E402,W503,W504,W605 --exclude stacker/tests/ stacker + flake8 --ignore E402,N802,W605 stacker/tests # ignore setUp naming test-unit: clean python setup.py test From f63798fcaf1b80855edc4f923e2e410c73687619 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Fri, 4 Feb 2022 01:21:27 +0100 Subject: [PATCH 73/74] Ignore new N818 error code form pep8-naming linter --- Makefile | 4 ++-- test-requirements.in | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 01f48c3d5..5f3adebe2 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,8 @@ build: docker build -t remind101/stacker . lint: - flake8 --ignore E402,W503,W504,W605 --exclude stacker/tests/ stacker - flake8 --ignore E402,N802,W605 stacker/tests # ignore setUp naming + flake8 --ignore E402,W503,W504,W605,N818 --exclude stacker/tests/ stacker + flake8 --ignore E402,N802,W605,N818 stacker/tests # ignore setUp naming test-unit: clean python setup.py test diff --git a/test-requirements.in b/test-requirements.in index 91876602e..d9c630a1b 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -4,3 +4,4 @@ mock~=2.0 moto[awslambda,ec2]~=3.0.0 testfixtures~=4.10.0 flake8 +pep8-naming \ No newline at end of file From 0025baaae8447886f8d0f2f703e6be4f84179e63 Mon Sep 17 00:00:00 2001 From: Till Heistermann Date: Fri, 4 Feb 2022 02:01:13 +0100 Subject: [PATCH 74/74] make tests pass on python 3.10 --- requirements.in | 2 +- stacker/commands/stacker/base.py | 2 +- stacker/context.py | 4 ++-- stacker/dag/__init__.py | 11 +++-------- stacker/hooks/utils.py | 4 ++-- stacker/lookups/handlers/file.py | 5 +---- test-requirements.in | 4 ++-- 7 files changed, 12 
insertions(+), 20 deletions(-) diff --git a/requirements.in b/requirements.in index 02ee345f0..901c8136e 100644 --- a/requirements.in +++ b/requirements.in @@ -5,7 +5,7 @@ PyYAML>=3.13b1 awacs>=0.6.0 gitpython>=3.0 jinja2>=2.7 -schematics>=2.0.1,<2.1.0 +schematics>=2.1.0 formic2 python-dateutil>=2.0,<3.0 MarkupSafe>=2 diff --git a/stacker/commands/stacker/base.py b/stacker/commands/stacker/base.py index 52e06b4ee..3f3374b5c 100644 --- a/stacker/commands/stacker/base.py +++ b/stacker/commands/stacker/base.py @@ -1,7 +1,7 @@ import argparse import threading import signal -from collections import Mapping +from collections.abc import Mapping import logging import os.path diff --git a/stacker/context.py b/stacker/context.py index c441be1c8..391e1fa85 100644 --- a/stacker/context.py +++ b/stacker/context.py @@ -1,4 +1,4 @@ -import collections +import collections.abc import logging from stacker.config import Config @@ -189,7 +189,7 @@ def set_hook_data(self, key, data): as returned from a hook. """ - if not isinstance(data, collections.Mapping): + if not isinstance(data, collections.abc.Mapping): raise ValueError("Hook (key: %s) data must be an instance of " "collections.Mapping (a dictionary for " "example)." 
% key) diff --git a/stacker/dag/__init__.py b/stacker/dag/__init__.py index 47415ebfa..ed51a2a0b 100644 --- a/stacker/dag/__init__.py +++ b/stacker/dag/__init__.py @@ -1,16 +1,11 @@ -import collections import logging from threading import Thread from copy import copy, deepcopy -from collections import deque +import collections.abc +from collections import deque, OrderedDict logger = logging.getLogger(__name__) -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict - class DAGValidationError(Exception): pass @@ -310,7 +305,7 @@ def from_dict(self, graph_dict): for new_node in graph_dict: self.add_node(new_node) for ind_node, dep_nodes in graph_dict.items(): - if not isinstance(dep_nodes, collections.Iterable): + if not isinstance(dep_nodes, collections.abc.Iterable): raise TypeError('%s: dict values must be lists' % ind_node) for dep_node in dep_nodes: self.add_edge(ind_node, dep_node) diff --git a/stacker/hooks/utils.py b/stacker/hooks/utils.py index bb8d9924f..52331ea96 100644 --- a/stacker/hooks/utils.py +++ b/stacker/hooks/utils.py @@ -1,6 +1,6 @@ import os import sys -import collections +import collections.abc import logging from stacker.util import load_object_from_string @@ -70,7 +70,7 @@ def handle_hooks(stage, hooks, provider, context): logger.warning("Non-required hook %s failed. 
Return value: %s", hook.path, result) else: - if isinstance(result, collections.Mapping): + if isinstance(result, collections.abc.Mapping): if data_key: logger.debug("Adding result for hook %s to context in " "data_key %s.", hook.path, data_key) diff --git a/stacker/lookups/handlers/file.py b/stacker/lookups/handlers/file.py index 9e7f0d2a0..46c0a3d13 100644 --- a/stacker/lookups/handlers/file.py +++ b/stacker/lookups/handlers/file.py @@ -2,10 +2,7 @@ import base64 import json import re -try: - from collections.abc import Mapping, Sequence -except ImportError: - from collections import Mapping, Sequence +from collections.abc import Mapping, Sequence import yaml diff --git a/test-requirements.in b/test-requirements.in index d9c630a1b..c30b54636 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,7 +1,7 @@ -pytest~=4.3 +pytest~=6.0 pytest-cov~=2.6 mock~=2.0 moto[awslambda,ec2]~=3.0.0 -testfixtures~=4.10.0 +testfixtures~=6.18.3 flake8 pep8-naming \ No newline at end of file