diff --git a/lambda_uploader/config.py b/lambda_uploader/config.py index c4ef7b4..1572290 100644 --- a/lambda_uploader/config.py +++ b/lambda_uploader/config.py @@ -35,7 +35,7 @@ u'alias': None, u'alias_description': None, u'ignore': [], u'extra_files': [], u'vpc': None, u's3_bucket': None, u's3_key': None, u'runtime': 'python2.7', - u'variables': {}, u'subscription': {}, u'tracing': {}} + u'variables': {}, u'subscription': {}, u'tracing': {}, u'image_uri': None} class Config(object): @@ -79,6 +79,12 @@ def alias_description(self): else: return self._config['alias_description'] + ''' + Public method to set image uri + ''' + def set_image_uri(self, image_uri): + self._config['image_uri'] = image_uri + ''' Public method to set the S3 bucket and keyname ''' diff --git a/lambda_uploader/shell.py b/lambda_uploader/shell.py index 14de098..8a751ac 100644 --- a/lambda_uploader/shell.py +++ b/lambda_uploader/shell.py @@ -57,6 +57,9 @@ def _execute(args): cfg = config.Config(pth, args.config, role=args.role, variables=args.variables) + if args.image_uri: + cfg.set_image_uri(args.image_uri) + if args.s3_bucket: cfg.set_s3(args.s3_bucket, args.s3_key) @@ -70,22 +73,25 @@ def _execute(args): # build and include virtualenv, the default venv = None - if args.no_build: - pkg = package.create_package(pth) + if not cfg.image_uri: + if args.no_build: + pkg = package.create_package(pth) + else: + _print('Building Package') + requirements = cfg.requirements + if args.requirements: + requirements = path.abspath(args.requirements) + extra_files = cfg.extra_files + if args.extra_files: + extra_files = args.extra_files + pkg = package.build_package(pth, requirements, + venv, cfg.ignore, extra_files, + pyexec=cfg.runtime) + + if not args.no_clean: + pkg.clean_workspace() else: - _print('Building Package') - requirements = cfg.requirements - if args.requirements: - requirements = path.abspath(args.requirements) - extra_files = cfg.extra_files - if args.extra_files: - extra_files = 
args.extra_files - pkg = package.build_package(pth, requirements, - venv, cfg.ignore, extra_files, - pyexec=cfg.runtime) - - if not args.no_clean: - pkg.clean_workspace() + pkg = None if not args.no_upload: # Set publish if flagged to do so @@ -109,7 +115,8 @@ def _execute(args): _print('Creating subscription') subscribers.create_subscriptions(cfg, args.profile) - pkg.clean_zipfile() + if pkg: + pkg.clean_zipfile() _print('Fin') @@ -164,6 +171,9 @@ def main(arv=None): default=None, help=alias_help) parser.add_argument('--alias-description', '-m', dest='alias_description', default=None, help='alias description') + parser.add_argument('--image-uri', '-i', dest='image_uri', + help='uri of a container image in the amazon ecr registry to deploy', + default=None) parser.add_argument('--s3-bucket', '-s', dest='s3_bucket', help='S3 bucket to store the lambda function in', default=None) diff --git a/lambda_uploader/uploader.py b/lambda_uploader/uploader.py index ba8335b..1fc4050 100644 --- a/lambda_uploader/uploader.py +++ b/lambda_uploader/uploader.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import backoff import boto3 import logging +from botocore.exceptions import ClientError from os import path @@ -23,6 +25,7 @@ class PackageUploader(object): '''TODO: Should we decouple the config from the Object Init''' + def __init__(self, config, profile_name): self._config = config self._vpc_config = self._format_vpc_config() @@ -34,29 +37,36 @@ def __init__(self, config, profile_name): ''' Calls the AWS methods to upload an existing package and update the function configuration - returns the package version ''' + def upload_existing(self, pkg): environment = {'Variables': self._config.variables} - self._validate_package_size(pkg.zip_file) - with open(pkg.zip_file, "rb") as fil: - zip_file = fil.read() - - LOG.debug('running update_function_code') - conf_update_resp = None - if self._config.s3_bucket: - self._upload_s3(pkg.zip_file) - conf_update_resp = self._lambda_client.update_function_code( - FunctionName=self._config.name, - S3Bucket=self._config.s3_bucket, - S3Key=self._config.s3_package_name(), - Publish=False, - ) + if pkg: + self._validate_package_size(pkg.zip_file) + with open(pkg.zip_file, "rb") as fil: + zip_file = fil.read() + + LOG.debug('running update_function_code') + conf_update_resp = None + if self._config.s3_bucket: + self._upload_s3(pkg.zip_file) + conf_update_resp = self._lambda_client.update_function_code( + FunctionName=self._config.name, + S3Bucket=self._config.s3_bucket, + S3Key=self._config.s3_package_name(), + Publish=False, + ) + else: + conf_update_resp = self._lambda_client.update_function_code( + FunctionName=self._config.name, + ZipFile=zip_file, + Publish=False, + ) else: conf_update_resp = self._lambda_client.update_function_code( FunctionName=self._config.name, - ZipFile=zip_file, + ImageUri=self._config.image_uri, Publish=False, ) LOG.debug("AWS update_function_code response: %s" @@ -66,71 +76,108 @@ def upload_existing(self, pkg): LOG.debug("Waiting for lambda function to be updated") 
waiter.wait(FunctionName=self._config.name) - LOG.debug('running update_function_configuration') - response = self._lambda_client.update_function_configuration( - FunctionName=self._config.name, - Handler=self._config.handler, - Role=self._config.role, - Description=self._config.description, - Timeout=self._config.timeout, - MemorySize=self._config.memory, - VpcConfig=self._vpc_config, - Environment=environment, - TracingConfig=self._config.tracing, - Runtime=self._config.runtime, - ) - LOG.debug("AWS update_function_configuration response: %s" - % response) + @backoff.on_exception(backoff.expo, ClientError) + def update_config(): + LOG.debug('running update_function_configuration') + if pkg: + response = self._lambda_client.update_function_configuration( + FunctionName=self._config.name, + Handler=self._config.handler, + Role=self._config.role, + Description=self._config.description, + Timeout=self._config.timeout, + MemorySize=self._config.memory, + VpcConfig=self._vpc_config, + Environment=environment, + TracingConfig=self._config.tracing, + Runtime=self._config.runtime, + ) + else: + response = self._lambda_client.update_function_configuration( + FunctionName=self._config.name, + Role=self._config.role, + Description=self._config.description, + Timeout=self._config.timeout, + MemorySize=self._config.memory, + VpcConfig=self._vpc_config, + Environment=environment, + TracingConfig=self._config.tracing + ) + LOG.debug("AWS update_function_configuration response: %s" + % response) + return response - version = response.get('Version') - # Publish the version after upload and config update if needed - if self._config.publish: + version = update_config().get('Version') - waiter = self._lambda_client.get_waiter('function_updated') + @backoff.on_exception(backoff.expo, ClientError) + def publish(): + # Publish the version after upload and config update if needed + waiter = self._lambda_client.get_waiter( + 'function_updated') LOG.debug("Waiting for lambda function to be 
updated") waiter.wait(FunctionName=self._config.name) resp = self._lambda_client.publish_version( - FunctionName=self._config.name, - ) + FunctionName=self._config.name, + ) LOG.debug("AWS publish_version response: %s" % resp) - version = resp.get('Version') + return resp.get('Version') + + if self._config.publish: + version = publish() return version ''' Creates and uploads a new lambda function - returns the package version ''' + def upload_new(self, pkg): environment = {'Variables': self._config.variables} code = {} - if self._config.s3_bucket: - code = {'S3Bucket': self._config.s3_bucket, - 'S3Key': self._config.s3_package_name()} - self._upload_s3(pkg.zip_file) + if pkg: + if self._config.s3_bucket: + code = {'S3Bucket': self._config.s3_bucket, + 'S3Key': self._config.s3_package_name()} + self._upload_s3(pkg.zip_file) + else: + self._validate_package_size(pkg.zip_file) + with open(pkg.zip_file, "rb") as fil: + zip_file = fil.read() + code = {'ZipFile': zip_file} else: - self._validate_package_size(pkg.zip_file) - with open(pkg.zip_file, "rb") as fil: - zip_file = fil.read() - code = {'ZipFile': zip_file} - + code = {'ImageUri': self._config.image_uri} LOG.debug('running create_function_code') - response = self._lambda_client.create_function( - FunctionName=self._config.name, - Runtime=self._config.runtime, - Handler=self._config.handler, - Role=self._config.role, - Code=code, - Description=self._config.description, - Timeout=self._config.timeout, - MemorySize=self._config.memory, - Publish=self._config.publish, - VpcConfig=self._vpc_config, - Environment=environment, - TracingConfig=self._config.tracing, - ) + if pkg: + response = self._lambda_client.create_function( + FunctionName=self._config.name, + Runtime=self._config.runtime, + Handler=self._config.handler, + Role=self._config.role, + Code=code, + Description=self._config.description, + Timeout=self._config.timeout, + MemorySize=self._config.memory, + Publish=self._config.publish, + 
VpcConfig=self._vpc_config, + Environment=environment, + TracingConfig=self._config.tracing, + ) + else: + response = self._lambda_client.create_function( + FunctionName=self._config.name, + Role=self._config.role, + Code=code, + Description=self._config.description, + Timeout=self._config.timeout, + MemorySize=self._config.memory, + Publish=self._config.publish, + VpcConfig=self._vpc_config, + Environment=environment, + TracingConfig=self._config.tracing, + PackageType='Image' + ) LOG.debug("AWS create_function response: %s" % response) return response.get('Version') @@ -139,11 +186,12 @@ def upload_new(self, pkg): Auto determines whether the function exists or not and calls the appropriate method (upload_existing or upload_new). ''' + def upload(self, pkg): existing_function = True try: get_resp = self._lambda_client.get_function_configuration( - FunctionName=self._config.name) + FunctionName=self._config.name) LOG.debug("AWS get_function_configuration response: %s" % get_resp) except: # noqa: E722 existing_function = False @@ -158,6 +206,7 @@ def upload(self, pkg): Create/update an alias to point to the package. Raises an exception if the package has not been uploaded. ''' + def alias(self): # if self.version is still None raise exception if self.version is None: @@ -172,9 +221,10 @@ def alias(self): Pulls down the current list of aliases and checks to see if an alias exists. 
''' + def _alias_exists(self): resp = self._lambda_client.list_aliases( - FunctionName=self._config.name) + FunctionName=self._config.name) for alias in resp.get('Aliases'): if alias.get('Name') == self._config.alias: @@ -182,25 +232,27 @@ def _alias_exists(self): return False '''Creates alias''' + def _create_alias(self): LOG.debug("Creating new alias %s" % self._config.alias) resp = self._lambda_client.create_alias( - FunctionName=self._config.name, - Name=self._config.alias, - FunctionVersion=self.version, - Description=self._config.alias_description, - ) + FunctionName=self._config.name, + Name=self._config.alias, + FunctionVersion=self.version, + Description=self._config.alias_description, + ) LOG.debug("AWS create_alias response: %s" % resp) '''Update alias''' + def _update_alias(self): LOG.debug("Updating alias %s" % self._config.alias) resp = self._lambda_client.update_alias( - FunctionName=self._config.name, - Name=self._config.alias, - FunctionVersion=self.version, - Description=self._config.alias_description, - ) + FunctionName=self._config.name, + Name=self._config.alias, + FunctionVersion=self.version, + Description=self._config.alias_description, + ) LOG.debug("AWS update_alias response: %s" % resp) def _validate_package_size(self, pkg): diff --git a/requirements.txt b/requirements.txt index 9e55e26..083ed6b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ boto3==1.4.0 virtualenv +backoff \ No newline at end of file diff --git a/setup.py b/setup.py index 80b408c..a45ec4a 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,7 @@ 'boto3>=1.4.2', 'botocore>=1.4.85', 'virtualenv', + 'backoff' ] STYLE_REQUIRES = [