Adding lambda image uri support #168

Open · wants to merge 3 commits into base: master
8 changes: 7 additions & 1 deletion lambda_uploader/config.py

@@ -35,7 +35,7 @@
     u'alias': None, u'alias_description': None,
     u'ignore': [], u'extra_files': [], u'vpc': None,
     u's3_bucket': None, u's3_key': None, u'runtime': 'python2.7',
-    u'variables': {}, u'subscription': {}, u'tracing': {}}
+    u'variables': {}, u'subscription': {}, u'tracing': {}, u'image_uri': None}
 
 
 class Config(object):
@@ -79,6 +79,12 @@ def alias_description(self):
         else:
             return self._config['alias_description']
 
+    '''
+    Public method to set image uri
+    '''
+    def set_image_uri(self, image_uri):
+        self._config['image_uri'] = image_uri
+
     '''
     Public method to set the S3 bucket and keyname
     '''
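For reference, a minimal sketch of how the new setter might be exercised from calling code; the constructor arguments mirror the existing config.Config(...) call in shell.py, and the directory, config file argument, role ARN and ECR URI are placeholders:

# Illustrative sketch, not part of the diff: exercising the new setter the way
# shell._execute() would. The directory, config file argument, role ARN and
# image URI below are placeholders.
from lambda_uploader import config

cfg = config.Config('/path/to/function_dir', None,
                    role='arn:aws:iam::123456789012:role/lambda-execute')
cfg.set_image_uri('123456789012.dkr.ecr.us-east-1.amazonaws.com/my-function:latest')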
42 changes: 26 additions & 16 deletions lambda_uploader/shell.py

@@ -57,6 +57,9 @@ def _execute(args):
     cfg = config.Config(pth, args.config, role=args.role,
                         variables=args.variables)
 
+    if args.image_uri:
+        cfg.set_image_uri(args.image_uri)
+
     if args.s3_bucket:
         cfg.set_s3(args.s3_bucket, args.s3_key)
 
@@ -70,22 +73,25 @@ def _execute(args):
     # build and include virtualenv, the default
     venv = None
 
-    if args.no_build:
-        pkg = package.create_package(pth)
+    if not cfg.image_uri:
+        if args.no_build:
+            pkg = package.create_package(pth)
+        else:
+            _print('Building Package')
+            requirements = cfg.requirements
+            if args.requirements:
+                requirements = path.abspath(args.requirements)
+            extra_files = cfg.extra_files
+            if args.extra_files:
+                extra_files = args.extra_files
+            pkg = package.build_package(pth, requirements,
+                                        venv, cfg.ignore, extra_files,
+                                        pyexec=cfg.runtime)
+
+        if not args.no_clean:
+            pkg.clean_workspace()
     else:
-        _print('Building Package')
-        requirements = cfg.requirements
-        if args.requirements:
-            requirements = path.abspath(args.requirements)
-        extra_files = cfg.extra_files
-        if args.extra_files:
-            extra_files = args.extra_files
-        pkg = package.build_package(pth, requirements,
-                                    venv, cfg.ignore, extra_files,
-                                    pyexec=cfg.runtime)
-
-    if not args.no_clean:
-        pkg.clean_workspace()
+        pkg = None
 
     if not args.no_upload:
         # Set publish if flagged to do so
@@ -109,7 +115,8 @@ def _execute(args):
         _print('Creating subscription')
         subscribers.create_subscriptions(cfg, args.profile)
 
-    pkg.clean_zipfile()
+    if pkg:
+        pkg.clean_zipfile()
 
     _print('Fin')
 
@@ -164,6 +171,9 @@ def main(arv=None):
                         default=None, help=alias_help)
     parser.add_argument('--alias-description', '-m', dest='alias_description',
                         default=None, help='alias description')
+    parser.add_argument('--image-uri', '-i', dest='image_uri',
+                        help='uri of a container image in the amazon ecr registry to deploy',
+                        default=None)
    parser.add_argument('--s3-bucket', '-s', dest='s3_bucket',
                        help='S3 bucket to store the lambda function in',
                        default=None)
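Taken together, the shell changes skip packaging entirely when an image URI is configured and leave pkg as None so the later cleanup calls are guarded. A reduced, self-contained sketch of that decision (the names are illustrative stand-ins, not the module's own):

# Illustrative sketch, not part of the diff: the packaging decision in miniature.
# build_zip stands in for package.create_package()/build_package().
def choose_package(image_uri, build_zip):
    if not image_uri:
        return build_zip()   # zip-based deploy: build the package as before
    return None              # image-based deploy: nothing to build or clean up

assert choose_package(None, lambda: 'lambda_function.zip') == 'lambda_function.zip'
assert choose_package('123456789012.dkr.ecr.us-east-1.amazonaws.com/fn:latest',
                      lambda: 'lambda_function.zip') is None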
200 changes: 126 additions & 74 deletions lambda_uploader/uploader.py

@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import backoff
 import boto3
 import logging
+from botocore.exceptions import ClientError
 
 from os import path
 
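The two new imports back the retry behaviour added to upload_existing() further down in this file. Roughly, backoff.on_exception re-invokes the decorated callable with exponentially growing delays whenever it raises the named exception. A standalone sketch (the max_tries cap is illustrative; the diff itself sets no limit):

# Illustrative sketch, not part of the diff: how the backoff/ClientError pair is
# typically wired up. The decorated call is retried with exponential backoff
# whenever boto3 raises a ClientError (e.g. a ResourceConflictException while a
# previous update is still in progress).
import backoff
from botocore.exceptions import ClientError

@backoff.on_exception(backoff.expo, ClientError, max_tries=5)
def update_config_with_retry(lambda_client, **kwargs):
    # Each ClientError triggers another attempt until max_tries is reached.
    return lambda_client.update_function_configuration(**kwargs)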
@@ -23,6 +25,7 @@
 
 class PackageUploader(object):
     '''TODO: Should we decouple the config from the Object Init'''
+
     def __init__(self, config, profile_name):
         self._config = config
         self._vpc_config = self._format_vpc_config()
@@ -34,29 +37,36 @@ def __init__(self, config, profile_name):
     '''
     Calls the AWS methods to upload an existing package and update
     the function configuration
 
     returns the package version
     '''
+
     def upload_existing(self, pkg):
         environment = {'Variables': self._config.variables}
-        self._validate_package_size(pkg.zip_file)
-        with open(pkg.zip_file, "rb") as fil:
-            zip_file = fil.read()
-
-        LOG.debug('running update_function_code')
-        conf_update_resp = None
-        if self._config.s3_bucket:
-            self._upload_s3(pkg.zip_file)
-            conf_update_resp = self._lambda_client.update_function_code(
-                FunctionName=self._config.name,
-                S3Bucket=self._config.s3_bucket,
-                S3Key=self._config.s3_package_name(),
-                Publish=False,
-            )
+        if pkg:
+            self._validate_package_size(pkg.zip_file)
+            with open(pkg.zip_file, "rb") as fil:
+                zip_file = fil.read()
+
+            LOG.debug('running update_function_code')
+            conf_update_resp = None
+            if self._config.s3_bucket:
+                self._upload_s3(pkg.zip_file)
+                conf_update_resp = self._lambda_client.update_function_code(
+                    FunctionName=self._config.name,
+                    S3Bucket=self._config.s3_bucket,
+                    S3Key=self._config.s3_package_name(),
+                    Publish=False,
+                )
+            else:
+                conf_update_resp = self._lambda_client.update_function_code(
+                    FunctionName=self._config.name,
+                    ZipFile=zip_file,
+                    Publish=False,
+                )
         else:
             conf_update_resp = self._lambda_client.update_function_code(
                 FunctionName=self._config.name,
-                ZipFile=zip_file,
+                ImageUri=self._config.image_uri,
                 Publish=False,
             )
         LOG.debug("AWS update_function_code response: %s"
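For reference, the boto3 call the new image branch boils down to; update_function_code accepts ImageUri only for functions that were created from a container image, so the zip and image paths stay mutually exclusive. The function name and URI below are placeholders.

# Illustrative sketch, not part of the diff: updating a container-image function's
# code with bare boto3. The function name and ECR URI are placeholders.
import boto3

lambda_client = boto3.client('lambda')
resp = lambda_client.update_function_code(
    FunctionName='my-function',
    ImageUri='123456789012.dkr.ecr.us-east-1.amazonaws.com/my-function:latest',
    Publish=False,
)
print(resp.get('Version'))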
@@ -66,71 +76,108 @@ def upload_existing(self, pkg):
         LOG.debug("Waiting for lambda function to be updated")
         waiter.wait(FunctionName=self._config.name)
 
-        LOG.debug('running update_function_configuration')
-        response = self._lambda_client.update_function_configuration(
-            FunctionName=self._config.name,
-            Handler=self._config.handler,
-            Role=self._config.role,
-            Description=self._config.description,
-            Timeout=self._config.timeout,
-            MemorySize=self._config.memory,
-            VpcConfig=self._vpc_config,
-            Environment=environment,
-            TracingConfig=self._config.tracing,
-            Runtime=self._config.runtime,
-        )
-        LOG.debug("AWS update_function_configuration response: %s"
-                  % response)
+        @backoff.on_exception(backoff.expo, ClientError)
+        def update_config():
+            LOG.debug('running update_function_configuration')
+            if pkg:
+                response = self._lambda_client.update_function_configuration(
+                    FunctionName=self._config.name,
+                    Handler=self._config.handler,
+                    Role=self._config.role,
+                    Description=self._config.description,
+                    Timeout=self._config.timeout,
+                    MemorySize=self._config.memory,
+                    VpcConfig=self._vpc_config,
+                    Environment=environment,
+                    TracingConfig=self._config.tracing,
+                    Runtime=self._config.runtime,
+                )
+            else:
+                response = self._lambda_client.update_function_configuration(
+                    FunctionName=self._config.name,
+                    Role=self._config.role,
+                    Description=self._config.description,
+                    Timeout=self._config.timeout,
+                    MemorySize=self._config.memory,
+                    VpcConfig=self._vpc_config,
+                    Environment=environment,
+                    TracingConfig=self._config.tracing
+                )
+            LOG.debug("AWS update_function_configuration response: %s"
+                      % response)
+            return response
 
-        version = response.get('Version')
-        # Publish the version after upload and config update if needed
-        if self._config.publish:
+        version = update_config().get('Version')
 
-            waiter = self._lambda_client.get_waiter('function_updated')
+        @backoff.on_exception(backoff.expo, ClientError)
+        def publish():
+            # Publish the version after upload and config update if needed
+            waiter = self._lambda_client.get_waiter(
+                'function_updated')
             LOG.debug("Waiting for lambda function to be updated")
             waiter.wait(FunctionName=self._config.name)
 
             resp = self._lambda_client.publish_version(
                 FunctionName=self._config.name,
-            )
+                )
             LOG.debug("AWS publish_version response: %s" % resp)
-            version = resp.get('Version')
+            return resp.get('Version')
+
+        if self._config.publish:
+            version = publish()
 
         return version
 
     '''
     Creates and uploads a new lambda function
 
     returns the package version
     '''
+
     def upload_new(self, pkg):
         environment = {'Variables': self._config.variables}
         code = {}
-        if self._config.s3_bucket:
-            code = {'S3Bucket': self._config.s3_bucket,
-                    'S3Key': self._config.s3_package_name()}
-            self._upload_s3(pkg.zip_file)
+        if pkg:
+            if self._config.s3_bucket:
+                code = {'S3Bucket': self._config.s3_bucket,
+                        'S3Key': self._config.s3_package_name()}
+                self._upload_s3(pkg.zip_file)
+            else:
+                self._validate_package_size(pkg.zip_file)
+                with open(pkg.zip_file, "rb") as fil:
+                    zip_file = fil.read()
+                code = {'ZipFile': zip_file}
         else:
-            self._validate_package_size(pkg.zip_file)
-            with open(pkg.zip_file, "rb") as fil:
-                zip_file = fil.read()
-            code = {'ZipFile': zip_file}
+            code = {'ImageUri': self._config.image_uri}
 
         LOG.debug('running create_function_code')
-        response = self._lambda_client.create_function(
-            FunctionName=self._config.name,
-            Runtime=self._config.runtime,
-            Handler=self._config.handler,
-            Role=self._config.role,
-            Code=code,
-            Description=self._config.description,
-            Timeout=self._config.timeout,
-            MemorySize=self._config.memory,
-            Publish=self._config.publish,
-            VpcConfig=self._vpc_config,
-            Environment=environment,
-            TracingConfig=self._config.tracing,
-        )
+        if pkg:
+            response = self._lambda_client.create_function(
+                FunctionName=self._config.name,
+                Runtime=self._config.runtime,
+                Handler=self._config.handler,
+                Role=self._config.role,
+                Code=code,
+                Description=self._config.description,
+                Timeout=self._config.timeout,
+                MemorySize=self._config.memory,
+                Publish=self._config.publish,
+                VpcConfig=self._vpc_config,
+                Environment=environment,
+                TracingConfig=self._config.tracing,
+            )
+        else:
+            response = self._lambda_client.create_function(
+                FunctionName=self._config.name,
+                Role=self._config.role,
+                Code=code,
+                Description=self._config.description,
+                Timeout=self._config.timeout,
+                MemorySize=self._config.memory,
+                Publish=self._config.publish,
+                VpcConfig=self._vpc_config,
+                Environment=environment,
+                TracingConfig=self._config.tracing,
+                PackageType='Image'
+            )
         LOG.debug("AWS create_function response: %s" % response)
 
         return response.get('Version')
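For comparison, the image-based create_function call in isolation: with PackageType='Image' the Code mapping carries an ImageUri, and the Runtime/Handler parameters are omitted, which is why the new branch above drops them. All identifiers below are placeholders.

# Illustrative sketch, not part of the diff: creating a function from a container
# image with bare boto3. The name, role ARN and ECR URI are placeholders.
import boto3

lambda_client = boto3.client('lambda')
resp = lambda_client.create_function(
    FunctionName='my-function',
    Role='arn:aws:iam::123456789012:role/lambda-execute',
    Code={'ImageUri': '123456789012.dkr.ecr.us-east-1.amazonaws.com/my-function:latest'},
    PackageType='Image',
    Publish=True,
)
print(resp.get('Version'))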
@@ -139,11 +186,12 @@ def upload_new(self, pkg):
     Auto determines whether the function exists or not and calls
     the appropriate method (upload_existing or upload_new).
     '''
+
     def upload(self, pkg):
         existing_function = True
         try:
             get_resp = self._lambda_client.get_function_configuration(
-                FunctionName=self._config.name)
+                    FunctionName=self._config.name)
             LOG.debug("AWS get_function_configuration response: %s" % get_resp)
         except:  # noqa: E722
             existing_function = False
@@ -158,6 +206,7 @@ def upload(self, pkg):
     Create/update an alias to point to the package. Raises an
     exception if the package has not been uploaded.
     '''
+
     def alias(self):
         # if self.version is still None raise exception
         if self.version is None:
@@ -172,35 +221,38 @@ def alias(self):
     Pulls down the current list of aliases and checks to see if
     an alias exists.
     '''
+
     def _alias_exists(self):
         resp = self._lambda_client.list_aliases(
-            FunctionName=self._config.name)
+                FunctionName=self._config.name)
 
         for alias in resp.get('Aliases'):
             if alias.get('Name') == self._config.alias:
                 return True
         return False
 
     '''Creates alias'''
+
     def _create_alias(self):
         LOG.debug("Creating new alias %s" % self._config.alias)
         resp = self._lambda_client.create_alias(
-            FunctionName=self._config.name,
-            Name=self._config.alias,
-            FunctionVersion=self.version,
-            Description=self._config.alias_description,
-        )
+                FunctionName=self._config.name,
+                Name=self._config.alias,
+                FunctionVersion=self.version,
+                Description=self._config.alias_description,
+                )
         LOG.debug("AWS create_alias response: %s" % resp)
 
     '''Update alias'''
+
     def _update_alias(self):
         LOG.debug("Updating alias %s" % self._config.alias)
         resp = self._lambda_client.update_alias(
-            FunctionName=self._config.name,
-            Name=self._config.alias,
-            FunctionVersion=self.version,
-            Description=self._config.alias_description,
-        )
+                FunctionName=self._config.name,
+                Name=self._config.alias,
+                FunctionVersion=self.version,
+                Description=self._config.alias_description,
+                )
         LOG.debug("AWS update_alias response: %s" % resp)
 
     def _validate_package_size(self, pkg):
1 change: 1 addition & 0 deletions requirements.txt
@@ -1,2 +1,3 @@
 boto3==1.4.0
 virtualenv
+backoff