# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# snippet-sourcedescription:[MyCodePipelineFunction.py demonstrates how to use an AWS Lambda function to create or update a stack based on a supplied AWS CloudFormation template and Lambda execution role.]
# snippet-service:[codepipeline]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS CodePipeline]
# snippet-keyword:[Code Sample]
# snippet-keyword:[Invoke]
# snippet-sourcetype:[full-example]
# snippet-sourceauthor:[AWS]
# snippet-sourcedate:[2016-03-07]
# snippet-start:[codepipeline.python.MyCodePipelineFunction.complete]
from __future__ import print_function

import json
import urllib
import zipfile
import tempfile
import traceback

import boto3
import botocore
from boto3.session import Session

print("Loading function")

# Module-level clients are created once and reused across warm Lambda
# invocations.
cf = boto3.client("cloudformation")
code_pipeline = boto3.client("codepipeline")
def find_artifact(artifacts, name):
    """Finds the artifact 'name' among the 'artifacts'

    Args:
        artifacts: The list of artifacts available to the function
        name: The artifact we wish to use

    Returns:
        The artifact dictionary found

    Raises:
        Exception: If no matching artifact is found

    """
    for artifact in artifacts:
        if artifact["name"] == name:
            return artifact

    raise Exception('Input artifact named "{0}" not found in event'.format(name))
def get_template(s3, artifact, file_in_zip):
    """Gets the template artifact

    Downloads the artifact from the S3 artifact store to a temporary file
    then extracts the zip and returns the file containing the CloudFormation
    template.

    Args:
        s3: An S3 client with credentials for the artifact store
        artifact: The artifact to download
        file_in_zip: The path to the file within the zip containing the template

    Returns:
        The CloudFormation template file contents (bytes, per ZipFile.read)

    Raises:
        Exception: Any exception thrown while downloading the artifact or unzipping it

    """
    bucket = artifact["location"]["s3Location"]["bucketName"]
    key = artifact["location"]["s3Location"]["objectKey"]

    # The context manager guarantees the temp file is cleaned up even if the
    # download or extraction fails.
    with tempfile.NamedTemporaryFile() as tmp_file:
        s3.download_file(bucket, key, tmp_file.name)
        with zipfile.ZipFile(tmp_file.name, "r") as archive:
            return archive.read(file_in_zip)
def update_stack(stack, template):
    """Start a CloudFormation stack update

    Args:
        stack: The stack to update
        template: The template to apply

    Returns:
        True if an update was started, false if there were no changes
        to the template since the last update.

    Raises:
        Exception: Any exception besides "No updates are to be performed."

    """
    try:
        cf.update_stack(StackName=stack, TemplateBody=template)
        return True

    except botocore.exceptions.ClientError as e:
        # CloudFormation reports a no-op update as a ClientError; treat that
        # specific case as "no changes" rather than a failure.
        if e.response["Error"]["Message"] == "No updates are to be performed.":
            return False
        else:
            raise Exception(
                'Error updating CloudFormation stack "{0}"'.format(stack), e
            )
def stack_exists(stack):
    """Check if a stack exists or not

    Args:
        stack: The stack to check

    Returns:
        True or False depending on whether the stack exists

    Raises:
        Any exceptions raised by .describe_stacks() besides that
        the stack doesn't exist.

    """
    try:
        cf.describe_stacks(StackName=stack)
        return True
    except botocore.exceptions.ClientError as e:
        # A missing stack surfaces as a ValidationError whose message
        # contains "does not exist"; anything else is a real failure.
        if "does not exist" in e.response["Error"]["Message"]:
            return False
        else:
            raise e
def create_stack(stack, template):
    """Starts a new CloudFormation stack creation

    Args:
        stack: The stack to be created
        template: The template for the stack to be created with

    Raises:
        Exception: Any exception thrown by .create_stack()

    """
    cf.create_stack(StackName=stack, TemplateBody=template)
def get_stack_status(stack):
    """Get the status of an existing CloudFormation stack

    Args:
        stack: The name of the stack to check

    Returns:
        The CloudFormation status string of the stack such as CREATE_COMPLETE

    Raises:
        Exception: Any exception thrown by .describe_stacks()

    """
    stack_description = cf.describe_stacks(StackName=stack)
    return stack_description["Stacks"][0]["StackStatus"]
def put_job_success(job, message):
    """Notify CodePipeline of a successful job

    Args:
        job: The CodePipeline job ID
        message: A message to be logged relating to the job status

    Raises:
        Exception: Any exception thrown by .put_job_success_result()

    """
    print("Putting job success")
    print(message)
    code_pipeline.put_job_success_result(jobId=job)
def put_job_failure(job, message):
    """Notify CodePipeline of a failed job

    Args:
        job: The CodePipeline job ID
        message: A message to be logged relating to the job status

    Raises:
        Exception: Any exception thrown by .put_job_failure_result()

    """
    print("Putting job failure")
    print(message)
    code_pipeline.put_job_failure_result(
        jobId=job, failureDetails={"message": message, "type": "JobFailed"}
    )
def continue_job_later(job, message):
    """Notify CodePipeline of a continuing job

    This will cause CodePipeline to invoke the function again with the
    supplied continuation token.

    Args:
        job: The JobID
        message: A message to be logged relating to the job status

    Raises:
        Exception: Any exception thrown by .put_job_success_result()

    """
    # Use the continuation token to keep track of any job execution state
    # This data will be available when a new job is scheduled to continue
    # the current execution
    continuation_token = json.dumps({"previous_job_id": job})

    print("Putting job continuation")
    print(message)
    code_pipeline.put_job_success_result(
        jobId=job, continuationToken=continuation_token
    )
def start_update_or_create(job_id, stack, template):
    """Starts the stack update or create process

    If the stack exists then update, otherwise create.

    Args:
        job_id: The ID of the CodePipeline job
        stack: The stack to create or update
        template: The template to create/update the stack with

    """
    if stack_exists(stack):
        status = get_stack_status(stack)
        if status not in ["CREATE_COMPLETE", "ROLLBACK_COMPLETE", "UPDATE_COMPLETE"]:
            # If the CloudFormation stack is not in a state where
            # it can be updated again then fail the job right away.
            put_job_failure(job_id, "Stack cannot be updated when status is: " + status)
            return

        were_updates = update_stack(stack, template)

        if were_updates:
            # If there were updates then continue the job so it can monitor
            # the progress of the update.
            continue_job_later(job_id, "Stack update started")
        else:
            # If there were no updates then succeed the job immediately
            put_job_success(job_id, "There were no stack updates")
    else:
        # If the stack doesn't already exist then create it instead
        # of updating it.
        create_stack(stack, template)
        # Continue the job so the pipeline will wait for the CloudFormation
        # stack to be created.
        continue_job_later(job_id, "Stack create started")
def check_stack_update_status(job_id, stack):
    """Monitor an already-running CloudFormation update/create

    Succeeds, fails or continues the job depending on the stack status.

    Args:
        job_id: The CodePipeline job ID
        stack: The stack to monitor

    """
    status = get_stack_status(stack)
    if status in ["UPDATE_COMPLETE", "CREATE_COMPLETE"]:
        # If the update/create finished successfully then
        # succeed the job and don't continue.
        put_job_success(job_id, "Stack update complete")

    elif status in [
        "UPDATE_IN_PROGRESS",
        "UPDATE_ROLLBACK_IN_PROGRESS",
        "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS",
        "CREATE_IN_PROGRESS",
        "ROLLBACK_IN_PROGRESS",
    ]:
        # If the job isn't finished yet then continue it
        continue_job_later(job_id, "Stack update still in progress")

    else:
        # If the Stack is a state which isn't "in progress" or "complete"
        # then the stack update/create has failed so end the job with
        # a failed result.
        put_job_failure(job_id, "Update failed: " + status)
def get_user_params(job_data):
    """Decodes the JSON user parameters and validates the required properties.

    Args:
        job_data: The job data structure containing the UserParameters string
            which should be a valid JSON structure

    Returns:
        The JSON parameters decoded as a dictionary.

    Raises:
        Exception: The JSON can't be decoded or a property is missing.

    """
    try:
        # Get the user parameters which contain the stack, artifact and file settings
        user_parameters = job_data["actionConfiguration"]["configuration"][
            "UserParameters"
        ]
        decoded_parameters = json.loads(user_parameters)

    except Exception:
        # We're expecting the user parameters to be encoded as JSON
        # so we can pass multiple values. If the JSON can't be decoded
        # then fail the job with a helpful message.
        raise Exception("UserParameters could not be decoded as JSON")

    if "stack" not in decoded_parameters:
        # Validate that the stack is provided, otherwise fail the job
        # with a helpful message.
        raise Exception("Your UserParameters JSON must include the stack name")

    if "artifact" not in decoded_parameters:
        # Validate that the artifact name is provided, otherwise fail the job
        # with a helpful message.
        raise Exception("Your UserParameters JSON must include the artifact name")

    if "file" not in decoded_parameters:
        # Validate that the template file is provided, otherwise fail the job
        # with a helpful message.
        raise Exception("Your UserParameters JSON must include the template file name")

    return decoded_parameters
def setup_s3_client(job_data):
    """Creates an S3 client

    Uses the credentials passed in the event by CodePipeline. These
    credentials can be used to access the artifact bucket.

    Args:
        job_data: The job data structure

    Returns:
        An S3 client with the appropriate credentials

    """
    key_id = job_data["artifactCredentials"]["accessKeyId"]
    key_secret = job_data["artifactCredentials"]["secretAccessKey"]
    session_token = job_data["artifactCredentials"]["sessionToken"]

    session = Session(
        aws_access_key_id=key_id,
        aws_secret_access_key=key_secret,
        aws_session_token=session_token,
    )
    # Signature Version 4 is required to read KMS-encrypted artifacts from
    # the pipeline's artifact store.
    return session.client("s3", config=botocore.client.Config(signature_version="s3v4"))
def lambda_handler(event, context):
    """The Lambda function handler

    If a continuing job then checks the CloudFormation stack status
    and updates the job accordingly.

    If a new job then kick off an update or creation of the target
    CloudFormation stack.

    Args:
        event: The event passed by Lambda
        context: The context passed by Lambda

    """
    # Initialized up front so the except block can tell whether the job ID
    # was extracted before the failure occurred (avoids a NameError that
    # would mask the original exception).
    job_id = None
    try:
        # Extract the Job ID
        job_id = event["CodePipeline.job"]["id"]

        # Extract the Job Data
        job_data = event["CodePipeline.job"]["data"]

        # Extract the params
        params = get_user_params(job_data)

        # Get the list of artifacts passed to the function
        artifacts = job_data["inputArtifacts"]

        stack = params["stack"]
        artifact = params["artifact"]
        template_file = params["file"]

        if "continuationToken" in job_data:
            # If we're continuing then the create/update has already been triggered
            # we just need to check if it has finished.
            check_stack_update_status(job_id, stack)
        else:
            # Get the artifact details
            artifact_data = find_artifact(artifacts, artifact)
            # Get S3 client to access artifact with
            s3 = setup_s3_client(job_data)
            # Get the JSON template file out of the artifact
            template = get_template(s3, artifact_data, template_file)
            # Kick off a stack update or create
            start_update_or_create(job_id, stack, template)

    except Exception as e:
        # If any other exceptions which we didn't expect are raised
        # then fail the job and log the exception message.
        print("Function failed due to exception.")
        print(e)
        traceback.print_exc()
        if job_id is not None:
            put_job_failure(job_id, "Function exception: " + str(e))

    print("Function complete.")
    return "Complete."
# snippet-end:[codepipeline.python.MyCodePipelineFunction.complete]