#####
#
# upload-portfolio-lambda.py
#
# Lambda function normally invoked by CodePipeline when a change is detected in the
# GitHub source. CodeBuild creates a compressed archive file in an S3 build bucket,
# and this code is then used to populate the target S3 bucket, which is also the HTML
# root for my portfolio URL of http://portfolio.mikeoc.me
#
# Code based on an example provided in the CodeAcademy course
#
# "Make your Portfolio Dynamic with ReactJS"
#
# Reference code @ https://github.com/robin-acloud/my-portfolio/
#
# Author: Michael O'Connor
#
# Last Change: September 17, 2017
#
#####
import boto3
from botocore.client import Config
import StringIO
import zipfile
import mimetypes
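
# For reference, a rough sketch of the event shape this handler expects when invoked
# by CodePipeline. Only the fields the code below actually reads are shown; the bucket
# and key values are illustrative placeholders, not the real ones.
#
#   {
#     "CodePipeline.job": {
#       "id": "<job-id>",
#       "data": {
#         "inputArtifacts": [
#           {
#             "name": "MyAppBuild",
#             "location": {
#               "s3Location": {
#                 "bucketName": "<build-bucket>",
#                 "objectKey": "<path/to/build.zip>"
#               }
#             }
#           }
#         ]
#       }
#     }
#   }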

def lambda_handler(event, context):

    # Provide pointer to SNS resource we will use to notify subscribers
    sns = boto3.resource('sns')
    topic = sns.Topic('arn:aws:sns:us-east-1:180587510551:deployPortfolioTopic')

    # Define target S3 bucket which serves as HTML root for portfolio website
    targetBucket = 'portfolio.mikeoc.me'

    # When calling this Lambda function manually, define an intermediate build location
    location = {
        "bucketName": 'portfoliobuild.mikeoc.me',
        "objectKey": 'portfoliobuild.zip'
    }

    # Normal path through the following code will be triggered by CodePipeline
    try:
        job = event.get("CodePipeline.job")     # Get info from CodePipeline

        # Determine build bucket and zipfile locations
        if job:                                 # If invoked from CodePipeline
            for artifact in job["data"]["inputArtifacts"]:
                if artifact["name"] == "MyAppBuild":
                    location = artifact["location"]["s3Location"]

        s3 = boto3.resource('s3', config=Config(signature_version='s3v4'))

        print "Building portfolio from: " + str(location)

        portfolio_bucket = s3.Bucket(targetBucket)          # target bucket
        build_bucket = s3.Bucket(location["bucketName"])    # source bucket

        # Copy newly built zip archive contents into memory
        portfolio_zip = StringIO.StringIO()                 # Create an in-memory buffer
        build_bucket.download_fileobj(location["objectKey"], portfolio_zip)

        # Now, copy the individual zipfile contents into the target S3 bucket,
        # set metadata appropriately and access permissions to public
        with zipfile.ZipFile(portfolio_zip) as myzip:
            for nm in myzip.namelist():
                print "Now processing file: " + nm
                obj = myzip.open(nm)
                if nm == "index.html":          # add specific metadata
                    portfolio_bucket.upload_fileobj(obj, nm, ExtraArgs={'ContentType': 'text/html;charset=utf-8'})
                else:
                    portfolio_bucket.upload_fileobj(obj, nm, ExtraArgs={'ContentType': mimetypes.guess_type(nm)[0]})
                portfolio_bucket.Object(nm).Acl().put(ACL='public-read')    # Make world readable

        # Update logs
        print "Portfolio Lambda Function complete"

        # Update CodePipeline if applicable
        if job:
            codepipeline = boto3.client('codepipeline')
            codepipeline.put_job_success_result(jobId=job["id"])

        # Update SNS topic if everything worked as expected
        topic.publish(Subject="Portfolio Deployed", Message="Deployed Successfully!")

    # If code experiences an error, publish to SNS and re-raise the exception
    except:
        topic.publish(Subject="Portfolio Deploy Fail", Message="Deploy Failed!")
        raise

    return 'Hello from Lambda'
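
#####
#
# For local experimentation only (CodePipeline invokes lambda_handler directly).
# A minimal sketch, assuming AWS credentials with access to the buckets and the SNS
# topic above are available in the environment. With an empty event there is no
# "CodePipeline.job" key, so the hard-coded build location defined in the handler
# is used instead.
#
#####

if __name__ == '__main__':
    print lambda_handler({}, None)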