Commit 0ca60a4

Build with CodeBuild instead of BB Pipelines. Utilise terraform-modules. Add Dockerfile. Bump packages.
1 parent 78d3dd0 commit 0ca60a4

16 files changed: +724 -567 lines

Dockerfile

+14

@@ -0,0 +1,14 @@
+FROM node:8-alpine
+WORKDIR /app
+
+COPY package.json yarn.lock ./
+RUN yarn install
+
+ENV SERVER_HOSTNAME=0.0.0.0
+
+COPY server.ts tsconfig.json tslint.json webpack.config.ts webpack.prod.config.ts ./
+COPY src src
+
+EXPOSE 3001/tcp
+
+ENTRYPOINT ["yarn", "run", "dev"]

README.md

+9-5

@@ -2,11 +2,7 @@
 
 [Bitbucket Pipelines status](https://bitbucket.org/jch254/serverless-node-dynamodb-ui/addon/pipelines/home)
 
-A simple React/Redux-powered UI to front a simple [Serverless API](https://github.com/jch254/serverless-node-dynamodb-api). This project utilises [TypeScript for type checking](https://www.youtube.com/watch?v=V1po0BT7kac) and transpliation to browser-friendly ES5 JavaScript.
-
-Auth0 handles authentication. Users must sign up/login to generate an auth token and gain access to the secured area. All endpoints in the API check validity of the auth token and return unauthorised if invalid, the UI then prompts the user to log in again. The API also determines the identity of the user via the auth token.
-
-This project is deployed to AWS on S3, CloudFront is used as a CDN and Route 53 is used for DNS. All infrastructure is defined as code in the [/infrastructure](infrastructure) directory. Manual steps suck so this project uses Bitbucket Pipelines to automate the build and deployment to AWS - see [bitbucket-pipelines.yml](bitbucket-pipelines.yml). AWS credentials are set using [Bitbucket Pipelines environment variables](https://confluence.atlassian.com/bitbucket/environment-variables-in-bitbucket-pipelines-794502608.html).
+React/Redux-powered UI to front [Serverless API](https://github.com/jch254/serverless-node-dynamodb-api). This project utilises [TypeScript for type checking](https://www.youtube.com/watch?v=V1po0BT7kac) and transpilation to browser-friendly ES5 JavaScript. Auth0 handles authentication. Users must sign up/login to generate an auth token and gain access to the secured area. All endpoints in the API check the validity of the auth token and return unauthorised if it is invalid; the UI then prompts the user to log in again. The API also determines the identity of the user via the auth token.
 
 ### Main technologies used
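The token flow described in the paragraph above can be exercised directly against the deployed API. Below is a minimal sketch using curl, assuming the API expects the usual `Authorization: Bearer` header and that `/items` is a valid endpoint (both are assumptions, not part of this commit); a missing or invalid token should produce the unauthorised response that makes the UI prompt for a fresh login.

```
# Hypothetical requests illustrating the auth behaviour described above.
# /items and the Bearer scheme are placeholders, not taken from this repo.
API_BASE_URI="https://sls-api.603.nu"

# No token: the API returns unauthorised and the UI would prompt a new login.
curl -i "$API_BASE_URI/items"

# Valid Auth0 token: the request is authorised and scoped to that user's identity.
curl -i -H "Authorization: Bearer $AUTH_TOKEN" "$API_BASE_URI/items"
```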

@@ -37,6 +33,14 @@ yarn install
 yarn run dev
 ```
 
+### Running development version locally in Docker container
+1. Run the following commands in the app's root directory then submit requests to http://localhost:3001.
+
+```
+docker build -t sls-api .
+docker run -p 3001:3001 -e AUTH0_CLIENT_ID=YOUR_CLIENT_ID -e AUTH0_DOMAIN=YOUR_DOMAIN -e API_BASE_URI=YOUR_API sls-api
+```
+
 ### Building the production version
 1. Run the following commands in the app's root directory then check the /dist folder

bitbucket-pipelines.yml

-9
This file was deleted.

buildspec.yml

+44

@@ -0,0 +1,44 @@
+# All commands below are run from root directory of repository by CodeBuild
+version: 0.2
+
+env:
+  variables:
+    TF_VAR_region: "ap-southeast-2"
+    TF_VAR_name: "serverless-node-dynamodb-ui"
+    TF_VAR_kms_key_arns: '["arn:aws:kms:ap-southeast-2:982898479788:key/0ec9686b-13a1-40fc-8256-86e8d3503e9c"]'
+    TF_VAR_ssm_parameter_arns: '["arn:aws:ssm:ap-southeast-2:982898479788:parameter/shared/*","arn:aws:ssm:ap-southeast-2:982898479788:parameter/serverless-node-dynamodb-ui/*"]'
+    TF_VAR_build_docker_image: "jch254/docker-node-terraform-aws"
+    TF_VAR_build_docker_tag: "latest"
+    TF_VAR_buildspec: "buildspec.yml"
+    TF_VAR_source_location: "https://github.com/jch254/serverless-node-dynamodb-ui.git"
+    TF_VAR_bucket_name: "serverless-api.603.nu"
+    TF_VAR_dns_names: '["serverless-api.603.nu"]'
+    TF_VAR_route53_zone_id: "ZS32KHT5LS4PR"
+    TF_VAR_acm_arn: "arn:aws:acm:us-east-1:982898479788:certificate/2367a831-34bd-4d81-bb17-2f79d08329a6"
+    AUTH0_CLIENT_ID: "PabWYDl71ibZ920e3XjoPIe0QoJVrhtY"
+    AUTH0_DOMAIN: "603.au.auth0.com"
+    API_BASE_URI: "https://sls-api.603.nu"
+    REMOTE_STATE_BUCKET: "603-terraform-remote-state"
+  parameter-store:
+    GA_ID: "/serverless-node-dynamodb-ui/ga-id"
+
+phases:
+  install:
+    commands:
+      # Workaround until CodeBuild/CodePipeline retains file permissions
+      - find ./infrastructure -name "*.bash" -exec chmod +x {} \;
+      - ./infrastructure/install.bash
+
+  pre_build:
+    commands:
+      # Workaround until TF supports creds via Task Roles when running on ECS or CodeBuild
+      # See: https://github.com/hashicorp/terraform/issues/8746
+      - export AWS_ACCESS_KEY_ID=`curl --silent http://169.254.170.2:80$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI | jq -r '.AccessKeyId'`
+      - export AWS_SECRET_ACCESS_KEY=`curl --silent http://169.254.170.2:80$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI | jq -r '.SecretAccessKey'`
+      - export AWS_SESSION_TOKEN=`curl --silent http://169.254.170.2:80$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI | jq -r '.Token'`
+
+  build:
+    commands:
+      - ./infrastructure/build-artifacts.bash
+      - ./infrastructure/deploy-infrastructure.bash
+      - ./infrastructure/upload-artifacts.bash
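The parameter-store entry above is resolved through SSM at build time, which is what the ssm:GetParameters and kms:Decrypt statements in the CodeBuild role policy added later in this commit exist for. A small sketch of checking the same parameter by hand with the standard AWS CLI, assuming credentials for the target account are available locally:

```
# Fetch the value CodeBuild injects into the build environment as GA_ID.
# Requires ssm:GetParameters on the parameter and kms:Decrypt on its key.
aws ssm get-parameters \
  --region ap-southeast-2 \
  --names "/serverless-node-dynamodb-ui/ga-id" \
  --with-decryption \
  --query "Parameters[0].Value" \
  --output text
```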

infrastructure/README.md

+36-20

@@ -1,8 +1,8 @@
 # Deployment/Infrastructure
 
-This project is deployed to AWS on S3. CloudFront is used as a CDN. Route 53 is used for DNS.
+This project is built, tested and deployed to AWS by [codebuild-github-webhook](https://github.com/jch254/codebuild-github-webhook) and CodeBuild. Artifacts are served from S3. CloudFront is used as a CDN. Route 53 is used for DNS.
 
---
+---
 
 ### Deployment Prerequisites
 
@@ -15,33 +15,49 @@ To deploy to AWS, you must:
 1. Set your credentials as the environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.
 1. Run `aws configure` and fill in the details it asks for.
 1. Run on an EC2 instance with an IAM Role.
-1. Run via CodeBuild or ECS Task with an IAM Role.
-1. Update the S3 backend in [main.tf](main.tf):
-```
-terraform {
-  backend "s3" {
-    bucket = "YOUR_S3_BUCKET"
-    key = "serverless-node-dynamodb-ui.tfstate"
-    region = "YOUR_REGION"
-    encrypt= "true"
-  }
-}
-```
+1. Run via CodeBuild or ECS Task with an IAM Role (see [buildspec-test.yml](../buildspec-test.yml) for workaround)
 
 #### Deploying infrastructure
 
-1. `terraform init`
-1. `terraform plan -var-file main.tfvars -out main.tfplan`
+1. Update and export all environment variables specified in the appropriate buildspec declaration (check all phases) and bash scripts
+1. Initialise Terraform:
+```
+terraform init \
+  -backend-config 'bucket=YOUR_S3_BUCKET' \
+  -backend-config 'key=YOUR_S3_KEY' \
+  -backend-config 'region=YOUR_REGION' \
+  -get=true \
+  -upgrade=true
+```
+1. `terraform plan -out main.tfplan`
 1. `terraform apply main.tfplan`
 
 #### Updating infrastructure
 
+1. Update and export all environment variables specified in the appropriate buildspec declaration (check all phases) and bash scripts
 1. Make necessary infrastructure code changes.
-1. `terraform init`
-1. `terraform plan -var-file main.tfvars -out main.tfplan`
+1. Initialise Terraform:
+```
+terraform init \
+  -backend-config 'bucket=YOUR_S3_BUCKET' \
+  -backend-config 'key=YOUR_S3_KEY' \
+  -backend-config 'region=YOUR_REGION' \
+  -get=true \
+  -upgrade=true
+```
+1. `terraform plan -out main.tfplan`
 1. `terraform apply main.tfplan`
 
 #### Destroying infrastructure (use with care)
 
-1. `terraform init`
-1. `terraform destroy -var-file main.tfvars`
+1. Update and export all environment variables specified in the appropriate buildspec declaration (check all phases) and bash scripts
+1. Initialise Terraform:
+```
+terraform init \
+  -backend-config 'bucket=YOUR_S3_BUCKET' \
+  -backend-config 'key=YOUR_S3_KEY' \
+  -backend-config 'region=YOUR_REGION' \
+  -get=true \
+  -upgrade=true
+```
+1. `terraform destroy`
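The "update and export all environment variables" steps above correspond to the env block in buildspec.yml. A minimal sketch of doing this by hand before running Terraform outside CodeBuild; the values shown are copied from buildspec.yml, and the remaining TF_VAR_* entries plus valid AWS credentials are assumed to be exported in the same way:

```
# Mirror the CodeBuild environment locally, then initialise against the remote state.
export TF_VAR_region="ap-southeast-2"
export TF_VAR_name="serverless-node-dynamodb-ui"
export REMOTE_STATE_BUCKET="603-terraform-remote-state"
# ...export the remaining TF_VAR_* values from buildspec.yml...

cd infrastructure
terraform init \
  -backend-config "bucket=${REMOTE_STATE_BUCKET}" \
  -backend-config "key=${TF_VAR_name}" \
  -backend-config "region=${TF_VAR_region}" \
  -get=true \
  -upgrade=true
terraform plan -out main.tfplan
```

This is the same init that infrastructure/deploy-infrastructure.bash runs in the build phase.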

infrastructure/build-artifacts.bash

+4-2

@@ -1,5 +1,7 @@
 #!/bin/bash -ex
 
-yarn install
-export API_BASE_URI="https://sls-api.603.nu"
+echo Building artifacts...
+
 yarn run build
+
+echo Finished building artifacts
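Note that yarn install and the API_BASE_URI export have moved out of this script (to install.bash and the buildspec env block respectively), so any build-time variables now have to be present in the environment when it runs. A sketch of invoking it outside CodeBuild, assuming the webpack build reads API_BASE_URI, the AUTH0_* values and GA_ID from the environment at compile time, as the buildspec env block suggests:

```
# Hypothetical local invocation; placeholder values mirror the buildspec/README examples.
export API_BASE_URI="https://sls-api.603.nu"
export AUTH0_CLIENT_ID="YOUR_CLIENT_ID"
export AUTH0_DOMAIN="YOUR_DOMAIN"
export GA_ID="YOUR_GA_ID"

./infrastructure/install.bash
./infrastructure/build-artifacts.bash   # production bundle lands in /dist per the README
```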

infrastructure/codebuild-role-policy.tpl

+36

@@ -0,0 +1,36 @@
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Resource": [
+        "*"
+      ],
+      "Action": [
+        "logs:*",
+        "s3:*",
+        "codebuild:*",
+        "codepipeline:*",
+        "cloudwatch:*",
+        "cloudfront:*",
+        "route53:*",
+        "iam:*",
+        "ssm:DescribeParameters"
+      ]
+    },
+    {
+      "Effect": "Allow",
+      "Action": [
+        "kms:Decrypt"
+      ],
+      "Resource": ${kms_key_arns}
+    },
+    {
+      "Effect": "Allow",
+      "Action": [
+        "ssm:GetParameters"
+      ],
+      "Resource": ${ssm_parameter_arns}
+    }
+  ]
+}
infrastructure/deploy-infrastructure.bash

+11-2

@@ -1,7 +1,16 @@
 #!/bin/bash -ex
 
+echo Deploying infrastructure via Terraform...
+
 cd infrastructure
-terraform init
-terraform plan -var-file main.tfvars -out main.tfplan
+terraform init \
+  -backend-config "bucket=${REMOTE_STATE_BUCKET}" \
+  -backend-config "key=${TF_VAR_name}" \
+  -backend-config "region=${TF_VAR_region}" \
+  -get=true \
+  -upgrade=true
+terraform plan -out main.tfplan
 terraform apply main.tfplan
 cd ..
+
+echo Finished deploying infrastructure
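Because the backend key passed above is just ${TF_VAR_name}, a successful run can be sanity-checked by looking for the state object in the remote state bucket. A small sketch using the AWS CLI, with the bucket and key names taken from buildspec.yml:

```
# Confirm the Terraform state written by this script exists in the remote state bucket.
aws s3 ls "s3://603-terraform-remote-state/serverless-node-dynamodb-ui"
```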

infrastructure/install.bash

+7

@@ -0,0 +1,7 @@
+#!/bin/bash -ex
+
+echo Installing dependencies...
+
+yarn install
+
+echo Finished installing dependencies

infrastructure/main.tf

+43-75

@@ -1,8 +1,5 @@
 terraform {
   backend "s3" {
-    bucket = "603-terraform-remote-state"
-    key = "serverless-node-dynamodb-ui.tfstate"
-    region = "ap-southeast-2"
     encrypt= "true"
   }
 }
@@ -12,86 +9,57 @@ provider "aws" {
   version = "~> 0.1"
 }
 
-resource "aws_s3_bucket" "apex_bucket" {
-  bucket = "${var.dns_name}"
-  acl = "public-read"
-  force_destroy = true
+resource "aws_iam_role" "codebuild_role" {
+  name = "${var.name}-codebuild"
 
-  policy = <<POLICY
+  assume_role_policy = <<EOF
 {
-  "Version":"2012-10-17",
-  "Statement":[{
-    "Sid":"PublicReadForGetBucketObjects",
-    "Effect":"Allow",
-    "Principal": "*",
-    "Action":"s3:GetObject",
-    "Resource":["arn:aws:s3:::${var.dns_name}/*"]
-  }]
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": {
+        "Service": "codebuild.amazonaws.com"
+      },
+      "Action": "sts:AssumeRole"
+    }
+  ]
+}
+EOF
 }
-POLICY
 
-  website {
-    index_document = "index.html"
-    error_document = "index.html"
+data "template_file" "codebuild_policy" {
+  template = "${file("./codebuild-role-policy.tpl")}"
+
+  vars {
+    kms_key_arns = "${var.kms_key_arns}"
+    ssm_parameter_arns = "${var.ssm_parameter_arns}"
   }
 }
 
-resource "aws_cloudfront_distribution" "cdn" {
-  origin {
-    domain_name = "${aws_s3_bucket.apex_bucket.website_endpoint}"
-    origin_id = "apex_bucket_origin"
-    custom_origin_config {
-      http_port = "80"
-      https_port = "443"
-      origin_protocol_policy = "http-only"
-      origin_ssl_protocols = ["TLSv1", "TLSv1.1", "TLSv1.2"]
-    }
-  }
-  enabled = true
-  aliases = ["${var.dns_name}"]
-  custom_error_response {
-    error_code = "404"
-    response_code = "200"
-    response_page_path ="/index.html"
-  }
-  price_class = "PriceClass_All"
-  default_cache_behavior {
-    allowed_methods = [ "DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT" ]
-    cached_methods = [ "GET", "HEAD" ]
-    target_origin_id = "apex_bucket_origin"
-    forwarded_values {
-      query_string = true
-      headers = ["*"]
-      cookies {
-        forward = "all"
-      }
-    }
-    viewer_protocol_policy = "redirect-to-https"
-    compress = true
-    min_ttl = 0
-    default_ttl = 3600
-    max_ttl = 86400
-  }
-  viewer_certificate {
-    acm_certificate_arn = "${var.acm_arn}"
-    ssl_support_method = "sni-only"
-    minimum_protocol_version = "TLSv1"
-  }
-  restrictions {
-    geo_restriction {
-      restriction_type = "none"
-    }
-  }
+resource "aws_iam_role_policy" "codebuild_policy" {
+  name = "${var.name}-codebuild-policy"
+  role = "${aws_iam_role.codebuild_role.id}"
+  policy = "${data.template_file.codebuild_policy.rendered}"
 }
 
-resource "aws_route53_record" "apex_route53_record" {
-  zone_id = "${var.route53_zone_id}"
-  name = "${var.dns_name}"
-  type = "A"
+module "codebuild_project" {
+  source = "github.com/jch254/terraform-modules//codebuild-project?ref=1.0.0"
 
-  alias {
-    name = "${aws_cloudfront_distribution.cdn.domain_name}"
-    zone_id = "${aws_cloudfront_distribution.cdn.hosted_zone_id}"
-    evaluate_target_health = false
-  }
+  name = "${var.name}"
+  codebuild_role_arn = "${aws_iam_role.codebuild_role.arn}"
+  build_docker_image = "${var.build_docker_image}"
+  build_docker_tag = "${var.build_docker_tag}"
+  source_type = "${var.source_type}"
+  buildspec = "${var.buildspec}"
+  source_location = "${var.source_location}"
+}
+
+module "webapp" {
+  source = "github.com/jch254/terraform-modules//web-app?ref=1.0.0"
+
+  bucket_name = "${var.bucket_name}"
+  dns_names = "${var.dns_names}"
+  route53_zone_id = "${var.route53_zone_id}"
+  acm_arn = "${var.acm_arn}"
 }

infrastructure/main.tfvars

-5
This file was deleted.
