diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/.gitignore b/jenkinsfile-examples/aws-packer-terraform-inspec/.gitignore new file mode 100644 index 0000000..5e9fcd1 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/.gitignore @@ -0,0 +1,6 @@ +backend_config.tf +plan.out +.terraform/ +aws-security/files/ +manifest-*.json +output.json diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/Jenkinsfile b/jenkinsfile-examples/aws-packer-terraform-inspec/Jenkinsfile new file mode 100644 index 0000000..096cc4b --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/Jenkinsfile @@ -0,0 +1,177 @@ +// Declarative Jenkinsfile Pipeline for a Hashicorp packer/terraform AWS simple EC2 stack +// (n.b. use of env.BRANCH_NAME to filter stages based on branch means this needs to be part +// of a Multibranch Project in Jenkins - this fits with the model of branches/PRs being +// tested & master being deployed) +pipeline { + agent any + environment { + AWS_DEFAULT_REGION = 'us-east-1' + } + + stages { + stage('Validate & lint') { + parallel { + stage('packer validate') { + agent { + docker { + image 'simonmcc/hashicorp-pipeline:latest' + alwaysPull true + } + } + steps { + checkout scm + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "packer validate ./base/base.json" + sh "AMI_BASE=ami-fakefake packer validate app/app.json" + } + } + } + stage('terraform fmt') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + steps { + checkout scm + sh "terraform fmt -check=true -diff=true" + } + } + } + } + stage('build AMIs') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "./scripts/build.sh base base" + sh "./scripts/build.sh app app base" + } + } + } + } + + stage('build test stack') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + when { + expression { env.BRANCH_NAME != 'master' } + } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "./scripts/tf-wrapper.sh -a plan" + sh "./scripts/tf-wrapper.sh -a apply" + sh "cat output.json" + stash name: 'terraform_output', includes: '**/output.json' + } + } + } + post { + failure { + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "./scripts/tf-wrapper.sh -a destroy" + } + } + } + } + } + stage('test test stack') { + agent { + docker { + image 'chef/inspec:latest' + args "--entrypoint=''" + } + } + when { + expression { env.BRANCH_NAME != 'master' } + } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + unstash 'terraform_output' + sh "cat output.json" + sh "mkdir aws-security/files || true"
+ sh "mkdir test-results || true" + sh "cp output.json aws-security/files/output.json" + sh "inspec exec aws-security --reporter=cli junit:test-results/inspec-junit.xml -t aws://us-east-1" + sh "touch test-results/inspec-junit.xml" + junit 'test-results/*.xml' + } + } + } + } + stage('destroy test stack') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + when { + expression { env.BRANCH_NAME != 'master' } + } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "./scripts/tf-wrapper.sh -a destroy" + } + } + } + } + stage('terraform plan - master') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + when { + expression { env.BRANCH_NAME == 'master' } + } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + sh "./scripts/tf-wrapper.sh -a plan" + stash name: 'terraform_plan', includes: 'plan/plan.out,.terraform/**' + } + } + } + } + stage('Manual Approval') { + when { + expression { env.BRANCH_NAME == 'master' } + } + steps { + input 'Do you approve the apply?' + } + } + stage('terraform apply - master') { + agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } + when { + expression { env.BRANCH_NAME == 'master' } + } + steps { + checkout scm + withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', + credentialsId: 'demo-aws-creds', + accessKeyVariable: 'AWS_ACCESS_KEY_ID', + secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { + wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { + unstash 'terraform_plan' + sh "./scripts/tf-wrapper.sh -a apply" + } + } + } + } + } +} diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/README.md b/jenkinsfile-examples/aws-packer-terraform-inspec/README.md new file mode 100644 index 0000000..5669888 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/README.md @@ -0,0 +1,34 @@ +# Real World AWS Packer & Terraform Pipeline + +This is a working Jenkinsfile Multibranch Pipeline that builds AWS AMIs with packer & deploys EC2 infrastructure from those AMIs with terraform - both a per-branch test stack, which the pipeline destroys again, and a production stack maintained from the master branch. + +![](images/blueocean-non-master.png) +![](images/blueocean-master.png) + +The pipeline uses 2 docker images to run jobs. Most of the work is done in [hashicorp-pipeline](https://hub.docker.com/r/simonmcc/hashicorp-pipeline/), which contains [packer](https://www.packer.io), [terraform](https://www.terraform.io), the [aws cli](https://aws.amazon.com/cli/) & some other [needed binaries](https://github.com/simonmcc/hashicorp-pipeline/blob/master/Dockerfile#L3-L4) (jq, perl); the other docker image, [chef/inspec](https://hub.docker.com/r/chef/inspec/), is used to test the stack built during non-master pipeline executions.
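+
+The glue between terraform & inspec is `output.json`: the apply step runs `terraform output -json > output.json`, and the Jenkinsfile copies that file into `aws-security/files/` so the inspec profile can read the IDs of the resources it should test. A rough sketch of what that file looks like for this two-VPC stack (the VPC IDs here are made up):
+
+```json
+{
+  "dmz_vpc_id": {
+    "sensitive": false,
+    "type": "string",
+    "value": "vpc-0aaaaaaaaaaaaaaaa"
+  },
+  "main_vpc_id": {
+    "sensitive": false,
+    "type": "string",
+    "value": "vpc-0bbbbbbbbbbbbbbbb"
+  }
+}
+```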
+ +### Features +* `terraform fmt -check=true -diff=true` used to check terraform config files +* `packer validate` used to validate packer config files +* `base` and `application` AMIs built and tagged with the SHA1 of `base/base.json` and `app/app.json`, to help prevent unnecessary AMI builds +* Automatically configures terraform remote state (S3 & DynamoDB) +* terraform workspaces used to store per-branch terraform state +* parameterized terraform build using per-branch tfvars +* `chef/inspec` used to validate AWS resources +* `terraform plan` with stash of plan, human approval, `terraform apply` workflow for master/production changes + +### Next steps to use this in your own project +* Add a `demo-aws-creds` credential set (uses the [AWS Credentials](https://plugins.jenkins.io/aws-credentials) plugin) +* Update terraform to actually deploy an EC2 instance (for demo purposes this only builds a VPC, so zero cost & quick cycle time for pipeline experimentation; trivial to add an EC2 instance) +* Use an AWS Auto Scaling group to replace running EC2 instances with EC2 instances built from the master branch `app` AMI +* Use AMI tags to ensure the `app` AMI matches the branch (use the SHA1 of `app/app.json` to search for the AMI in terraform; requires some tfvars templating/injection) +* Extend the `chef/inspec` stage to test AWS EC2 +* Add a Selenium or other web test stage + +If you want a simple dockerized Jenkins setup to host this, I used [Jenkins 201](https://github.com/jenkins201/jenkins-container); there's also an example Job DSL [here](https://github.com/jenkins201/jenkins-container/blob/master/jobs/packer_terraform.groovy). + +### Further info +Some of the scripts in this pipeline ([build.sh](scripts/build.sh) & [common.sh](scripts/common.sh)) are based on an incomplete [packer/AMI](https://github.com/CircleCI-Public/circleci-packer) example from [CircleCI](https://circleci.com/blog/how-to-build-immutable-infrastructure-with-packer-and-circleci-workflows/). + + diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/app/app.json b/jenkinsfile-examples/aws-packer-terraform-inspec/app/app.json new file mode 100644 index 0000000..12b421b --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/app/app.json @@ -0,0 +1,68 @@ +{ + "_readme": [ + "The AMI used is generated by the packer template base/base.json", + "" + ], + "variables": { + "ami_name": "app", + "ami_base": "{{env `AMI_BASE`}}", + "ami_sha": "{{env `SHA`}}", + "aws_access_key": "{{env `AWS_ACCESS_KEY`}}", + "aws_secret_key": "{{env `AWS_SECRET_ACCESS_KEY`}}" + }, + "builders": [ + { + "ami_description": "{{user `ami_name`}} AMI", + "ami_name": "{{user `ami_name`}} {{timestamp}}", + "ami_regions": [ + "us-east-1" + ], + "instance_type": "t1.micro", + "region": "us-east-1", + "run_tags": { + "ami-create": "{{user `ami_name`}}" + }, + "source_ami": "{{user `ami_base`}}", + "ssh_username": "ubuntu", + "subnet_id": "", + "tags": { + "OS_Name": "Ubuntu", + "OS_Version": "16.04", + "SHA": "{{user `ami_sha`}}", + "AMI": "{{user `ami_name`}}" + }, + "type": "amazon-ebs", + "vpc_id": "" + } + ], + "post-processors": [ + { + "output": "manifest-app.json", + "strip_path": true, + "type": "manifest" + } + ], + "provisioners": [ + { + "inline": [ + "while [ ! 
-f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done" + ], + "type": "shell" + }, + { + "execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}", + "scripts": [ + "./app/tasks/apache.sh" + ], + "type": "shell" + }, + { + "execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}", + "scripts": [ + "./base/tasks/cleanup.sh", + "./base/tasks/debug.sh" + ], + "type": "shell" + } + ] + } diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/app/tasks/apache.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/app/tasks/apache.sh new file mode 100644 index 0000000..28dd266 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/app/tasks/apache.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e + +echo '---- install Apache' + +DEBIAN_FRONTEND=noninteractive apt-get -y update +DEBIAN_FRONTEND=noninteractive apt-get -y install apache2 + +cat > /var/www/html/index.html <> /etc/skel/.bashrc' + +echo "---- make Apt non interactive" +sudo /bin/bash -c 'echo "force-confnew" >> /etc/dpkg/dpkg.cfg' +#sudo /bin/bash -c 'cat /tmp/dpkg.cfg.update >> /etc/sudoers.d/env_keep' +#sudo cp /tmp/apt.conf.update /etc/apt/apt.conf + +echo "---- Update and Upgrade" +sudo DEBIAN_FRONTEND=noninteractive apt-get -y update +sudo DEBIAN_FRONTEND=noninteractive apt-get -y upgrade +sudo DEBIAN_FRONTEND=noninteractive apt-get -y install apt-transport-https +sudo DEBIAN_FRONTEND=noninteractive apt-get -y install curl unzip zip jq diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/cleanup.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/cleanup.sh new file mode 100644 index 0000000..c0fbf39 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/cleanup.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e + +echo "---- cleanup" +echo Ubuntu Provision Cleanup +sudo DEBIAN_FRONTEND=noninteractive apt-get -y autoremove --purge +sudo DEBIAN_FRONTEND=noninteractive apt-get -y autoclean +sudo DEBIAN_FRONTEND=noninteractive apt-get check + +sudo rm -rf /var/lib/apt/lists/* +sudo rm -rf /tmp/* diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/debug.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/debug.sh new file mode 100644 index 0000000..7f1c2d4 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/base/tasks/debug.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +echo "---- debug info" +uname -a +cat /etc/os-release +dpkg -l | grep linux- diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-master.png b/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-master.png new file mode 100644 index 0000000..11020d1 Binary files /dev/null and b/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-master.png differ diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-non-master.png b/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-non-master.png new file mode 100644 index 0000000..4f1cf80 Binary files /dev/null and b/jenkinsfile-examples/aws-packer-terraform-inspec/images/blueocean-non-master.png differ diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/master.tfvars b/jenkinsfile-examples/aws-packer-terraform-inspec/master.tfvars new file mode 100644 index 0000000..282b580 --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/master.tfvars @@ -0,0 +1,2 @@ +vpc_main_cidr = "172.18.0.0/16" +vpc_dmz_cidr = "172.19.0.0/16" diff --git 
a/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/build.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/build.sh new file mode 100755 index 0000000..ceec4ac --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/build.sh @@ -0,0 +1,56 @@ +#!/bin/bash +# +# Hashicorp packer/terraform simple stack build wrapper +# * facilitates building base & service/app AMIs (service/app depends on base) +# * tags AMIs with the SHA1 of the packer .json file that built them +# +# Wrapping a few CLI commands in bash always seems like a good idea at the start. +# It's not. Don't do it. Use python to wrap & possibly call APIs directly. + +# Exit immediately if a command exits with a non-zero status +set -e + +# debug - expand all commands +# set -x + +# load our helper functions +source scripts/common.sh + +# check that the tools we require are present +package_check + +# +# build.sh DIR NAME [BASE_NAME] +DIR="$1" +NAME="$2" +BASE_NAME="$3" +if [[ -z "$DIR" ]]; then + echo "please specify the directory as first runtime argument" + exit 1 +fi +if [[ -z "$NAME" ]]; then + echo "please specify the name as second runtime argument" + exit 1 +fi +if [[ -z "$BASE_NAME" ]]; then + echo "No base AMI given" +else + export BASE_BUILT=$(base_rebuilt $BASE_NAME) + if [ "${BASE_BUILT}" = "false" ]; then + echo "Couldn't find ${BASE_NAME} in manifest-${BASE_NAME}.json, looking up AMI via EC2 API" + fi + export AMI_BASE="$(get_base_ami "$BASE_BUILT" "$BASE_NAME" "$BASE_NAME")" +fi + +export SHA=$(git ls-tree HEAD "$DIR" | cut -d" " -f3 | cut -f1) +TAG_EXISTS=$(tag_exists $SHA) + +if [ "$TAG_EXISTS" = "false" ]; then + echo "No AMI found for ${NAME} (SHA: ${SHA}), building one.." + packer build ${DIR}/$NAME.json + PACKER_EXIT=$? + echo "Packer exit code: ${PACKER_EXIT}" +else + echo "AMI found for ${NAME} (SHA: ${SHA})" + touch manifest-${NAME}.json +fi diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/common.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/common.sh new file mode 100755 index 0000000..984effc --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/common.sh @@ -0,0 +1,191 @@ +#!/bin/bash +# +# Wrapping a few CLI commands in bash always seems like a good idea at the start. +# It's not. Don't do it. Use python to wrap & possibly call APIs directly. +tag_exists () { + local SHA=$1 + if [[ -z "$SHA" ]]; then + echo "-- ERROR: there was a problem looking up AMI by sha" + exit 1 + fi + EMPTY=$(aws ec2 describe-images --filters Name=tag:SHA,Values=$SHA --query 'Images[*]') + AWS_CLI_EXIT_CODE=$?
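+  # describe-images returns "[]" when no image carries this SHA tag; tag_exists echoes
+  # "false" in that case (build.sh then runs packer) and "true" when a matching AMI already exists.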
+ if [[ "${AWS_CLI_EXIT_CODE}" -eq 0 ]]; then + if [ "$EMPTY" = "[]" ]; then + echo "false" + else + echo "true" + fi + else + (>&2 echo "ERROR: AWS CLI error checking for existing images matching ${SHA}") + exit 2 + fi +} + +get_git_branch () { +# output the current branch, handling detached HEAD as found in Jenkins +# https://stackoverflow.com/questions/6059336/how-to-find-the-current-git-branch-in-detached-head-state + local GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD) + + # Jenkins will often check out the SHA of a branch (detached HEAD) + if [[ "${GIT_BRANCH}" == 'HEAD' ]]; then + # lookup branch against remotes, without network access (we may not have creds to talk to the git remote) + echo "$(git branch --remote --verbose --no-abbrev --contains | sed -Ene 's/^[^\/]*\/([^\ ]+).*$/\1/p')" + else + echo "${GIT_BRANCH}" + fi +} + +base_rebuilt () { + local NAME=$1 + if [[ -e "manifest-$NAME.json" ]] && [[ -s "manifest-$NAME.json" ]]; then + echo "true" + else + echo "false" + fi +} + +extract_artifact_id () { + local NAME="$1" + local AMI="$(cat manifest-$NAME.json | jq '.builds[0].artifact_id' | perl -n -e'/us-east-1:(ami-[a-z0-9]+)/ && print $1')" + echo "${AMI}" +} + +get_base_ami () { + local BASE_BUILT=$1 + local DIR=$2 + local NAME=$3 + if [ "$BASE_BUILT" = "false" ]; then + EXISTING_BASE_SHA="$(git ls-tree HEAD $DIR | cut -d" " -f3 | cut -f1)" + EXISTING_BASE_IMAGE=$(aws ec2 describe-images --filters Name=tag:SHA,Values=$EXISTING_BASE_SHA --query 'Images[*]' | jq -r '.[0].ImageId') + echo "$EXISTING_BASE_IMAGE" + else + BASE_AMI_US_EAST_1="$(extract_artifact_id $NAME)" + echo "${BASE_AMI_US_EAST_1}" + fi +} + +package_check () { + command -v aws > /dev/null || (echo "aws cli must be installed" && exit 1) + command -v packer > /dev/null || (echo "packer must be installed" && exit 1) + command -v terraform > /dev/null || (echo "terraform must be installed" && exit 1) + command -v git > /dev/null || (echo "git must be installed" && exit 1) + command -v jq > /dev/null || (echo "jq must be installed" && exit 1) + command -v perl > /dev/null || (echo "perl must be installed" && exit 1) +} + +check_terraform_version() { + # TODO: extract from requirements.txt or something? + TERRAFORM_REQUIRED_VERSION="v0.11.7" + + TERRAFORM_BIN=$(which terraform) + TERRAFORM_INSTALLED_VERSION=$(${TERRAFORM_BIN} -version | awk '/^Terraform/{ print $2 }') + + if [[ "${TERRAFORM_INSTALLED_VERSION}" != "${TERRAFORM_REQUIRED_VERSION}" ]]; then + echo "ERROR: ${TERRAFORM_BIN} is reporting ${TERRAFORM_INSTALLED_VERSION}, ${TERRAFORM_REQUIRED_VERSION} required, aborting."
+ exit 1 + fi +} + +check_aws_credentials () { + [[ -z "${AWS_DEFAULT_REGION}" ]] && (echo "AWS_DEFAULT_REGION must be set" && exit 1) + [[ -z "${AWS_ACCESS_KEY_ID}" ]] && (echo "AWS_ACCESS_KEY_ID must be set" && exit 1) + [[ -z "${AWS_SECRET_ACCESS_KEY}" ]] && (echo "AWS_SECRET_ACCESS_KEY must be set" && exit 1) + [[ 1 ]] +} + +generate_terraform_backend() { + # inspired by https://github.com/hashicorp/terraform/issues/12877#issuecomment-311649591 + local PROJECT_NAME + local ACCOUNT_ID + local LOCATION_CONSTRAINT + local BUCKET_NAME + local BUCKET_EXISTS + local TABLE_INDEX + local TABLE_NAME + + if [[ -z "$1" ]]; then + PROJECT_NAME="${PWD##*/}" # use current dir name + else + PROJECT_NAME=$1 + fi + ACCOUNT_ID="$(aws sts get-caller-identity --query Account --output text)" + + if [[ "${AWS_DEFAULT_REGION}" = "us-east-1" ]]; then + LOCATION_CONSTRAINT="" + else + LOCATION_CONSTRAINT="--create-bucket-configuration LocationConstraint=${AWS_DEFAULT_REGION}" + fi + + BUCKET_NAME="terraform-tfstate-${ACCOUNT_ID}" + BUCKET_EXISTS=$(aws s3api list-buckets | jq ".Buckets[] | select(.Name == \"${BUCKET_NAME}\")") + if [[ -z "${BUCKET_EXISTS}" ]]; then + echo "Creating Terraform State S3 Bucket ${BUCKET_NAME} in ${AWS_DEFAULT_REGION}" + aws s3api create-bucket \ + --region "${AWS_DEFAULT_REGION}" \ + ${LOCATION_CONSTRAINT} \ + --bucket "${BUCKET_NAME}" + fi + + TABLE_NAME="terraform_locks" + TABLE_INDEX=$(aws dynamodb list-tables | jq ".TableNames | index(\"${TABLE_NAME}\")") + if [[ "${TABLE_INDEX}" = 'null' ]]; then + echo "Creating Terraform State DynamoDB Lock Table ${TABLE_NAME} in ${AWS_DEFAULT_REGION}" + aws dynamodb create-table \ + --region "${AWS_DEFAULT_REGION}" \ + --table-name ${TABLE_NAME} \ + --attribute-definitions AttributeName=LockID,AttributeType=S \ + --key-schema AttributeName=LockID,KeyType=HASH \ + --provisioned-throughput ReadCapacityUnits=1,WriteCapacityUnits=1 + aws dynamodb wait table-exists --table-name terraform_locks + fi + + + # NB - the pattern of managing the S3 bucket & DynamoDB table in Terraform + # makes it impossible to cleanly destroy the terraform stack, so we don't do that + cat << EOF > ./backend_config.tf +terraform { + backend "s3" { + bucket = "${BUCKET_NAME}" + key = "${PROJECT_NAME}" + region = "${AWS_DEFAULT_REGION}" + dynamodb_table = "terraform_locks" + } +} +EOF +} + +map_branch_to_workspace() { + # TODO input & output sanity checking.. + if [[ $1 = 'master' ]]; then + echo "default" | tr / - + else + echo $1 | tr / - + fi +} + +map_branch_to_tfvars() { + # map the branch to a tfvars file, with some sensible defaults + local TF_VARS_FILE + case "$1" in + master) + TF_VARS_FILE=master.tfvars + ;; + develop) + TF_VARS_FILE=develop.tfvars + ;; + *) + if [[ -f "$1".tfvars ]]; then + TF_VARS_FILE="$1".tfvars + else + TF_VARS_FILE="testing-defaults.tfvars" + fi + ;; + esac + + if [[ ! -f "${TF_VARS_FILE}" ]]; then
+ touch "${TF_VARS_FILE}" + fi + + echo "${TF_VARS_FILE}" +} diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/tf-wrapper.sh b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/tf-wrapper.sh new file mode 100755 index 0000000..329b69e --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/scripts/tf-wrapper.sh @@ -0,0 +1,78 @@ +#!/bin/bash +# +# terraform wrapper +# +# * takes care of pre "terraform init" steps (S3 & DynamoDB setup) +# * creates a terraform workspace to match the branch (master==default) +# * destroys the workspace after destroying a stack +# +# Use case: build an ephemeral terraform configuration in CI to test & then destroy +# Use case: maintain default/master safely (how?!?) +# +set -e +# DEBUG +#set -x + +THIS_SCRIPT=${BASH_SOURCE[0]:-$0} +# grumble, moan, PATH, symlinks +if [[ -L "${THIS_SCRIPT}" ]]; then + THIS_SCRIPT=`readlink ${THIS_SCRIPT} 2>&1` +fi +PROJECT_HOME="$( cd "$( dirname "${THIS_SCRIPT}" )/.." && pwd )" + +# load our helper functions +source ${PROJECT_HOME}/scripts/common.sh + +# default to plan, to show changes; valid options are plan, apply & destroy +TF_ACTION=plan + +OPTIND=1 # Reset is necessary if getopts was used previously in the script. It is a good idea to make this local in a function. +while getopts "a:e:hv" opt; do + case "$opt" in + a) TF_ACTION=${OPTARG} + ;; + *) + echo "usage: $0 [-a plan|apply|destroy]" >&2 + exit 1 + ;; + esac +done +shift "$((OPTIND-1))" # Shift off the options and optional --. + +# check that the tools we require are present +package_check + +# check that we have AWS credentials +check_aws_credentials + +GIT_BRANCH=$(get_git_branch) +TF_WORKSPACE=$(map_branch_to_workspace ${GIT_BRANCH}) +TF_VARS_FILE=$(map_branch_to_tfvars ${GIT_BRANCH}) + +# create the S3 bucket, DynamoDB table & matching backend_config.tf +generate_terraform_backend + +[[ ! -d .terraform ]] && terraform init +# the workspace may already exist - safe to ignore & carry on +terraform workspace new ${TF_WORKSPACE} || true +echo "Selecting workspace: ${TF_WORKSPACE}" +terraform workspace select ${TF_WORKSPACE} +case "${TF_ACTION}" in + plan) + [[ ! -d plan ]] && mkdir plan + terraform plan -var-file=${TF_VARS_FILE} -out=plan/plan.out + ;; + apply) + terraform apply plan/plan.out + terraform output + # once more for the camera + terraform output -json > output.json + ;; + destroy) + terraform destroy -var-file=${TF_VARS_FILE} -auto-approve + terraform workspace select default + terraform workspace delete ${TF_WORKSPACE} + ;; +esac + +echo "Done."
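+
+# Example usage (a sketch of the sequence the Jenkinsfile runs on a non-master branch;
+# assumes AWS credentials & AWS_DEFAULT_REGION are already exported in the environment):
+#
+#   ./scripts/tf-wrapper.sh -a plan      # write plan/plan.out for this branch's workspace & tfvars
+#   ./scripts/tf-wrapper.sh -a apply     # apply plan/plan.out & dump outputs to output.json
+#   ./scripts/tf-wrapper.sh -a destroy   # destroy the stack & delete the branch workspace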
diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/terraform.tf b/jenkinsfile-examples/aws-packer-terraform-inspec/terraform.tf new file mode 100644 index 0000000..666faad --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/terraform.tf @@ -0,0 +1,25 @@ +provider "aws" {} + +variable "vpc_main_cidr" { + type = "string" +} + +variable "vpc_dmz_cidr" { + type = "string" +} + +resource "aws_vpc" "main" { + cidr_block = "${var.vpc_main_cidr}" +} + +output "main_vpc_id" { + value = "${aws_vpc.main.id}" +} + +resource "aws_vpc" "dmz" { + cidr_block = "${var.vpc_dmz_cidr}" +} + +output "dmz_vpc_id" { + value = "${aws_vpc.dmz.id}" +} diff --git a/jenkinsfile-examples/aws-packer-terraform-inspec/testing-defaults.tfvars b/jenkinsfile-examples/aws-packer-terraform-inspec/testing-defaults.tfvars new file mode 100644 index 0000000..19ce40a --- /dev/null +++ b/jenkinsfile-examples/aws-packer-terraform-inspec/testing-defaults.tfvars @@ -0,0 +1,2 @@ +vpc_main_cidr = "172.28.0.0/16" +vpc_dmz_cidr = "172.29.0.0/16"