Add an example Multibranch Jenkinsfile pipeline using Packer, Terraform and InSpec on AWS #92

Open · wants to merge 1 commit into master
6 changes: 6 additions & 0 deletions jenkinsfile-examples/aws-packer-terraform-inspec/.gitignore
@@ -0,0 +1,6 @@
backend_config.tf
plan.out
.terraform/
aws-security/files/
manifest-*.json
output.json
177 changes: 177 additions & 0 deletions jenkinsfile-examples/aws-packer-terraform-inspec/Jenkinsfile
@@ -0,0 +1,177 @@
// Declarative Jenkinsfile Pipeline for a simple HashiCorp Packer/Terraform AWS EC2 stack
// (n.b. the use of env.BRANCH_NAME to filter stages by branch means this needs to be part
// of a Multibranch Project in Jenkins - this fits the model of branches/PRs being
// tested & master being deployed)
pipeline {
agent any
environment {
AWS_DEFAULT_REGION = 'us-east-1'
}

stages {
stage('Validate & lint') {
parallel {
stage('packer validate') {
agent {
docker {
image 'simonmcc/hashicorp-pipeline:latest'
alwaysPull true
}
}
steps {
checkout scm
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "packer validate ./base/base.json"
sh "AMI_BASE=ami-fakefake packer validate app/app.json"
}
}
}
stage('terraform fmt') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
steps {
checkout scm
sh "terraform fmt -check=true -diff=true"
}
}
}
}
stage('build AMIs') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "./scripts/build.sh base base"
sh "./scripts/build.sh app app"
}
}
}
}

stage('build test stack') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
when {
expression { env.BRANCH_NAME != 'master' }
}
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "./scripts/tf-wrapper.sh -a plan"
sh "./scripts/tf-wrapper.sh -a apply"
sh "cat output.json"
stash name: 'terraform_output', includes: '**/output.json'
}
}
}
post {
failure {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "./scripts/tf-wrapper.sh -a destroy"
}
}
}
}
}
stage('test test stack') {
agent {
docker {
image 'chef/inspec:latest'
args "--entrypoint=''"
}
}
when {
expression { env.BRANCH_NAME != 'master' }
}
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
unstash 'terraform_output'
sh "cat output.json"
sh "mkdir aws-security/files || true"
sh "mkdir test-results || true"
sh "cp output.json aws-security/files/output.json"
sh "inspec exec aws-security --reporter=cli junit:test-results/inspec-junit.xml -t aws://us-east-1"
sh "touch test-results/inspec-junit.xml"
junit 'test-results/*.xml'
}
}
}
}
stage('destroy test stack') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
when {
expression { env.BRANCH_NAME != 'master' }
}
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "./scripts/tf-wrapper.sh -a destroy"
}
}
}
}
stage('terraform plan - master') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
when {
expression { env.BRANCH_NAME == 'master' }
}
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
sh "./scripts/tf-wrapper.sh -a plan"
stash name: 'terraform_plan', includes: 'plan/plan.out,.terraform/**'
}
}
}
}
stage('Manual Approval') {
when {
expression { env.BRANCH_NAME == 'master' }
}
steps {
input 'Do you approve the apply?'
}
}
stage('terraform apply - master') {
agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } }
when {
expression { env.BRANCH_NAME == 'master' }
}
steps {
checkout scm
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: 'demo-aws-creds',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) {
wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
unstash 'terraform_plan'
sh "./scripts/tf-wrapper.sh -a apply"
}
}
}
}
}
}
34 changes: 34 additions & 0 deletions jenkinsfile-examples/aws-packer-terraform-inspec/README.md
@@ -0,0 +1,34 @@
# Real World AWS Packer & Terraform Pipeline

This is a working Jenkinsfile Multibranch Pipeline that builds AWS AMIs and uses Terraform to deploy EC2 instances based on them, both in a per-branch test stack (destroyed by the pipeline) and in a production stack maintained from the master branch.

![](images/blueocean-non-master.png)
![](images/blueocean-master.png)

The pipeline uses two Docker images to run its jobs. Most of the work is done in [hashicorp-pipeline](https://hub.docker.com/r/simonmcc/hashicorp-pipeline/), which contains [Packer](https://www.packer.io), [Terraform](https://www.terraform.io), the [AWS CLI](https://aws.amazon.com/cli/) & some other [needed binaries](https://github.com/simonmcc/hashicorp-pipeline/blob/master/Dockerfile#L3-L4) (jq, perl). The other image, [chef/inspec](https://hub.docker.com/r/chef/inspec/), is used to test the stack built during non-master pipeline executions.

### Features
* `terraform fmt -check=true -diff=true` used to check terraform config files
* `packer validate` used to validate packer config files
* `base` and `app` AMIs built and tagged with the SHA-1 of `base/base.json` and `app/app.json`, to help prevent unnecessary AMI builds (see the hypothetical `build.sh` sketch under Further info)
* Automatically configures Terraform remote state (S3 & DynamoDB)
* Terraform workspaces used to store per-branch Terraform state
* Parameterized Terraform build using per-branch tfvars (a hypothetical wrapper sketch follows this list)
* `chef/inspec` used to validate AWS resources
* `terraform plan` with stash of plan, human approval, `terraform apply` workflow for master/production changes

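The `scripts/tf-wrapper.sh` the Jenkinsfile calls is not part of this diff. As a rough illustration of the workspace and per-branch tfvars behaviour described above, here is a hypothetical minimal version; the `-a plan|apply|destroy` interface, `plan.out`, `output.json` and the `master.tfvars`/`testing-defaults.tfvars` names appear elsewhere in this PR, but everything else (including the real script's contents) is an assumption.

```bash
#!/bin/bash
# Hypothetical sketch of scripts/tf-wrapper.sh - the real script is not in this diff.
# Selects a terraform workspace per branch and applies branch-specific tfvars.
set -euo pipefail

ACTION=""
while getopts "a:" opt; do
  case "${opt}" in
    a) ACTION="${OPTARG}" ;;
    *) echo "usage: $0 -a plan|apply|destroy" >&2; exit 1 ;;
  esac
done

# BRANCH_NAME is provided by the Jenkins Multibranch job.
BRANCH="${BRANCH_NAME:-master}"
WORKSPACE="$(echo "${BRANCH}" | tr '/' '-')"

# master gets its own tfvars, every other branch uses the test defaults.
if [ "${BRANCH}" = "master" ]; then
  VAR_FILE="master.tfvars"
else
  VAR_FILE="testing-defaults.tfvars"
fi

terraform init -input=false
terraform workspace select "${WORKSPACE}" || terraform workspace new "${WORKSPACE}"

case "${ACTION}" in
  plan)
    terraform plan -input=false -var-file="${VAR_FILE}" -out=plan.out
    ;;
  apply)
    terraform apply -input=false plan.out
    terraform output -json > output.json
    ;;
  destroy)
    terraform destroy -auto-approve -var-file="${VAR_FILE}"
    ;;
  *)
    echo "unknown action: '${ACTION}'" >&2
    exit 1
    ;;
esac
```

The Jenkinsfile stages call the wrapper with `-a plan`, `-a apply` and `-a destroy`; the `output.json` written on apply is what gets stashed and later fed to the InSpec profile.
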
### Next steps to use this in your own project
* Add `demo-aws-creds` credential set (uses [AWS Credentials](https://plugins.jenkins.io/aws-credentials) plugin)
* Update Terraform to actually deploy an EC2 instance (for demo purposes this only builds a VPC, which keeps costs at zero and cycle times short for pipeline experimentation; adding an EC2 instance is trivial)
* Use an AWS Auto Scaling group to replace running EC2 instances with instances built from the master branch `app` AMI
* Use AMI tags to ensure the `app` AMI matches the branch (use the SHA-1 of `app/app.json` to search for the AMI in Terraform; requires some tfvars templating/injection, a hypothetical sketch of which follows this list)
* Extend `chef/inspec` stage to test AWS EC2
* Add a Selenium or other web test stage

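For the AMI-tag item above, one possible shape of the tfvars injection (a hypothetical sketch only; `app_ami` is a made-up Terraform variable name, while the `SHA`/`AMI` tags come from `app/app.json`):

```bash
#!/bin/bash
# Hypothetical sketch of the tfvars injection: find the app AMI whose SHA tag
# matches the current app/app.json and pin the test stack to it.
set -euo pipefail

APP_SHA="$(sha1sum app/app.json | awk '{print $1}')"
APP_AMI="$(aws ec2 describe-images --owners self \
  --filters "Name=tag:SHA,Values=${APP_SHA}" "Name=tag:AMI,Values=app" \
  --query 'Images[0].ImageId' --output text)"

# app_ami is a made-up Terraform variable; the real config would need to declare it.
echo "app_ami = \"${APP_AMI}\"" >> testing-defaults.tfvars
```
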
If you want a simple Dockerized Jenkins setup to host this, I used [Jenkins 201](https://github.com/jenkins201/jenkins-container); there's also example Job DSL [here](https://github.com/jenkins201/jenkins-container/blob/master/jobs/packer_terraform.groovy).

### Further info
Some of the scripts in this pipeline ([build.sh](scripts/build.sh) & [common.sh](scripts/common.sh)) are based on an incomplete [packer/AMI](https://github.com/CircleCI-Public/circleci-packer) example from [CircleCI](
https://circleci.com/blog/how-to-build-immutable-infrastructure-with-packer-and-circleci-workflows/).
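
`build.sh` itself is not shown in this diff either; the sketch below is only a hypothetical reading of the SHA-tagging behaviour listed under Features (the `SHA` and `AMI` tags come from the packer templates, while the lookup-and-skip logic is an assumption, not the script's actual contents).

```bash
#!/bin/bash
# Hypothetical sketch of scripts/build.sh - the real script is not in this diff.
# Usage: ./scripts/build.sh <template-dir> <ami-name>, e.g. ./scripts/build.sh base base
set -euo pipefail

TEMPLATE_DIR="$1"
AMI_NAME="$2"
TEMPLATE="${TEMPLATE_DIR}/${AMI_NAME}.json"

# AMIs are tagged with the SHA-1 of their packer template (via the SHA env var),
# so an unchanged template does not trigger a rebuild.
SHA="$(sha1sum "${TEMPLATE}" | awk '{print $1}')"

EXISTING_AMI="$(aws ec2 describe-images --owners self \
  --filters "Name=tag:SHA,Values=${SHA}" "Name=tag:AMI,Values=${AMI_NAME}" \
  --query 'Images[0].ImageId' --output text)"

if [ -n "${EXISTING_AMI}" ] && [ "${EXISTING_AMI}" != "None" ]; then
  echo "AMI ${EXISTING_AMI} already exists for ${TEMPLATE} (SHA ${SHA}), skipping build"
  exit 0
fi

# The real script presumably also resolves AMI_BASE for app/app.json
# (e.g. from manifest-base.json with jq); omitted here.
SHA="${SHA}" packer build "${TEMPLATE}"
```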


68 changes: 68 additions & 0 deletions jenkinsfile-examples/aws-packer-terraform-inspec/app/app.json
@@ -0,0 +1,68 @@
{
"_readme": [
"The AMI used is generated by the packer template base/base.json",
""
],
"variables": {
"ami_name": "app",
"ami_base": "{{env `AMI_BASE`}}",
"ami_sha": "{{env `SHA`}}",
"aws_access_key": "{{env `AWS_ACCESS_KEY`}}",
"aws_secret_key": "{{env `AWS_SECRET_ACCESS_KEY`}}"
},
"builders": [
{
"ami_description": "{{user `ami_name`}} AMI",
"ami_name": "{{user `ami_name`}} {{timestamp}}",
"ami_regions": [
"us-east-1"
],
"instance_type": "t1.micro",
"region": "us-east-1",
"run_tags": {
"ami-create": "{{user `ami_name`}}"
},
"source_ami": "{{user `ami_base`}}",
"ssh_username": "ubuntu",
"subnet_id": "",
"tags": {
"OS_Name": "Ubuntu",
"OS_Version": "16.04",
"SHA": "{{user `ami_sha`}}",
"AMI": "{{user `ami_name`}}"
},
"type": "amazon-ebs",
"vpc_id": ""
}
],
"post-processors": [
{
"output": "manifest-app.json",
"strip_path": true,
"type": "manifest"
}
],
"provisioners": [
{
"inline": [
"while [ ! -f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done"
],
"type": "shell"
},
{
"execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}",
"scripts": [
"./app/tasks/apache.sh"
],
"type": "shell"
},
{
"execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}",
"scripts": [
"./base/tasks/cleanup.sh",
"./base/tasks/debug.sh"
],
"type": "shell"
}
]
}
@@ -0,0 +1,11 @@
#!/bin/bash
set -e

echo '---- install Apache'

DEBIAN_FRONTEND=noninteractive apt-get -y update
DEBIAN_FRONTEND=noninteractive apt-get -y install apache2

cat > /var/www/html/index.html <<HERE
Plain text FTW!
HERE
@@ -0,0 +1,3 @@
# Example InSpec Profile

This example shows the implementation of an InSpec profile.
@@ -0,0 +1,18 @@

# output.json is the Terraform output stashed by the pipeline and copied into this
# profile's files/ directory before `inspec exec` runs.
content = inspec.profile.file("output.json")
params = JSON.parse(content)

vpc_id = params['main_vpc_id']['value']
dmz_vpc_id = params['dmz_vpc_id']['value']

describe aws_vpc(vpc_id) do
its('state') { should eq 'available' }
# as we vary these based on the branch (master.tfvars & testing-defaults.tfvars)
# we can't check the cidr without exporting the CIDR via output.json
# its('cidr_block') { should eq '172.18.0.0/16' }
end

describe aws_vpc(dmz_vpc_id) do
its('state') { should eq 'available' }
# its('cidr_block') { should eq '172.19.0.0/16' }
end
@@ -0,0 +1,3 @@
---
lockfile_version: 1
depends: []
@@ -0,0 +1,8 @@
name: aws-security
title: InSpec Profile
maintainer: The Authors
copyright: The Authors
copyright_email: [email protected]
license: Apache-2.0
summary: An InSpec Compliance Profile
version: 0.1.0
61 changes: 61 additions & 0 deletions jenkinsfile-examples/aws-packer-terraform-inspec/base/base.json
@@ -0,0 +1,61 @@
{
"_readme": [
"Ubuntu 16.04 (xenial) LTS",
"AMI: amd64 hvm:ebs-ssd",
" see the *_ami variables below for specific names",
"base_ami should be latest Ubuntu AMI",
"curl -sSL 'https://cloud-images.ubuntu.com/query/xenial/server/released.current.txt' | grep us-east-1 | grep hvm | grep -e 'ebs' | awk '{print $8;}'",
""
],
"variables": {
"ami_name": "baseline-ubuntu-1604",
"ami_base": "ami-a4dc46db",
"ami_sha": "{{env `SHA`}}"
},
"builders": [
{
"ami_description": "{{user `ami_name`}} AMI",
"ami_name": "{{user `ami_name`}} {{timestamp}}",
"ami_regions": [
"us-east-1"
],
"instance_type": "t1.micro",
"region": "us-east-1",
"run_tags": {
"ami-create": "{{user `ami_name`}}"
},
"source_ami": "{{user `ami_base`}}",
"ssh_username": "ubuntu",
"tags": {
"OS_Name": "Ubuntu",
"OS_Version": "16.04",
"SHA": "{{user `ami_sha`}}",
"AMI": "{{user `ami_name`}}"
},
"type": "amazon-ebs"
}
],
"post-processors": [
{
"output": "manifest-base.json",
"strip_path": true,
"type": "manifest"
}
],
"provisioners": [
{
"inline": [
"while [ ! -f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done"
],
"type": "shell"
},
{
"scripts": [
"./base/tasks/baseline.sh",
"./base/tasks/cleanup.sh",
"./base/tasks/debug.sh"
],
"type": "shell"
}
]
}