# .github/workflows/terraform-local-workflow.yml
name: "Local development workflow"
on:
workflow_call:
inputs:
environment_name:
required: false
default: dev
type: string
aws_region:
required: false
default: eu-west-2
type: string
stack:
required: false
type: string
run_formatting:
required: false
type: string
run_tflint:
required: false
type: string
run_light_sast:
required: false
type: string
run_deep_sast: # requires inputs.plan == 'true'.
required: false
type: string
plan: # requires AWS|Azure credentials to be configured.
required: false
type: string
secrets:
      # NOTE: GITHUB_TOKEN is a system-reserved secret name and must not be declared
      # under `secrets:` for a workflow_call trigger; it is passed to reusable
      # workflows automatically, so the `secrets.GITHUB_TOKEN` references below still work.
AWS_ACCESS_KEY_ID:
required: false
AWS_SECRET_ACCESS_KEY:
required: false
AWS_ACCOUNT_ID:
required: false
AWS_ROLE_NAME:
required: false
AZURE_SUBSCRIPTION_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_ID:
required: false
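# Example caller job (a minimal sketch for illustration only; the repository path,
# ref and caller-side secret names are assumptions, not defined by this file). The
# catthehacker/ubuntu:act-22.04 job containers suggest this workflow is intended to
# be run locally with nektos/act, but it is invoked like any other reusable workflow:
#
#   jobs:
#     terraform:
#       uses: ukhsa-internal/<repo>/.github/workflows/terraform-local-workflow.yml@main
#       with:
#         environment_name: dev
#         aws_region: eu-west-2
#         run_formatting: "true"
#         run_tflint: "true"
#         run_light_sast: "true"
#         plan: "true"
#       secrets:
#         AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
#         AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
#         AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
#         AWS_ROLE_NAME: ${{ secrets.AWS_ROLE_NAME }}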
jobs:
check-all-stacks:
name: Format, Lint and SAST Scan Terraform code
runs-on: ubuntu-latest
if: >-
${{ inputs.stack == '' &&
(inputs.run_formatting == 'true' ||
inputs.run_tflint == 'true' ||
inputs.run_light_sast == 'true' )}}
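    # The repo-wide checks in this job only run when no single stack is targeted and
    # at least one of the formatting / lint / light SAST toggles is enabled.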
container:
image: ghcr.io/ukhsa-internal/devops-terraform-ci:latest
steps:
- uses: actions/checkout@v4
- name: Terraform Formatting
if: ${{ inputs.run_formatting == 'true' }}
run: terraform fmt --recursive --check
- name: Terraform Linting with tflint
if: ${{ inputs.run_tflint == 'true' }}
run: tflint --recursive --disable-rule terraform_required_version --disable-rule terraform_required_providers
- name: SAST Scanning with Checkov
if: ${{ inputs.run_light_sast == 'true' }}
uses: ukhsa-Internal/devops-github-actions/.github/actions/terraform-checkov-scan@main
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
define_matrix:
name: Define directory matrix
runs-on: ubuntu-latest
container:
image: catthehacker/ubuntu:act-22.04
if: ${{ inputs.plan == 'true' }}
outputs:
directories: "${{ steps.filter_directories.outputs.filtered_directories }}"
steps:
- uses: actions/checkout@v4
- name: Determine order to run Terraform stacks
uses: >-
ukhsa-Internal/devops-github-actions/.github/actions/terraform-dependency-sort@main
id: directories
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Filter directories based on the set stack
id: filter_directories
run: |
stack="${{ inputs.stack }}"
directories='${{ steps.directories.outputs.json_directory_list }}'
# Filter the directories based on the stack input
if [ -n "$stack" ]; then
filtered_directories=$(echo $directories | jq --arg stack "$stack" '[.[] | select(endswith($stack))]')
# Check if the stack exists within directories
if [ "$(echo $filtered_directories | jq 'length')" -eq 0 ]; then
echo "No stack found for '$stack'. Exiting."
exit 1
fi
else
# If stack input is empty, continue with original ordered list of stacks.
filtered_directories=$directories
fi
# Ensure the output is in compact JSON format
filtered_directories=$(echo $filtered_directories | jq -c .)
echo "filtered_directories=$filtered_directories" >> $GITHUB_OUTPUT
build:
name: "Build Infrastructure - ${{ matrix.directory }}"
runs-on: ubuntu-latest
container:
image: catthehacker/ubuntu:act-22.04
if: ${{ inputs.plan == 'true' }}
defaults:
run:
shell: bash
needs:
- define_matrix
strategy:
matrix:
directory: "${{ fromJSON(needs.define_matrix.outputs.directories) }}"
max-parallel: 1
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
sparse-checkout: |
${{ matrix.directory }}
environment
globals.tf
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
if: >-
${{env.AWS_SECRET_ACCESS_KEY != '' &&
env.AWS_ACCESS_KEY_ID != '' }}
env:
AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
AWS_ACCOUNT_ID: "${{ secrets.AWS_ACCOUNT_ID }}"
AWS_ROLE_NAME: "${{ secrets.AWS_ROLE_NAME }}"
with:
aws-region: "${{ inputs.aws_region }}"
aws-access-key-id: "${{ env.AWS_ACCESS_KEY_ID }}"
aws-secret-access-key: "${{ env.AWS_SECRET_ACCESS_KEY }}"
role-to-assume: "arn:aws:iam::${{ env.AWS_ACCOUNT_ID }}:role/${{ env.AWS_ROLE_NAME }}"
- name: Configure Azure Credentials
uses: azure/login@v2
if: >-
${{ env.AZURE_CLIENT_ID != '' &&
env.AZURE_TENANT_ID != '' &&
env.AZURE_SUBSCRIPTION_ID != '' }}
env:
AZURE_CLIENT_ID: "${{ secrets.AZURE_CLIENT_ID }}"
AZURE_TENANT_ID: "${{ secrets.AZURE_TENANT_ID }}"
AZURE_SUBSCRIPTION_ID: "${{ secrets.AZURE_SUBSCRIPTION_ID }}"
with:
client-id: "${{ env.AZURE_CLIENT_ID }}"
tenant-id: "${{ env.AZURE_TENANT_ID }}"
subscription-id: "${{ env.AZURE_SUBSCRIPTION_ID }}"
- name: Copy required files from root directory
env:
DIRECTORY: "${{ matrix.directory }}"
run: |
files_to_copy=("providers.tf" "terraform.tf")
for FILE in "${files_to_copy[@]}"; do
if [[ ! -f "$DIRECTORY"/"$FILE" ]]; then
cp "$FILE" "$DIRECTORY"/
else
echo "NOTE - $(basename "$DIRECTORY") has its own "$FILE" file. Not copying "$FILE" from root"
fi
done
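      # providers.tf and terraform.tf are shared from the repository root unless a
      # stack directory provides its own copies.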
- name: Find Terraform version
uses: ukhsa-Internal/devops-github-actions/.github/actions/parse-terraform-version@main
id: terraform_version
with:
tf_file: "${{ matrix.directory }}/terraform.tf"
github-token: ${{ secrets.GITHUB_TOKEN }}
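      # The parse-terraform-version action is assumed to read the required_version
      # constraint (e.g. `required_version = "~> 1.9"`) from the stack's terraform.tf
      # and expose it as the tf_version output consumed by setup-terraform below.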
- name: Set up Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: "${{ steps.terraform_version.outputs.tf_version }}"
- name: Determine Backend Type
id: backend
working-directory: "${{ matrix.directory }}"
run: |
backend_type=$(grep -oP 'backend\s+"?\K[^"\s]+' ./backend.tf)
echo "backend_type=$backend_type" >> $GITHUB_OUTPUT
- name: Terraform Init with AWS S3 Backend
if: ${{ steps.backend.outputs.backend_type == 's3' }}
working-directory: "${{ matrix.directory }}"
env:
AWS_ACCOUNT_ID: "${{ secrets.AWS_ACCOUNT_ID }}"
AWS_REGION: "${{ inputs.aws_region }}"
ENVIRONMENT_NAME: "${{ inputs.environment_name }}"
DIRECTORY: "${{ matrix.directory }}"
run: |
state_name=$(basename "$DIRECTORY")
terraform init \
-backend-config=dynamodb_table="${AWS_REGION}"-state-locks \
-backend-config=bucket="${AWS_ACCOUNT_ID}"-"${AWS_REGION}"-state \
-backend-config=key="${ENVIRONMENT_NAME}"/"$state_name"/terraform.tfstate
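      # With hypothetical values AWS_ACCOUNT_ID=123456789012, aws_region=eu-west-2,
      # environment_name=dev and a stack directory ending in "vpc", the init above
      # resolves to bucket=123456789012-eu-west-2-state, key=dev/vpc/terraform.tfstate
      # and dynamodb_table=eu-west-2-state-locks.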
- name: Terraform Init with Azure Backend
if: ${{ steps.backend.outputs.backend_type == 'azurerm' }}
working-directory: "${{ matrix.directory }}"
env:
AZURE_CLIENT_ID: "${{ secrets.AZURE_CLIENT_ID }}"
AZURE_TENANT_ID: "${{ secrets.AZURE_TENANT_ID }}"
AZURE_SUBSCRIPTION_ID: "${{ secrets.AZURE_SUBSCRIPTION_ID }}"
ENVIRONMENT_NAME: "${{ inputs.environment_name }}"
DIRECTORY: "${{ matrix.directory }}"
run: |
          # TODO: Update this to actually init with an Azure backend
          echo ":x: Using an Azure backend is not currently supported!" >> "$GITHUB_STEP_SUMMARY"
          exit 1
- name: Find Terraform variables
id: variables
env:
DIRECTORY: "${{ matrix.directory }}"
ENVIRONMENT_NAME: "${{ inputs.environment_name }}"
run: |
find_app_var_files() {
local dir=$1
local state_name=$(basename "$dir")
local tfvars="./$dir/tfvars/${state_name}-${ENVIRONMENT_NAME}.tfvars"
local json="./$dir/tfvars/${state_name}-${ENVIRONMENT_NAME}.tfvars.json"
if [[ -f "$tfvars" ]]; then
echo "-var-file=$(readlink -f $tfvars)"
elif [[ -f "$json" ]]; then
echo "-var-file=$(readlink -f $json)"
fi
}
find_env_var_files() {
local tfvars="./environment/${ENVIRONMENT_NAME}.tfvars"
local json="./environment/${ENVIRONMENT_NAME}.tfvars.json"
if [[ -f "$tfvars" ]]; then
echo "-var-file=$(readlink -f $tfvars)"
elif [[ -f "$json" ]]; then
echo "-var-file=$(readlink -f $json)"
fi
}
global_vars_file="-var-file=$(readlink -f globals.tfvars)"
app_var_file=$(find_app_var_files "$DIRECTORY")
env_var_file=$(find_env_var_files)
full_variable_flags="$global_vars_file $app_var_file $env_var_file"
echo "tf_vars=$full_variable_flags" >> $GITHUB_OUTPUT
- name: Terraform Plan
working-directory: "${{ matrix.directory }}"
env:
ENVIRONMENT_NAME: "${{ inputs.environment_name }}"
TERRAFORM_VARIABLES: "${{ steps.variables.outputs.tf_vars }}"
run: |
terraform plan -no-color -input=false -out=tfplan -compact-warnings ${TERRAFORM_VARIABLES}
if [ -s tfplan ]; then
terraform show -json tfplan | jq > tfplan.json
else
echo "No changes detected in the Terraform plan."
echo '{}' > tfplan.json
fi
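      # The tfplan.json produced above is the artefact that the deep Checkov scan
      # below analyses when run_deep_sast is enabled.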
- name: SAST Scanning with Checkov (Deep)
uses: ukhsa-Internal/devops-github-actions/.github/actions/terraform-checkov-scan@main
if: >-
${{ inputs.plan == 'true' &&
inputs.run_deep_sast == 'true' }}
with:
scan_type: deep
tfplan_file: "${{ matrix.directory }}/tfplan.json"
scan_directory: "${{ matrix.directory }}"
github-token: ${{ secrets.GITHUB_TOKEN }}