# cdh-terraform/terraform.sh — executable Bash script (145 lines, 3.8 KiB).
# NOTE(review): the text above the shebang in the extracted copy was
# file-browser metadata, not shell code; preserved here as a comment so the
# file parses.
#!/bin/bash
# Wrapper that runs terraform for a given app/env/region/module with the
# right AWS profile, assume-role ARN and S3/DynamoDB remote-state backend.
#
# Usage: terraform.sh PROJECT ENVIRONMENT REGION MAIN-MODULE COMMAND [options]
set -eo pipefail
# Resolve the directory this script lives in and work from there, so the
# relative "apps/..." paths below are stable regardless of the caller's cwd.
SCRIPT_DIR=$(
cd "$(dirname "$0")"
pwd
)
cd "${SCRIPT_DIR}"
# BUG FIX: the usage string lists five required positional arguments
# (COMMAND included), but the original guard only required four, letting an
# empty $command fall through to a bare "terraform ${command}" call later.
if [ "$#" -lt 5 ]; then
echo "Usage: $0 PROJECT ENVIRONMENT REGION MAIN-MODULE COMMAND [options]" >&2
exit 1
fi
# The module must exist under apps/<project>/<main-module>.
if [ ! -d "apps/$1/$4" ]; then
echo "ERROR: Directory apps/$1/$4 does not exist" >&2
exit 1
fi
# Positional arguments (validated above).
app=$1          # PROJECT
env=$2          # ENVIRONMENT (e.g. dev / prod)
region=$3       # AWS region
main_module=$4  # module directory under apps/$app/
command=$5      # terraform sub-command (plan / apply / init / state / ...)
# NOTE(review): assigning "$@" to a scalar joins the remaining args with
# spaces; options with embedded spaces would break. An array would be safer,
# but every use-site passes it unquoted on purpose, so it is kept as-is.
options=${@:6}
# Values shared with the terraform configuration; TF_VAR_* variables are
# picked up by terraform automatically.
export profile="${app}-terraform"
export AWS_DEFAULT_REGION="${region}"
export TF_VAR_remote_state_bucket="${app}-terraform-backend-${env}-${region}-bucket"
export FULL_DYNAMO_TABLE="${app}-terraform-state-${env}-${region}-lock"
export TF_VAR_region="${region}"
# Shared provider plugin cache avoids re-downloading providers per module.
export TF_PLUGIN_CACHE_DIR="$HOME/.terraform.d/plugin-cache"
# BUG FIX: TF_VAR_profile was assigned without "export", so terraform never
# saw it (it is not referenced anywhere else in this script either); export
# it like the sibling TF_VAR_* variables.
export TF_VAR_profile="${env}"
# Map the app name to its AWS profile and the IAM role to assume; any app
# other than cdh-maas is rejected.
case "$app" in
cdh-maas)
export AWS_PROFILE="${app}-terraform"
# NOTE(review): "xxx" looks like a placeholder account id — confirm before
# any prod run.
PROD_AWS_ACCOUNT="xxx"
DEV_AWS_ACCOUNT="292620633648"
if [ "$env" == "prod" ]; then
role_account="$PROD_AWS_ACCOUNT"
else
role_account="$DEV_AWS_ACCOUNT"
fi
ASSUME_ROLE="arn:aws:iam::${role_account}:role/cdh-maas-terraform"
;;
*)
echo "ERROR: Unknown app: $app" >&2
exit 1
;;
esac
# Derive the regional "hub" label consumed by the terraform modules.
case "$region" in
cn-north-1) hub="cn" ;;
us-east-1) hub="us" ;;
eu-west-1) hub="emea" ;;
*) hub="unknown" ;;
esac
# Log the effective configuration before doing anything destructive.
echo "AWS profile: $AWS_PROFILE"
echo "Assume role: $ASSUME_ROLE"
echo "Backend $TF_VAR_remote_state_bucket"
# BUG FIX: $AWS_ACCOUNT was never assigned anywhere in this script, so the
# original message always printed an empty account id. Derive the account
# from the role ARN instead (arn:aws:iam::<account>:role/...).
aws_account="${ASSUME_ROLE#arn:aws:iam::}"
aws_account="${aws_account%%:*}"
echo "Running terraform $main_module in AWS Account ${aws_account}: $command"
cd "apps/${app}/${main_module}"
# Start from a clean slate so stale provider binaries and lock files do not
# leak between runs.
rm -f -R .terraform
rm -f .terraform.lock.hcl
# --- Terraform dispatch ------------------------------------------------------
# Three execution modes, selected by MAIN-MODULE:
#   "init"  : bootstrap module — runs against the default backend and passes
#             the backend bucket / lock-table names in as -var values,
#             presumably so this module CREATES the backend resources
#             themselves (TODO confirm against apps/<app>/init).
#   "local" : plain run with no backend or role wiring.
#   default : regular module — init against the S3/DynamoDB remote backend,
#             then select a per-app/region/env workspace before running.
# ${command} and ${options} are intentionally unquoted throughout so they
# undergo word splitting ($options holds several space-separated flags).
if [ "$main_module" == "init" ]; then
terraform init -upgrade
# "select || new" creates the workspace on first run; the second select is
# redundant when the first succeeded, but harmless.
terraform workspace select "$app-$region-$env" || terraform workspace new "$app-$region-$env"
terraform workspace select "$app-$region-$env"
terraform workspace list
if [ "$command" != "init" ]; then
# NOTE(review): "state_lock_table_name=LockID" is suspicious — LockID is
# conventionally the DynamoDB *key attribute* name, not a table name (the
# actual table goes in via full_dynamo_table). Verify the module's variable
# semantics.
terraform ${command} \
-input=false \
-refresh=true \
-var="project=${app}" \
-var="environment=${env}" \
-var="region=${region}" \
-var="main_module=${main_module}" \
-var="profile=${AWS_PROFILE}" \
-var="role_arn=${ASSUME_ROLE}" \
-var="state_lock_table_name=LockID" \
-var="full_dynamo_table=${FULL_DYNAMO_TABLE}" \
-var="s3_bucket_name=${TF_VAR_remote_state_bucket}" \
-var="hub=${hub}" \
-var-file="../terraform.tfvars" \
${options}
fi
elif [ "$main_module" == "local" ]; then
terraform init -upgrade
if [ "$command" != "init" ]; then
terraform ${command} \
-input=false \
-refresh=true \
-var "project=${app}" \
-var "environment=${env}" \
-var "region=${region}" \
-var "main_module=${main_module}" \
-var "hub=${hub}" \
-var-file="../terraform.tfvars" \
${options}
fi
else
# Remote-state run: configure the S3 backend with DynamoDB locking.
# NOTE(review): the s3 backend's "role_arn" backend-config was removed in
# Terraform 1.6 (replaced by an assume_role block) — confirm the terraform
# version this script targets still accepts it.
terraform init \
-upgrade \
-backend=true \
-backend-config "bucket=${TF_VAR_remote_state_bucket}" \
-backend-config "key=${main_module}.tfstate" \
-backend-config "dynamodb_table=${FULL_DYNAMO_TABLE}" \
-backend-config "workspace_key_prefix=environment" \
-backend-config "region=${region}" \
-backend-config "profile=${AWS_PROFILE}" \
-backend-config "role_arn=${ASSUME_ROLE}"
terraform workspace select "$app-$region-$env" || terraform workspace new "$app-$region-$env"
terraform workspace select "$app-$region-$env"
terraform workspace list
# "state" sub-commands take no -var flags, so they are passed straight
# through with only the caller-supplied options.
if [ "$command" == "state" ]; then
terraform ${command} ${options}
fi
if [ "$command" != "init" ] && [ "$command" != "state" ]; then
terraform ${command} \
-var "region=${region}" \
-var "environment=${env}" \
-var "project=${app}" \
-var "role_arn=${ASSUME_ROLE}" \
-var "profile=${AWS_PROFILE}" \
-var "main_module=${main_module}" \
-var "hub=${hub}" \
-var-file "../terraform.tfvars" \
${options}
fi
fi
# Leave the module directory clean: drop the provider cache and the
# dependency lock file generated during this run.
rm -rf -- .terraform .terraform.lock.hcl