diff --git a/Jenkinsfile b/Jenkinsfile
index e9cb5cd..757a881 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -58,8 +58,9 @@
         // CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
         AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
         AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
+        AWS_S3_BUCKET="coco-dynatrace-tfstate"
+        AWS_S3_REGION="eu-central-1"
-        TERRAFORM_RESOURCES="dynatrace_management_zone"
 
         //EMEA PROD
diff --git a/export.py b/export.py
index 66942b3..ae89568 100644
--- a/export.py
+++ b/export.py
@@ -162,7 +162,10 @@ def createResourceDict():
 
 # [AA, EW 2022.01.17] Copy main.tf into the target folder
 def copyMainTemplate():
     shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf")
-    replacedata(targetFolder + "main.tf", {"{$env}":env, "{$timestamp}":timestamp})
+    replacedata(targetFolder + "main.tf", {"{$env}":env,
+                                           "{$timestamp}":timestamp,
+                                           "{$S3_BUCKET}":str(os.getenv("AWS_S3_BUCKET")),
+                                           "{$S3_REGION}":str(os.getenv("AWS_S3_REGION"))})
 
 # [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is
@@ -275,7 +278,7 @@ if(len(sys.argv) == 2):
 
         s3 = boto3.client('s3')
         with open(outputFolder+"/"+timestamp + "_" +env+'.zip', 'rb') as data:
-            s3.upload_fileobj(data, 'coco-dynatrace-tfstate', 'backups/'+timestamp + "_" +env+'.zip') #TODO: Make s3 bucket name configurable over environment variables
+            s3.upload_fileobj(data, str(os.getenv("AWS_S3_BUCKET")), 'backups/'+timestamp + "_" +env+'.zip')
 
         print("Finished!")
         sys.exit(0)
diff --git a/templates/main.tf b/templates/main.tf
index dd494dd..6b2132f 100644
--- a/templates/main.tf
+++ b/templates/main.tf
@@ -7,9 +7,9 @@ terraform {
   }
 
   backend "s3" {
-    bucket         = "coco-dynatrace-tfstate"
+    bucket         = "{$S3_BUCKET}"
     key            = "backup/{$env}/{$timestamp}/terraform.tfstate"
-    region         = "eu-central-1"
+    region         = "{$S3_REGION}"
     dynamodb_table = "coco-dynatrace-tfstate"
     encrypt        = true
   }
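
Note: the copyMainTemplate hunk above relies on a replacedata helper that is defined elsewhere in export.py and not shown in this diff. A minimal sketch, assuming it does a plain search-and-replace of each placeholder over the file contents:

# Sketch of the replacedata helper the hunk above depends on; the real
# implementation lives elsewhere in export.py and may differ.
def replacedata(path, replacements):
    with open(path, "r") as f:
        content = f.read()
    # Substitute each placeholder (e.g. "{$S3_BUCKET}") with its value.
    for placeholder, value in replacements.items():
        content = content.replace(placeholder, value)
    with open(path, "w") as f:
        f.write(content)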
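One caveat with str(os.getenv(...)) as used in both export.py hunks: if AWS_S3_BUCKET or AWS_S3_REGION is not set in the Jenkins environment, os.getenv returns None and str() turns it into the literal string "None", so the rendered main.tf and the boto3 upload would silently target a bucket named "None". A stricter lookup is sketched below; require_env is a hypothetical helper, not part of this diff:

import os

# Hypothetical fail-fast lookup (assumption, not part of the diff): raise
# instead of letting a missing variable degrade into the string "None".
def require_env(name):
    value = os.getenv(name)
    if value is None:
        raise RuntimeError("Required environment variable %s is not set" % name)
    return value

bucket = require_env("AWS_S3_BUCKET")  # set by the Jenkinsfile, e.g. "coco-dynatrace-tfstate"
region = require_env("AWS_S3_REGION")  # e.g. "eu-central-1"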