added new env variable to jenkins

master
ermisw 2022-01-28 16:12:42 +01:00
parent 2c8dfbcd31
commit 2da97c6f12
3 changed files with 9 additions and 5 deletions

3
Jenkinsfile vendored
View File

@ -58,8 +58,9 @@
// CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
AWS_S3_BUCKET="coco-dynatrace-tfstate"
AWS_S3_REGION="eu-central-1"
TERRAFORM_RESOURCES="dynatrace_management_zone"
//EMEA PROD

View File

@ -162,7 +162,10 @@ def createResourceDict():
# [AA, EW 2022.01.17] Copy main.tf into the target folder
def copyMainTemplate():
    """Copy main.tf from the templates folder into the target folder and
    substitute the {$env}, {$timestamp}, {$S3_BUCKET} and {$S3_REGION}
    placeholders via replacedata().

    Falls back to the historical hard-coded bucket/region values when the
    AWS_S3_BUCKET / AWS_S3_REGION environment variables are unset, so the
    rendered template never contains the literal string "None"
    (str(os.getenv(...)) would produce "None" for a missing variable).
    """
    shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf")
    replacedata(targetFolder + "main.tf",
                {"{$env}": env,
                 "{$timestamp}": timestamp,
                 # defaults match the values that were hard-coded before
                 # the bucket/region became configurable
                 "{$S3_BUCKET}": os.getenv("AWS_S3_BUCKET", "coco-dynatrace-tfstate"),
                 "{$S3_REGION}": os.getenv("AWS_S3_REGION", "eu-central-1")})
# [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is
@ -275,7 +278,7 @@ if(len(sys.argv) == 2):
# Upload the zipped state backup to S3, then exit successfully.
# The bucket name is read from the AWS_S3_BUCKET environment variable,
# falling back to the historical hard-coded bucket so existing jobs keep
# working when the variable is unset.
#
# Fix: the previous line was unbalanced —
#   str(os.getenv("AWS_S3_BUCKET", 'backups/'+...) ...
# the ')' after "AWS_S3_BUCKET" closed getenv, str() swallowed the object
# key as a second positional argument, and the upload_fileobj( call was
# never closed, which is a SyntaxError.
s3 = boto3.client('s3')
bucket = os.getenv("AWS_S3_BUCKET", "coco-dynatrace-tfstate")
archive_name = timestamp + "_" + env + '.zip'
with open(outputFolder + "/" + archive_name, 'rb') as data:
    s3.upload_fileobj(data, bucket, 'backups/' + archive_name)
print("Finished!")
sys.exit(0)

View File

@ -7,9 +7,9 @@ terraform {
}
backend "s3" {
bucket = "coco-dynatrace-tfstate"
bucket = "{$S3_BUCKET}"
key = "backup/{$env}/{$timestamp}/terraform.tfstate"
region = "eu-central-1"
region = "{$S3_REGION}"
dynamodb_table = "coco-dynatrace-tfstate"
encrypt = true
}