added new env variable to Jenkins
parent 2c8dfbcd31
commit 2da97c6f12
@@ -58,8 +58,9 @@
 // CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
 AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
 AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
+AWS_S3_BUCKET="coco-dynatrace-tfstate"
+AWS_S3_REGION="eu-central-1"

 TERRAFORM_RESOURCES="dynatrace_management_zone"

 //EMEA PROD
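For context: the backup script reads these values with os.getenv, so the names here must match what the script expects. A minimal sketch of that hand-off, assuming the variable names stay as declared above; the fallback defaults are illustrative only, not part of the actual script:

import os

# Pick up the bucket and region that the Jenkins environment block exports.
# The defaults mirror the previously hard-coded values (assumption).
AWS_S3_BUCKET = os.getenv("AWS_S3_BUCKET", "coco-dynatrace-tfstate")
AWS_S3_REGION = os.getenv("AWS_S3_REGION", "eu-central-1")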
@@ -162,7 +162,10 @@ def createResourceDict():
 # [AA, EW 2022.01.17] Copy main.tf into the target folder
 def copyMainTemplate():
     shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf")
-    replacedata(targetFolder + "main.tf", {"{$env}":env, "{$timestamp}":timestamp})
+    replacedata(targetFolder + "main.tf", {"{$env}":env,
+                                           "{$timestamp}":timestamp,
+                                           "{$S3_BUCKET}": str(os.getenv("AWS_S3_BUCKET")),
+                                           "{$S3_REGION}":str(os.getenv("AWS_S3_REGION"))})


 # [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is
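replacedata itself is not part of this diff. A plausible sketch, assuming it does a simple read-substitute-write over the placeholder dictionary passed in (hypothetical implementation, not the author's code):

# Hypothetical sketch of replacedata: read the file, substitute each
# placeholder with its value, write the result back in place.
def replacedata(path, replacements):
    with open(path, "r") as f:
        content = f.read()
    for placeholder, value in replacements.items():
        content = content.replace(placeholder, value)
    with open(path, "w") as f:
        f.write(content)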
@@ -275,7 +278,7 @@ if(len(sys.argv) == 2):
     s3 = boto3.client('s3')

     with open(outputFolder+"/"+timestamp + "_" +env+'.zip', 'rb') as data:
-        s3.upload_fileobj(data, 'coco-dynatrace-tfstate', 'backups/'+timestamp + "_" +env+'.zip') #TODO: Make s3 bucket name configurable over environment variables
+        s3.upload_fileobj(data, str(os.getenv("AWS_S3_BUCKET")), 'backups/'+timestamp + "_" +env+'.zip')

     print("Finished!")
     sys.exit(0)
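One caveat with the new call: str(os.getenv("AWS_S3_BUCKET")) returns the string "None" when the variable is unset, so the upload would silently target a bucket literally named "None". A sketch of a guard that could run before the upload (suggestion only, not in the diff):

import os
import sys

# Fail fast if Jenkins did not export the bucket name, instead of letting
# str(None) produce the bucket name "None".
bucket = os.getenv("AWS_S3_BUCKET")
if bucket is None:
    print("AWS_S3_BUCKET is not set; aborting backup upload.")
    sys.exit(1)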
@@ -7,9 +7,9 @@ terraform {
   }

   backend "s3" {
-    bucket = "coco-dynatrace-tfstate"
+    bucket = "{$S3_BUCKET}"
     key = "backup/{$env}/{$timestamp}/terraform.tfstate"
-    region = "eu-central-1"
+    region = "{$S3_REGION}"
     dynamodb_table = "coco-dynatrace-tfstate"
     encrypt = true
   }
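Taken together: copyMainTemplate renders this template before Terraform is invoked, so the backend block ends up fully concrete. An illustrative sketch of that substitution, using the same placeholder convention; the example values for env and timestamp are hypothetical:

import os

template = '''backend "s3" {
    bucket = "{$S3_BUCKET}"
    key = "backup/{$env}/{$timestamp}/terraform.tfstate"
    region = "{$S3_REGION}"
}'''

# Substitute placeholders the same way copyMainTemplate does via replacedata.
rendered = template
for placeholder, value in {
    "{$env}": "preprod",              # hypothetical example value
    "{$timestamp}": "20220117120000", # hypothetical example value
    "{$S3_BUCKET}": str(os.getenv("AWS_S3_BUCKET", "coco-dynatrace-tfstate")),
    "{$S3_REGION}": str(os.getenv("AWS_S3_REGION", "eu-central-1")),
}.items():
    rendered = rendered.replace(placeholder, value)
print(rendered)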