Compare commits

...

10 Commits

Author SHA1 Message Date
ermisw 317e950357 added list of all environments and defined export function 2022-02-02 17:09:47 +01:00
ermisw 277ea00ddb Enabled all resources 2022-01-31 15:34:46 +01:00
ermisw 4273e22f28 Error handling subprocesses for Export & Import, Excluding terraform dir and lock file for zipping 2022-01-31 15:31:44 +01:00
rforstner 52c5275634 proxy settings 2022-01-31 09:27:26 +01:00
rforstner 36bf63acdb proxy settings 2022-01-31 09:23:32 +01:00
rforstner 153d3623e1 proxy settings 2022-01-31 09:21:50 +01:00
ermisw 2da97c6f12 added new env variable to jenkins 2022-01-28 16:12:42 +01:00
ermisw 2c8dfbcd31 Merge branch 'master' of https://atc.bmwgroup.net/bitbucket/scm/opapm/coco_apm_exportdtconfig 2022-01-28 15:49:59 +01:00
ermisw bdbf312ccb added upload data export to s3 bucket 2022-01-28 15:49:40 +01:00
rforstner eb153ea604 update jf 2022-01-27 13:48:44 +01:00
6 changed files with 99 additions and 83 deletions

1
.gitignore vendored
View File

@@ -1,2 +1,3 @@
output/
.env .env
.terraform.lock.hcl .terraform.lock.hcl

20
.vscode/launch.json vendored
View File

@@ -13,6 +13,26 @@
"args": [ "args": [
"TERRAFORM" "TERRAFORM"
], ],
},
{
"name": "Python: Aktuelle Datei EMEA_PROD",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args": [
"EMEA_PROD"
],
},
{
"name": "Python: Aktuelle Datei EMEA_PREPROD",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args": [
"EMEA_PREPROD"
],
} }
] ]
} }

47
Jenkinsfile vendored
View File

@@ -1,21 +1,13 @@
//not required right now as CN is reachable from EMEA as well //not required right now as CN is reachable from EMEA as well
def loopEnvironments(environments){
print env.JENKINS_URL environments=['EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD']
environments.each { key, val ->
@NonCPS // has to be NonCPS or the build breaks on the call to .each
//Execute only if you are on the same environment def export_config_all(list) {
//not required right now as CN is reachable from EMEA as well list.each { env ->
if (env.JENKINS_URL == environments."${key}"[3].'jenkins') sh "python3 export.py ${env}"
{ }
envname = environments."${key}"[0].'name'
envurl = environments."${key}"[1].'env-url'
tokenname = environments."${key}"[2].'env-token-name'
sh 'python createReport.py "${envname}"'
}
}
} }
pipeline { pipeline {
options { options {
@@ -56,11 +48,12 @@
// NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR') // NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
// CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR') // CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
// CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR') // CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
// AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY') AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
// AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY') AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
AWS_S3_BUCKET="coco-dynatrace-tfstate"
AWS_S3_REGION="eu-central-1"
//TERRAFORM_RESOURCES="dynatrace_management_zone"
TERRAFORM_RESOURCES="dynatrace_management_zone"
//EMEA PROD //EMEA PROD
TF_VAR_EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com" TF_VAR_EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com"
@@ -79,11 +72,11 @@
TF_VAR_NA_PREPROD_API_TOKEN=credentials('NAPREPROD_TOKEN_VAR') TF_VAR_NA_PREPROD_API_TOKEN=credentials('NAPREPROD_TOKEN_VAR')
//CN PROD //CN PROD
TF_VAR_CN_PROD_ENV_URL="https://dynatracemgd-tsp.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b" TF_VAR_CN_PROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
TF_VAR_CN_PROD_API_TOKEN=credentials('CNPROD_TOKEN_VAR') TF_VAR_CN_PROD_API_TOKEN=credentials('CNPROD_TOKEN_VAR')
//CN PREPROD //CN PREPROD
TF_VAR_CN_PREPROD_ENV_URL="https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35" TF_VAR_CN_PREPROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
TF_VAR_CN_PREPROD_API_TOKEN=credentials('CNPREPROD_TOKEN_VAR') TF_VAR_CN_PREPROD_API_TOKEN=credentials('CNPREPROD_TOKEN_VAR')
//TERRAFORM //TERRAFORM
@@ -129,15 +122,17 @@
} }
} }
stage('Execute Export Script TERRAFORM') { stage('Execute Export Script TERRAFORM') {
steps { steps {
sh 'python3 export.py TERRAFORM' export_config_all(environments)
//sh 'python3 export.py EMEA_PROD'
//sh 'python3 export.py TERRAFORM'
//Only required once CN is not reachable from EMEA //Only required once CN is not reachable from EMEA
//loopEnvironments(environments) //loopEnvironments(environments)
} }
} }
stage('Send report') { stage('Send report') {

106
export.py
View File

@@ -1,15 +1,17 @@
import os import os
import subprocess from subprocess import Popen, PIPE, STDOUT, TimeoutExpired
import sys import sys
import time import time
import shutil import shutil
import hcl import hcl
from dotenv import load_dotenv from dotenv import load_dotenv
from glob import glob from glob import glob
#from git import Repo
import zipfile
import boto3
# [AA 2022.01.17] Set available resources
# [AA 2022.01.17] Set available resources
if os.name == 'nt': if os.name == 'nt':
@@ -35,11 +37,11 @@ Resources = os.getenv("TERRAFORM_RESOURCES").split(",") if os.getenv("TERRAFORM_
"dynatrace_database_anomalies", "dynatrace_database_anomalies",
"dynatrace_custom_anomalies", "dynatrace_custom_anomalies",
"dynatrace_disk_anomalies", "dynatrace_disk_anomalies",
# "dynatrace_calculated_service_metric", #issue -> bug: windows specific due to path length limit "dynatrace_calculated_service_metric", #issue -> bug: windows specific due to path length limit
"dynatrace_service_naming", "dynatrace_service_naming",
"dynatrace_host_naming", "dynatrace_host_naming",
"dynatrace_processgroup_naming", "dynatrace_processgroup_naming",
#"dynatrace_slo", # issue -> bug: whitespace issue "dynatrace_slo", # issue -> bug: whitespace issue
"dynatrace_span_entry_point", "dynatrace_span_entry_point",
"dynatrace_span_capture_rule", "dynatrace_span_capture_rule",
"dynatrace_span_context_propagation", "dynatrace_span_context_propagation",
@@ -68,33 +70,27 @@ def setEnv(env, time, path):
# [AA 2021.12.10] Method to call process synchronously # [AA 2021.12.10] Method to call process synchronously
def runProcess(process_name, input_params): def runExportProcess(process_name, input_params):
process_names = ["Export", "Terraform init"] process_names = ["Export", "Terraform init"]
success = False
#process = subprocess.Popen(input_params)
print("[DEBUG] Start run process: "+ ' '.join(input_params))
try:
process = subprocess.Popen(input_params)
process.wait(timeout=60*60) # 60 minutes
success = True success = True
print("[DEBUG] Start run process: "+ ' '.join(input_params))
process = Popen(input_params,stdout=PIPE, stderr=PIPE)
output, error_output = process.communicate()
process.wait(timeout=60*60) # 60 minutes
if len(output) > 0:
raise Exception ("Eception occured during export config: "+output.decode("utf-8"))
print("[DEBUG]", "Process:", process_name, "Success:", success)
def runImportProcess(process_name, input_params):
process_names = ["Export", "Terraform init"]
success = True
print("[DEBUG] Start run process: "+ ' '.join(input_params))
process = Popen(input_params)
process.wait(timeout=60*60)
if process.returncode > 0:
raise Exception ("Eception occured during generating state File!")
print("[DEBUG]", "Process:", process_name, "Success:", success) print("[DEBUG]", "Process:", process_name, "Success:", success)
# print("[DEBUG]", "Process return code:", outs)
except subprocess.TimeoutExpired:
print("[DEBUG]", "Exception occured:", subprocess.TimeoutExpired)
print("[DEBUG]", "Killing process:", process_name)
process.kill()
success = False
print("[DEBUG]", "Process:", process_name, "Success:", success)
except Exception as err:
print("[DEBUG]", "Exception running export tool"+ str(err))
#if process_name in process_names and success == False:
# print("[DEBUG]", "Process:", process_name, "Success:", success)
# print("[DEBUG]", "Exiting program.")
# process.kill()
# success = False
# sys.exit(1)
#else:
# print("[FAILED]", input_params)
# [AA 2021.12.17] Methods needed to replace the matching keys # [AA 2021.12.17] Methods needed to replace the matching keys
def replacedata(p, maplist): def replacedata(p, maplist):
@@ -106,29 +102,11 @@ def replacedata(p, maplist):
template.seek(0) template.seek(0)
template.truncate() template.truncate()
# [AA 2021.12.14] Replace matching {$keys}
#for mapping in mappings[0:2]:
# [AA 2021.12.17] With the values for management_zone and msid in memory # [AA 2021.12.17] With the values for management_zone and msid in memory
for key, val in maplist.items(): for key, val in maplist.items():
print("[DEBUG]", "Replacing key values %s at %s." % (key, p)) print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
data = data.replace(key, val) data = data.replace(key, val)
# # [AA 2021.12.22] Files that require a replacement for test,int and e2e,prod
# if os.path.basename(p) in specificfiles[0:2]:
# for key, val in mappings[2][maplist].items():
# print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
# data = data.replace(key, val)
# # [AA 2021.12.22] Replace key value for {$url} and {$env} for corresponding hub
# if os.path.basename(p) in specificfiles[2]:
# for mapping in mappings[3:5]:
# for key, val in mapping.items():
# print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
# data = data.replace(key, val[pos])
# [AA 2022.01.19] Replace key value for {}
# [AA 2021.12.14] Write data from memory into file # [AA 2021.12.14] Write data from memory into file
with open(p, 'w+') as template: with open(p, 'w+') as template:
template.write(data) template.write(data)
@@ -173,7 +151,10 @@ def createResourceDict():
# [AA, EW 2022.01.17] Copy main.tf into the target folder # [AA, EW 2022.01.17] Copy main.tf into the target folder
def copyMainTemplate(): def copyMainTemplate():
shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf") shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf")
replacedata(targetFolder + "main.tf", {"{$env}":env, "{$timestamp}":timestamp}) replacedata(targetFolder + "main.tf", {"{$env}":env,
"{$timestamp}":timestamp,
"{$S3_BUCKET}": str(os.getenv("AWS_S3_BUCKET")),
"{$S3_REGION}":str(os.getenv("AWS_S3_REGION"))})
# [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is # [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is
@@ -206,17 +187,25 @@ def editMainTF():
def importStates(): def importStates():
os.chdir(targetFolder) os.chdir(targetFolder)
input_params = ["terraform", "init"] input_params = ["terraform", "init"]
runProcess("Terraform init",input_params) runImportProcess("Terraform init",input_params)
for filedir, resourceV in myDict.items(): for filedir, resourceV in myDict.items():
for resource, valueArray in resourceV.items(): for resource, valueArray in resourceV.items():
for rObject in valueArray: for rObject in valueArray:
input_params = ["terraform", "import", "module."+getModuleTag( input_params = ["terraform", "import", "module."+getModuleTag(
filedir)+"."+resource+"."+rObject["resourceName"], rObject["resourceID"]] filedir)+"."+resource+"."+rObject["resourceName"], rObject["resourceID"]]
runProcess("Import", input_params) runImportProcess("Import", input_params)
# terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8 # terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8
os.chdir(cwd) os.chdir(cwd)
def zipdir(path, ziph):# ziph is zipfile handlefor root, dirs, files in os.walk(path):for file in files:
for root, dirs, files in os.walk(path):
if ".terraform" not in root:
for file in files:
if ".terraform.lock.hcl" not in file:
ziph.write(os.path.join(root, file),
os.path.relpath(os.path.join(root, file),
os.path.join(path, '..')))
# [AA 2022.01.17] Arguments passed # [AA 2022.01.17] Arguments passed
if(len(sys.argv) == 2): if(len(sys.argv) == 2):
@@ -239,7 +228,7 @@ if(len(sys.argv) == 2):
setEnv(sys.argv[1], timestamp, outputFolder) setEnv(sys.argv[1], timestamp, outputFolder)
# [AA, EW 2022.01.17] Download resource files # [AA, EW 2022.01.17] Download resource files
runProcess("Export", [export_tool, "export"] + Resources) runExportProcess("Export", [export_tool, "export"] + Resources)
# [AA, EW 2022.01.17] Create a dictionary to store information of resources # [AA, EW 2022.01.17] Create a dictionary to store information of resources
createResourceDict() createResourceDict()
@@ -253,8 +242,21 @@ if(len(sys.argv) == 2):
# [AA, EW 2022.01.17] Import the states for each module # [AA, EW 2022.01.17] Import the states for each module
importStates() importStates()
zipf = zipfile.ZipFile(outputFolder+"/"+timestamp + "_" +env+'.zip', 'w', zipfile.ZIP_DEFLATED)
zipdir(targetFolder, zipf)
zipf.close()
s3 = boto3.client('s3')
with open(outputFolder+"/"+timestamp + "_" +env+'.zip', 'rb') as data:
s3.upload_fileobj(data, str(os.getenv("AWS_S3_BUCKET")), 'backups/'+timestamp + "_" +env+'.zip') #TODO: Make s3 bucket name configurable over environment variables
print("Finished!") print("Finished!")
sys.exit(0) sys.exit(0)
except Exception as err: except Exception as err:
print("Exception occured: "+ str(err)) print("Exception occured: "+ str(err))
sys.exit(1) sys.exit(1)

View File

@@ -9,3 +9,4 @@ typing
python-dotenv python-dotenv
pyhcl pyhcl
subprocess32 subprocess32
boto3==1.17.0

View File

@@ -7,9 +7,9 @@ terraform {
} }
backend "s3" { backend "s3" {
bucket = "coco-dynatrace-tfstate" bucket = "{$S3_BUCKET}"
key = "backup/{$env}/{$timestamp}/terraform.tfstate" key = "backup/{$env}/{$timestamp}/terraform.tfstate"
region = "eu-central-1" region = "{$S3_REGION}"
dynamodb_table = "coco-dynatrace-tfstate" dynamodb_table = "coco-dynatrace-tfstate"
encrypt = true encrypt = true
} }
@@ -23,6 +23,3 @@ provider "dynatrace" {
dt_env_url = "${var.{$env}_ENV_URL}" dt_env_url = "${var.{$env}_ENV_URL}"
dt_api_token = "${var.{$env}_API_TOKEN}" dt_api_token = "${var.{$env}_API_TOKEN}"
} }