import os
import sys
import time
import shutil
import zipfile
from glob import glob
from subprocess import Popen, PIPE, STDOUT, TimeoutExpired

import hcl
import boto3
from dotenv import load_dotenv
#from git import Repo

# [AA 2022.01.17] Set available resources
if os.name == 'nt':
    export_tool = ".\\bin\\terraform-provider-dynatrace_v1.9.1.exe"
else:
    export_tool = "./bin/terraform-provider-dynatrace_v1.9.1"

Resources = os.getenv("TERRAFORM_RESOURCES").split(",") if os.getenv("TERRAFORM_RESOURCES") else [
    "dynatrace_custom_service",
    "dynatrace_dashboard",
    "dynatrace_management_zone",
    "dynatrace_maintenance_window",
    "dynatrace_request_attribute",
    "dynatrace_alerting_profile",
    "dynatrace_notification",
    "dynatrace_autotag",
    "dynatrace_aws_credentials",
    "dynatrace_azure_credentials",
    "dynatrace_k8s_credentials",
    "dynatrace_service_anomalies",
    "dynatrace_application_anomalies",
    "dynatrace_host_anomalies",
    "dynatrace_database_anomalies",
    "dynatrace_custom_anomalies",
    "dynatrace_disk_anomalies",
    "dynatrace_calculated_service_metric",  # issue -> bug: Windows-specific due to path length limit
    "dynatrace_service_naming",
    "dynatrace_host_naming",
    "dynatrace_processgroup_naming",
    "dynatrace_slo",  # issue -> bug: whitespace issue
    "dynatrace_span_entry_point",
    "dynatrace_span_capture_rule",
    "dynatrace_span_context_propagation",
    "dynatrace_resource_attributes",
    "dynatrace_span_attribute",
    "dynatrace_mobile_application",
    # "dynatrace_credentials",  # issue -> bug: unknown issue? not supported?
    "dynatrace_browser_monitor",
    "dynatrace_http_monitor",
]

# [AA 2021.12.10] Method to set environments
def setEnv(env, time, path):
    if not os.getenv("TF_VAR_" + env + "_ENV_URL"):
        raise Exception("Environment variable missing: TF_VAR_" + env + "_ENV_URL")
    if not os.getenv("TF_VAR_" + env + "_API_TOKEN"):
        raise Exception("Environment variable missing: TF_VAR_" + env + "_API_TOKEN")
    os.environ['DYNATRACE_ENV_URL'] = str(os.getenv("TF_VAR_" + env + "_ENV_URL"))
    os.environ['DYNATRACE_API_TOKEN'] = str(os.getenv("TF_VAR_" + env + "_API_TOKEN"))
    os.environ['DYNATRACE_TARGET_FOLDER'] = str(path + time + "_" + env)
    return os.environ
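
# [Illustrative only] The variables read above and further below are usually supplied via a
# .env file picked up by load_dotenv(). A minimal sketch, assuming an environment named
# EMEA_PREPROD (from the usage list at the bottom of this script); URLs, tokens, bucket and
# region values are placeholders, not real settings:
#
#   TF_VAR_EMEA_PREPROD_ENV_URL=https://<tenant>.example.dynatrace.com
#   TF_VAR_EMEA_PREPROD_API_TOKEN=<api-token>
#   AWS_S3_BUCKET=<bucket-name>
#   AWS_S3_REGION=<region>
#   # Optional: override the default resource list with a comma-separated value
#   TERRAFORM_RESOURCES=dynatrace_dashboard,dynatrace_management_zone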
# [AA 2021.12.10] Method to call process synchronously
def runExportProcess(process_name, input_params):
    process_names = ["Export", "Terraform init"]
    success = True
    print("[DEBUG] Start run process: " + ' '.join(input_params))
    process = Popen(input_params, stdout=PIPE, stderr=PIPE)
    # communicate() waits for the process to finish; time out after 1 hour.
    # The export tool is expected to stay silent on success, so any stdout output is treated as an error.
    output, error_output = process.communicate(timeout=60 * 60)
    if len(output) > 0:
        raise Exception("Exception occurred during export config: " + output.decode("utf-8"))
    print("[DEBUG]", "Process:", process_name, "Success:", success)


def runImportProcess(process_name, input_params):
    process_names = ["Export", "Terraform init"]
    success = True
    print("[DEBUG] Start run process: " + ' '.join(input_params))
    process = Popen(input_params)
    process.wait(timeout=60 * 60)  # 1 hour
    if process.returncode > 0:
        raise Exception("Exception occurred during generating state file!")
    print("[DEBUG]", "Process:", process_name, "Success:", success)


# [AA 2021.12.17] Methods needed to replace the matching keys
def replacedata(p, maplist):
    # [AA 2021.12.14] Open each template to read
    with open(p, 'r+') as template:
        print("[DEBUG]", "Opening file at %s." % p)
        data = template.read()
        template.seek(0)
        template.truncate()
    # [AA 2021.12.17] With the values for management_zone and msid in memory
    for key, val in maplist.items():
        print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
        data = data.replace(key, val)
    # [AA 2021.12.14] Write data from memory into file
    with open(p, 'w+') as template:
        template.write(data)


# [AA 2021.12.13] Fill dictionary
def readFile(path):
    with open(path, 'r', encoding='utf8') as cfg:
        # [AA 2021.12.01] Load the content of the particular resource file, e.g. management_zones
        obj = hcl.load(cfg)
        # [AA, EW 2021.12.01] Store resource type and resource name of that file into a dictionary
        key = list(obj['resource'].keys())[0]
        val = list(obj['resource'][key].keys())[0]
        return key, val


# [AA, EW 2022.01.17] Load all resources and add them to a dictionary
def createResourceDict():
    files = [os.path.normpath(f).replace('\\', '/')
             for f in (glob(targetFolder + "**/**.tf") + glob(targetFolder + "**/**/**.tf"))]
    for index, file in enumerate(files):
        filedir = "./" + ("./" + os.path.dirname(file)).replace(targetFolder, "")
        splittedFilename = os.path.basename(file).split(".")
        # Only files whose names consist of five dot-separated parts carry a resource ID (second part)
        if len(splittedFilename) == 5:
            resourceID = splittedFilename[1]
            moduleName, resourceName = readFile(file)
            if filedir not in myDict:
                myDict.setdefault(filedir, {})
            if moduleName not in myDict[filedir]:
                myDict[filedir].setdefault(moduleName, [])
            resourceValue = {"resourceName": resourceName, "resourceID": resourceID}
            myDict[filedir][moduleName].append(resourceValue)
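
# [Illustrative only] Shape of the dictionary createResourceDict() builds. The exact file
# naming produced by the export tool is not reproduced here (the code only assumes five
# dot-separated name parts, with the second part used as the entity ID); the folder,
# resource, and entity names below are made up:
#
#   myDict = {
#       "./alerting_profiles": {
#           "dynatrace_alerting_profile": [
#               {"resourceName": "example_profile", "resourceID": "<entity-id>"},
#           ],
#       },
#   }
#
# editMainTF() below writes one module block per top-level key, and importStates() runs one
# `terraform import` per entry in the nested lists.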
# [AA, EW 2022.01.17] Copy main.tf into the target folder
def copyMainTemplate():
    shutil.copyfile(templatesFolder + "main.tf", targetFolder + "main.tf")
    replacedata(targetFolder + "main.tf", {
        "{$env}": env,
        "{$timestamp}": timestamp,
        "{$S3_BUCKET}": str(os.getenv("AWS_S3_BUCKET")),
        "{$S3_REGION}": str(os.getenv("AWS_S3_REGION")),
    })


# [AA 2022.01.17] Copy module.tf into all folders and subfolders except where main.tf is
def copyModuleTemplate():
    dirs = glob(targetFolder + "**/") + glob(targetFolder + "**/**/")
    for index, dir in enumerate(dirs):
        shutil.copyfile(templatesFolder + "module.tf", dir + "module.tf")


# [AA 2021.12.13] Append correct configuration path
def writeFile(k, d):
    with open(".\\main.tf", "a") as mf:
        mf.writelines("\n" + "module \"" + k + "\" { source = \"" + d + "\" }")


# [AA, EW 2022.01.17] Adjust the resource module name
def getModuleTag(path):
    return path.replace("./", "").replace("/", "_")


# [AA, EW 2022.01.17] Set the resource names
def editMainTF():
    with open(targetFolder + "main.tf", "a") as mf:
        for index, (filedir, value) in enumerate(myDict.items()):
            mf.writelines("\n" + "module \"" + getModuleTag(filedir) + "\" { source = \"" + filedir + "\" }")


# [AA, EW 2022.01.17] Start importing
def importStates():
    os.chdir(targetFolder)
    input_params = ["terraform", "init"]
    runImportProcess("Terraform init", input_params)
    for filedir, resourceV in myDict.items():
        for resource, valueArray in resourceV.items():
            for rObject in valueArray:
                # e.g. terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8
                input_params = ["terraform", "import",
                                "module." + getModuleTag(filedir) + "." + resource + "." + rObject["resourceName"],
                                rObject["resourceID"]]
                runImportProcess("Import", input_params)
    os.chdir(cwd)


# ziph is a zipfile handle; skip .terraform directories and Terraform lock files
def zipdir(path, ziph):
    for root, dirs, files in os.walk(path):
        if ".terraform" not in root:
            for file in files:
                if ".terraform.lock.hcl" not in file:
                    ziph.write(os.path.join(root, file),
                               os.path.relpath(os.path.join(root, file), os.path.join(path, '..')))


# [AA 2022.01.17] Arguments passed
if len(sys.argv) == 2:
    try:
        # [AA 2021.11.29] Load environment file
        load_dotenv()
        # [AA, EW 2022.01.17] Set global variables
        global timestamp, templatesFolder, outputFolder, targetFolder, myDict, cwd, env
        env = sys.argv[1]
        timestamp = time.strftime("%Y%m%d-%H%M%S")
        cwd = os.getcwd()
        outputFolder = "./output/"
        targetFolder = outputFolder + timestamp + "_" + sys.argv[1] + "/"
        templatesFolder = "./templates/"
        myDict = {}
        # [AA, EW 2022.01.17] Set env variables
        setEnv(sys.argv[1], timestamp, outputFolder)
        # [AA, EW 2022.01.17] Download resource files
        runExportProcess("Export", [export_tool, "export"] + Resources)
        # [AA, EW 2022.01.17] Create a dictionary to store information of resources
        createResourceDict()
        # [AA, EW 2022.01.17] Copy main.tf file and add module.tf files
        copyMainTemplate()
        copyModuleTemplate()
        # [AA, EW 2022.01.17] Print the module names with their associated module path into the main.tf file
        editMainTF()
        # [AA, EW 2022.01.17] Import the states for each module
        importStates()
        # Zip the exported configuration and upload the archive to S3
        zipf = zipfile.ZipFile(outputFolder + timestamp + "_" + env + '.zip', 'w', zipfile.ZIP_DEFLATED)
        zipdir(targetFolder, zipf)
        zipf.close()
        s3 = boto3.client('s3')
        with open(outputFolder + timestamp + "_" + env + '.zip', 'rb') as data:
            s3.upload_fileobj(data, str(os.getenv("AWS_S3_BUCKET")),
                              'backups/' + timestamp + "_" + env + '.zip')
        #TODO: Make s3 bucket name configurable over environment variables
        print("Finished!")
        sys.exit(0)
    except Exception as err:
        print("Exception occurred: " + str(err))
        sys.exit(1)
else:
    print("Usage example: ")
    print("List of available environments: CN_PREPROD, CN_PROD, EMEA_PREPROD, EMEA_PROD, NA_PREPROD, NA_PROD, etc.")
    print("python .\\export.py [Environment]")
    sys.exit(1)
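
# [Illustrative only] Typical invocation, assuming a .env file in the working directory and
# one of the environment names listed above (EMEA_PREPROD is just an example):
#
#   python export.py EMEA_PREPROD
#
# On success the script leaves a timestamped folder under ./output/, writes
# <timestamp>_<env>.zip next to it, and uploads the archive to the configured S3 bucket
# under the backups/ prefix.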