added upload of data export to S3 bucket

master
ermisw 2022-01-28 15:49:40 +01:00
parent 96d0a76c63
commit bdbf312ccb
4 changed files with 43 additions and 18 deletions

.gitignore

@@ -1,2 +1,3 @@
+output/
 .env
 .terraform.lock.hcl


@@ -6,10 +6,12 @@ import shutil
 import hcl
 from dotenv import load_dotenv
 from glob import glob
+#from git import Repo
+import zipfile
+import boto3

 # [AA 2022.01.17] Set available resources
 if os.name == 'nt':
@@ -114,19 +116,6 @@ def replacedata(p, maplist):
             print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
             data = data.replace(key, val)
-    # # [AA 2021.12.22] Files that require a replacement for test,int and e2e,prod
-    # if os.path.basename(p) in specificfiles[0:2]:
-    #     for key, val in mappings[2][maplist].items():
-    #         print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
-    #         data = data.replace(key, val)
-    # # [AA 2021.12.22] Replace key value for {$url} and {$env} for corresponding hub
-    # if os.path.basename(p) in specificfiles[2]:
-    #     for mapping in mappings[3:5]:
-    #         for key, val in mapping.items():
-    #             print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
-    #             data = data.replace(key, val[pos])
     # [AA 2022.01.19] Replace key value for {}
     # [AA 2021.12.14] Write data from memory into file
@@ -217,6 +206,12 @@ def importStates():
     # terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8
     os.chdir(cwd)

+def zipdir(path, ziph):
+    # ziph is zipfile handle
+    for root, dirs, files in os.walk(path):
+        for file in files:
+            ziph.write(os.path.join(root, file),
+                       os.path.relpath(os.path.join(root, file),
+                                       os.path.join(path, '..')))

 # [AA 2022.01.17] Arguments passed
 if(len(sys.argv) == 2):
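
Note: the zipdir() helper added in this hunk archives a directory tree while keeping the top-level folder name, because each archive path is computed relative to the parent of `path`. A minimal usage sketch, with hypothetical archive and folder names that are not part of this commit:

import zipfile

# "backup.zip" and "output/test" are illustrative names only.
with zipfile.ZipFile("backup.zip", "w", zipfile.ZIP_DEFLATED) as ziph:
    zipdir("output/test", ziph)
# Entries land as "test/..." inside the archive, since os.path.relpath()
# is taken against os.path.join(path, '..'), the parent of the zipped folder.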
@@ -253,8 +248,39 @@
         # [AA, EW 2022.01.17] Import the states for each module
         importStates()

+        #remote = "https://atc.bmwgroup.net/bitbucket/scm/opapm/coco_apm_fullbackup.git"
+        #repo = Repo.init(path="C:\\Users\\wiegee\\Documents\\Projekte\\BMW\\dev\\DynatraceFullBackUpData")
+        #if len(repo.index.diff("HEAD")) != 0:
+        #    repo.git.add('.')
+        #    repo.git.commit(m="testing")
+        #if len(repo.remotes)==0:
+        #    repo.git.remote('add', 'origin', remote)
+        #repo.git.push('-u', 'origin', 'master')
+
+        #s3 = boto3.client('s3')
+        # Upload the Zip file ATA.zip within the folder2 on S3
+        #with open('ATA.zip', 'rb') as data:
+        #    s3.upload_fileobj(data, 'first-us-east-1-bucket', 'ATA.zip')
+
+        zipf = zipfile.ZipFile(outputFolder + "/" + timestamp + "_" + env + '.zip', 'w', zipfile.ZIP_DEFLATED)
+        zipdir(targetFolder, zipf)
+        zipf.close()
+
+        s3 = boto3.client('s3')
+        with open(outputFolder + "/" + timestamp + "_" + env + '.zip', 'rb') as data:
+            s3.upload_fileobj(data, 'coco-dynatrace-tfstate', 'backups/' + timestamp + "_" + env + '.zip')  # TODO: Make the S3 bucket name configurable via environment variables
+
         print("Finished!")
         sys.exit(0)
     except Exception as err:
         print("Exception occured: " + str(err))
         sys.exit(1)
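
Note: the TODO left on the upload line points at reading the bucket name from an environment variable instead of the hard-coded 'coco-dynatrace-tfstate'. A minimal sketch of that change, assuming a hypothetical variable name BACKUP_BUCKET (not part of this commit); the script already imports load_dotenv, so a .env entry could supply it as well:

import os
import boto3

# BACKUP_BUCKET is a hypothetical name; the commit only records the TODO.
# Falls back to the bucket hard-coded in this commit.
bucket = os.environ.get("BACKUP_BUCKET", "coco-dynatrace-tfstate")
zip_path = "output/20220128_test.zip"  # stands in for outputFolder/timestamp_env.zip

s3 = boto3.client("s3")
with open(zip_path, "rb") as data:
    s3.upload_fileobj(data, bucket, "backups/" + os.path.basename(zip_path))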


@@ -9,3 +9,4 @@ typing
 python-dotenv
 pyhcl
 subprocess32
+boto3==1.17.0


@@ -23,6 +23,3 @@ provider "dynatrace" {
   dt_env_url = "${var.{$env}_ENV_URL}"
   dt_api_token = "${var.{$env}_API_TOKEN}"
 }
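
Note: the provider block above is a template; {$env} is a placeholder that the script's replacedata() fills by plain string replacement before Terraform runs. A minimal sketch of that substitution, using a hypothetical environment name "TEST":

# Plain string replacement as done in replacedata(); "TEST" is a
# hypothetical value for the {$env} placeholder.
template = 'dt_env_url = "${var.{$env}_ENV_URL}"'
print(template.replace("{$env}", "TEST"))
# -> dt_env_url = "${var.TEST_ENV_URL}"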