added upload data export to s3 bucket
parent 96d0a76c63
commit bdbf312ccb
.gitignore
@@ -1,2 +1,3 @@
 output/
 .env
+.terraform.lock.hcl

export.py
@@ -6,7 +6,9 @@ import shutil
 import hcl
 from dotenv import load_dotenv
 from glob import glob

 #from git import Repo
+import zipfile
+import boto3

 # [AA 2022.01.17] Set available resources
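Note: the added boto3 client is created later without explicit keys, so AWS credentials must be resolvable at runtime. A minimal sketch of the resolution order and a sanity check (the check is illustrative, not part of the commit):

    import boto3

    # boto3 resolves credentials from the environment (AWS_ACCESS_KEY_ID,
    # AWS_SECRET_ACCESS_KEY), the shared ~/.aws/credentials file, or an
    # attached IAM role -- export.py never passes keys explicitly.
    s3 = boto3.client('s3')

    # [sketch] cheap check that credentials work before the long export runs
    sts = boto3.client('sts')
    print(sts.get_caller_identity()['Account'])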
@@ -114,19 +116,6 @@ def replacedata(p, maplist):
         print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
         data = data.replace(key, val)

-    # # [AA 2021.12.22] Files that require a replacement for test,int and e2e,prod
-    # if os.path.basename(p) in specificfiles[0:2]:
-    #     for key, val in mappings[2][maplist].items():
-    #         print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
-    #         data = data.replace(key, val)
-
-    # # [AA 2021.12.22] Replace key value for {$url} and {$env} for corresponding hub
-    # if os.path.basename(p) in specificfiles[2]:
-    #     for mapping in mappings[3:5]:
-    #         for key, val in mapping.items():
-    #             print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
-    #             data = data.replace(key, val[pos])
-
     # [AA 2022.01.19] Replace key value for {}

     # [AA 2021.12.14] Write data from memory into file
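For context, the surviving lines show the substitution pattern replacedata keeps: read the file into memory, replace each mapped key, write the result back. A minimal sketch of that shape (function and parameter names here are assumptions, not the file's exact code):

    def replace_in_file(path, mapping):
        # [sketch] read the whole file into memory
        with open(path) as f:
            data = f.read()
        # replace every mapped key, mirroring data = data.replace(key, val) above
        for key, val in mapping.items():
            data = data.replace(key, val)
        # write data from memory back into the file
        with open(path, 'w') as f:
            f.write(data)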
@@ -217,6 +206,12 @@ def importStates():
     # terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8
     os.chdir(cwd)

+def zipdir(path, ziph):  # ziph is zipfile handle
+    for root, dirs, files in os.walk(path):
+        for file in files:
+            ziph.write(os.path.join(root, file),
+                       os.path.relpath(os.path.join(root, file),
+                                       os.path.join(path, '..')))

 # [AA 2022.01.17] Arguments passed
 if(len(sys.argv) == 2):
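Because each member path is taken relative to the parent of path, the archive keeps the top-level folder name. A minimal usage sketch (folder and archive names are illustrative):

    import zipfile

    # entries are stored as 'export/...', since paths are relative to 'output'
    with zipfile.ZipFile('backup.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
        zipdir('output/export', zf)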
@@ -253,8 +248,39 @@
     # [AA, EW 2022.01.17] Import the states for each module
     importStates()

+    #remote = "https://atc.bmwgroup.net/bitbucket/scm/opapm/coco_apm_fullbackup.git"
+    #repo = Repo.init(path="C:\\Users\\wiegee\\Documents\\Projekte\\BMW\\dev\\DynatraceFullBackUpData")
+
+    #if len(repo.index.diff("HEAD")) != 0:
+    #    repo.git.add('.')
+    #    repo.git.commit(m="testing")
+
+    #if len(repo.remotes)==0:
+    #    repo.git.remote('add', 'origin', remote)
+
+    #repo.git.push('-u', 'origin', 'master')
+
+    #s3 = boto3.client('s3')
+
+    # Upload the Zip file ATA.zip within the folder2 on S3
+    #with open('ATA.zip', 'rb') as data:
+    #    s3.upload_fileobj(data, 'first-us-east-1-bucket', 'ATA.zip')
+
+    zipf = zipfile.ZipFile(outputFolder+"/"+timestamp + "_" +env+'.zip', 'w', zipfile.ZIP_DEFLATED)
+    zipdir(targetFolder, zipf)
+    zipf.close()
+
+    s3 = boto3.client('s3')
+
+    with open(outputFolder+"/"+timestamp + "_" +env+'.zip', 'rb') as data:
+        s3.upload_fileobj(data, 'coco-dynatrace-tfstate', 'backups/'+timestamp + "_" +env+'.zip')  # TODO: Make s3 bucket name configurable over environment variables

     print("Finished!")
     sys.exit(0)

 except Exception as err:
     print("Exception occurred: " + str(err))
     sys.exit(1)
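Regarding the TODO: since .env is already loaded via python-dotenv, the bucket name can come from the environment. A minimal sketch (the variable name BACKUP_BUCKET is an assumption, not something the commit defines):

    import os

    # [sketch] fall back to the current hard-coded bucket if the variable is unset
    bucket = os.getenv('BACKUP_BUCKET', 'coco-dynatrace-tfstate')
    zip_path = outputFolder + "/" + timestamp + "_" + env + '.zip'
    with open(zip_path, 'rb') as data:
        s3.upload_fileobj(data, bucket, 'backups/' + os.path.basename(zip_path))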
requirements.txt
@@ -9,3 +9,4 @@ typing
 python-dotenv
 pyhcl
 subprocess32
+boto3==1.17.0
@@ -23,6 +23,3 @@ provider "dynatrace" {
   dt_env_url = "${var.{$env}_ENV_URL}"
   dt_api_token = "${var.{$env}_API_TOKEN}"
 }
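The {$env} tokens in this provider block are not Terraform syntax; they are placeholders that export.py's replacedata rewrites before terraform runs. For example, with an environment name of test (illustrative, taken from the test/int/e2e/prod names in the removed comments), the first line would become:

    dt_env_url = "${var.test_ENV_URL}"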