"""Main dashboard generation script.

Generates Dynatrace dashboard tile JSON from SLO definitions and, optionally,
uploads the result to the STAGING dashboards.
"""
import argparse
import json
import warnings
from datetime import datetime

import yaml
from decouple import config

from tileFactory import createHeaderTiles, createImageTile, createSloTileRow
from repoConfig import clone_repo_if_notexist, pull_repo, push_repo
from sloHelper import load_slo_parameter
from sloConfigLoader import getSloConfigurations
from remoteDashboard import (get_all_dashboards_withname, backup_dashboards,
                             create_or_update_dashboard)

# Suppress library warnings so they do not clutter the console output
warnings.filterwarnings("ignore")

# Script args definition
parser = argparse.ArgumentParser(
    description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for the STAGING dashboard.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-R", "--rows", type=int,
                    help="Number of rows per dashboard. If not specified, all rows are added to a single dashboard.")
parser.add_argument("--auto-upload", default=False, action="store_true",
                    help="Auto upload to the STAGING dashboard.")
parser.add_argument("-D", "--department", type=str, default="ALL", required=False,
                    help="Department for which the dashboard should be updated: 'DE-3', 'DE-7', 'DE-4' or 'EC-DE'. Leave empty or use 'ALL' to generate one cumulated dashboard.")
parser.add_argument("--wall", default=False, action="store_true",
                    help="By default the script generates the desktop version. Use this flag to generate the 'Wall' dashboard type.")
args = parser.parse_args()

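# Example invocations (illustrative; "generate_dashboard.py" is an assumed filename,
# use whatever this file is actually called in the repo):
#   python generate_dashboard.py                        # one cumulated dashboard, desktop layout
#   python generate_dashboard.py -R 6 -D DE-3 --wall    # wall layout for DE-3, six SLO rows per dashboard
#   python generate_dashboard.py --auto-upload          # generate and upload to the STAGING dashboards
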
# Loads the script config file
def loadConfig():
    with open('./config.yaml') as file:
        return yaml.safe_load(file)

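# For reference, config.yaml is expected to provide at least the keys read in main()
# (sketch only; the example values are placeholders, and the tile factory and helper
# modules may read additional keys):
#
#   repo:
#     config_repo_url: <git URL of the shared config repo>
#     config_repo_name: <local directory name for the config repo>
#     archive_repo_url: <git URL of the dashboard archive repo>
#     archive_repo_name: <local directory name for the archive repo>
#     slo_path: <path to the SLO parameter YAML>
#   businesslines:
#     DE-3: <business line display name>
#   metadata:
#     stagingname: <name prefix of the STAGING dashboards>
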
def main():
    # Load script config file
    script_config = loadConfig()

    # Pull shared repositories
    configrepo = clone_repo_if_notexist(script_config["repo"]["config_repo_url"], script_config["repo"]["config_repo_name"])
    pull_repo(configrepo)
    archiverepo = clone_repo_if_notexist(script_config["repo"]["archive_repo_url"], script_config["repo"]["archive_repo_name"])
    pull_repo(archiverepo)

    # Load env file
    with open('./environment.yaml') as file:
        doc = yaml.safe_load(file)

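    # For reference, the auto-upload step below expects environment.yaml to contain a
    # 'globaldashboard' entry holding a list of small mappings, with the Dynatrace URL
    # at index 1 and the token variable name at index 2 (sketch only; the example values
    # are placeholders, and index 0 is not read by this script):
    #
    #   globaldashboard:
    #     - <entry not used here>
    #     - env-url: https://example.live.dynatrace.com
    #     - env-token-name: DT_STAGING_TOKEN
    #
    # The token value itself is resolved from the environment via python-decouple's config().
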
    # Load SLO yaml file
    slo_doc = load_slo_parameter(script_config["repo"]["slo_path"])

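    # Each entry in slo_doc maps an SLO name to its configuration. The loop below reads
    # the 'department' and 'selector_var' keys directly and passes the full configuration
    # on to getSloConfigurations() (sketch only; names and values are placeholders):
    #
    #   My-SLO:
    #     department: DE-3
    #     selector_var: CoCo-QM-Report_Mobile
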
    # Create an empty dashboard and fill it with the default header tiles
    dashboard_json = []
    dashboard_json.append(createImageTile(script_config))
    dashboard_json = dashboard_json + createHeaderTiles(script_config, args.wall)

print("Generating dashboard tiles...")
|
|
|
|
    # Configure counters for the SLO loop
    dashboardcount = boundindex = 1
    rowcount = 0
    generatedfiles = []

    if args.rows is not None:
        rowcount = args.rows

    if args.department == "ALL":
        blname = "ALL"
    else:
        blname = script_config["businesslines"][args.department]

    slorelevant = False  # set inside the SLO loop but not read further in this script
    blvalue = args.department

    # SLO loop
    for slo_name, configuration in slo_doc.items():
        if configuration['department'].startswith(blvalue) or blvalue == "ALL":
            if configuration['selector_var'] == "CoCo-QM-Report_Mobile":
                slorelevant = True
            print("Dashboard #" + str(dashboardcount) + " : Configuring SLO " + slo_name)

            # When row splitting is configured, start the next partial dashboard with the default tiles
            if rowcount > 0 and boundindex > rowcount:
                dashboard_json = []
                dashboardcount = dashboardcount + 1
                dashboard_json.append(createImageTile(script_config))
                dashboard_json = dashboard_json + createHeaderTiles(script_config, args.wall)
                boundindex = 1

            # Load the SLO config as an object from the yaml definition
            sloconfigs = getSloConfigurations(configuration, script_config)
            # Generate a tile row including description, single value and graph tiles
            dashboard_json = dashboard_json + createSloTileRow(boundindex, script_config, args.wall, sloconfigs, doc)
            # Increment row index
            boundindex = boundindex + 1

            # Save the tile JSON to file (the current dashboard file is rewritten as rows are added)
            if rowcount == 0 or (args.rows is not None and boundindex % args.rows != 0):
                filename = "dashboard_tiles_" + str(dashboardcount) + ".json"
                with open("./tiles/" + filename, "w") as file:
                    json.dump(dashboard_json, file, indent=2)
                # Only record each generated file once
                if filename not in generatedfiles:
                    generatedfiles.append(filename)

    # Upload the staging dashboards if auto upload is specified
    if args.auto_upload:
        print("Getting existing STAGING dashboards from Dynatrace")
        # Resolve the Dynatrace URL and API token from the environment definition
        for item, envsection in doc.items():
            if item == "globaldashboard":
                token = dict(envsection[2])
                url = dict(envsection[1])
                print("Crawling through: " + item)
                print("Gathering data, hold on a minute")
                DTTOKEN = config(token.get('env-token-name'))
                DTURL = url.get('env-url')
                print("Downloading STAGING dashboards to local repo (" + blname + ")...")
                existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL, script_config["metadata"]["stagingname"] + " - " + blname)
                # Back up the current STAGING dashboards to the archive repo before overwriting them
                backup_dashboards(DTTOKEN, DTURL, existingdashboards)
                now = datetime.now()
                strnowdate = now.strftime("%Y%m%d")
                push_repo(archiverepo, strnowdate + "_Global dashboard as code auto-upload backup")
                print("Uploading STAGING dashboards to Dynatrace (" + blname + ")...")
                create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles, blname, script_config, args.department)

    print("DONE")

if __name__ == "__main__":
    main()