From 04dea5af1629a9d4e679f56975af5b192544547a Mon Sep 17 00:00:00 2001 From: Patryk Gudalewicz Date: Mon, 6 Mar 2023 12:24:33 +0100 Subject: [PATCH] Changing script to Business Lines --- createDash.py | 141 ++++++++++++++++++++++++++------------------------ 1 file changed, 72 insertions(+), 69 deletions(-) diff --git a/createDash.py b/createDash.py index 020f23b..606d56c 100644 --- a/createDash.py +++ b/createDash.py @@ -7,18 +7,19 @@ from datetime import datetime from git import Repo import os #set STAGING global dashboard name -DASHBOARD_NAME = "[STAGING]Global Offboard Reliability - Touchpoint " +DASHBOARD_NAME = "[STAGING]Global Offboard Reliability 2.0 " AUTHSTRING = config("BITBUCKET_USERNAME")+":"+config("BITBUCKET_TOKEN") CONFIG_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/shared_configuration.git" CONFIG_REPO_NAME = "shared_configuration" ARCHIVE_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/archive.git" ARCHIVE_REPO_NAME = "archive" +BUSINESS_LINES = {'DE-3':'My Journey','DE-7':'Connected Vehicle Platform','DE-4':'My Life','EC-DE':'China Services'} parser = argparse.ArgumentParser(description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for STAGING dashboard", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument("-R", "--rows", type=int, help="Number of rows per dashboard. 
If not specified, all rows will be added to single dashboard") parser.add_argument('--auto-upload', default=False, action='store_true', help="Auto upload to STAGING dashboard") -parser.add_argument('-T', '--touchpoint', type=str, help="Define touchpoint for which the dashboard should be updated: 'Mobile' or 'Vehicle'") +parser.add_argument('-D', '--department', type=str, required=True, help="Define department for which the dashboard should be updated: 'DE-3', 'DE-7', 'DE-4' or 'EC-DE'") args = parser.parse_args() def clone_repo_if_notexist(repourl, reponame): @@ -100,7 +101,7 @@ def remove_dashboards(DTAPIToken, DTENV, dashboards): DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"] print(make_request(DTAPIURL,DTAPIToken,True,"delete",None)) -def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files): +def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files, businessline): if(files): for index, filename in enumerate(files,start=1): with open('./'+filename) as file: @@ -123,7 +124,7 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files): print("Dashboard for file: "+filename + " not found.") newdashboard = { "dashboardMetadata":{ - "name": DASHBOARD_NAME+ args.touchpoint + "#" + str(index), + "name": DASHBOARD_NAME+ businessline + " #" + str(index), "owner": "PATRYK.GUDALEWICZ@partner.bmw.de" }, "tiles":[] @@ -261,17 +262,17 @@ def create_default_tiles(): newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 7 , 20 , 2), "tileFilter": {}, "markdown": "# EMEA" }) newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 7 , 4 , 1), "tileFilter": {} }) newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 11 , 12 , 1), "tileFilter": {} }) - newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , 
"configured": "true" , "bounds": get_bounds(2 , 23 , 4 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 23 , 4 , 1), "tileFilter": {} }) # NORTH AMERICA HUB - newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 27 , 20 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" }) - newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 27 , 4 , 1), "tileFilter": {} }) - newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 31 , 12 , 1), "tileFilter": {} }) - newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 43 , 4 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 29 , 20 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" }) + newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 29 , 4 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 33 , 12 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 45 , 4 , 1), "tileFilter": {} }) # CHINA HUB - newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 47 , 20 , 2), "tileFilter": {}, "markdown": "# CHINA" }) - newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 47 , 4 , 1), "tileFilter": {} }) - newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" 
,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 51 , 12 , 1), "tileFilter": {} }) - newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 63 , 4 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 51 , 20 , 2), "tileFilter": {}, "markdown": "# CHINA" }) + newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 51 , 4 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 55 , 12 , 1), "tileFilter": {} }) + newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 67 , 4 , 1), "tileFilter": {} }) return newDashboardTiles @@ -292,12 +293,12 @@ def main(slo_path): }, "naprod": { - "offset": 20, + "offset": 22, "remote_url": 'https://wgv50241.live.dynatrace.com' }, "cnprod": { - "offset": 40, + "offset": 44, "remote_url": 'https://dynatrace-cn-int.bmwgroup.com:443/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b' } } @@ -308,67 +309,69 @@ def main(slo_path): dahboardcount = 1 rowcount = 0 - currindex = 0 boundindex = 1 generatedfiles = [] if(args.rows is not None): rowcount = args.rows - for slo_name, configuration in slo_doc.items(): - if "TP_" + args.touchpoint in slo_name: - slo_index = configuration["index"] - currindex = slo_index - if rowcount > 0 and slo_index > rowcount: - dashboard_json = create_default_tiles() - rowcount = rowcount+args.rows - dahboardcount = dahboardcount+1 - boundindex = 1 - slo_display = configuration["displayname"] - slo_department = configuration["department"] - #timeframe_ytd = configuration["yearstart"] + " 00:00 to now" - timeframe_ytd = "-24h" - slo_graphThreshold_SingleValue = 
get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"]) - slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"]) - - if len(configuration["hubs"]) > 0: - dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"])) - for hub,tiles in configuration["hubs"].items(): - if 'actual' in tiles["tiles"]: - dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue)) - if "graph" in tiles["tiles"]: - dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph)) - if "ytd" in tiles["tiles"]: - dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue)) - boundindex = boundindex+1 - if rowcount > 0 and slo_index == rowcount: - with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file: - json.dump(dashboard_json, file, indent=2) - generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json") + blname = BUSINESS_LINES[args.department] + blvalue = args.department + if(blname and blvalue): + for slo_name, configuration in slo_doc.items(): + if configuration['department'].startswith(blvalue): + print("Dashboard #"+str(dahboardcount)+" : Configuring SLO "+str(boundindex) +" of "+str(rowcount)) + if rowcount 
> 0 and boundindex > rowcount: + dashboard_json = create_default_tiles() + dahboardcount = dahboardcount+1 + boundindex = 1 + slo_display = configuration["displayname"] + slo_department = configuration["department"] + #timeframe_ytd = configuration["yearstart"] + " 00:00 to now" + timeframe_ytd = "-24h" + slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"]) + slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"]) + + if len(configuration["hubs"]) > 0: + dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"])) + for hub,tiles in configuration["hubs"].items(): + if 'actual' in tiles["tiles"]: + dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue)) + if "graph" in tiles["tiles"]: + dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph)) + if "ytd" in tiles["tiles"]: + dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue)) + boundindex = boundindex+1 + if rowcount > 0 and boundindex > rowcount: + with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file: + json.dump(dashboard_json, file, indent=2) + 
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json") - if rowcount == 0 or (args.rows is not None and currindex%args.rows != 0): - with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file: - json.dump(dashboard_json, file, indent=2) - generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json") + if rowcount == 0 or (args.rows is not None and boundindex%args.rows != 0): + with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file: + json.dump(dashboard_json, file, indent=2) + generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json") - if args.auto_upload: - print("Getting existing STAGING dashboards from Dynatrace") - with open('./environment.yaml') as file: - doc = yaml.safe_load(file) + if args.auto_upload: + print("Getting existing STAGING dashboards from Dynatrace") + with open('./environment.yaml') as file: + doc = yaml.safe_load(file) - for item, doc in doc.items(): - token = dict(doc[2]) - url = dict(doc[1]) - print("Crawling through: " + item) - print("Gather data, hold on a minute") - DTTOKEN = config(token.get('env-token-name')) - DTURL = url.get('env-url') - - existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME + args.touchpoint) - print("Uploading STAGING dashboards to Dynatrace...") - backup_dashboards(DTTOKEN, DTURL, existingdashboards) - now=datetime.now() - strnowdate = now.strftime("%Y%m%d") - push_repo(archiverepo, strnowdate+"_Global dashboard as code auto-upload backup") - create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles) + for item, doc in doc.items(): + token = dict(doc[2]) + url = dict(doc[1]) + print("Crawling through: " + item) + print("Gather data, hold on a minute") + DTTOKEN = config(token.get('env-token-name')) + DTURL = url.get('env-url') + print("Downloading STAGING dashboards to local repo ("+blname+")...") + existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME +blname) + print("Uploading 
STAGING dashboards to Dynatrace ("+blname+")...") + backup_dashboards(DTTOKEN, DTURL, existingdashboards) + now=datetime.now() + strnowdate = now.strftime("%Y%m%d") + push_repo(archiverepo, strnowdate+"_Global dashboard as code auto-upload backup") + create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles, blname) + else: + print("ERROR: Could not find Business line for given department.") if __name__ == "__main__":