Pull request #1: OPMAAS-3253

Merge in OPAPM/global_dashboard_as_code from OPMAAS-3253 to master
Reviewed on Dynatrace with Andreas Danzer

* commit '6ded7c9f295b92aee9b362711b0f06589fe31a6f':
  Adjusting timeframe and column spacing
  Dashboard name adjustment
  readme update
  Changing script to Business Lines
OPMAAS-3988
PATRYK GUDALEWICZ (ext.) 2023-03-06 12:03:34 +00:00
commit ec54e89232
2 changed files with 79 additions and 76 deletions

File: createDash.py

@@ -7,18 +7,19 @@ from datetime import datetime
 from git import Repo
 import os
 #set STAGING global dashboard name
-DASHBOARD_NAME = "[STAGING]Global Offboard Reliability - Touchpoint "
+DASHBOARD_NAME = "[STAGING]Global Offboard Reliability 2.0 - "
 AUTHSTRING = config("BITBUCKET_USERNAME")+":"+config("BITBUCKET_TOKEN")
 CONFIG_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/shared_configuration.git"
 CONFIG_REPO_NAME = "shared_configuration"
 ARCHIVE_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/archive.git"
 ARCHIVE_REPO_NAME = "archive"
+BUSINESS_LINES = {'DE-3':'My Journey','DE-7':'Connected Vehicle Platform','DE-4':'My Life','EC-DE':'China Services'}

 parser = argparse.ArgumentParser(description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for STAGING dashboard",
                                  formatter_class=argparse.ArgumentDefaultsHelpFormatter)
 parser.add_argument("-R", "--rows", type=int, help="Number of rows per dashboard. If not specified, all rows will be added to single dashboard")
 parser.add_argument('--auto-upload', default=False, action='store_true', help="Auto upload to STAGING dashboard")
-parser.add_argument('-T', '--touchpoint', type=str, help="Define touchpoint for which the dashboard should be updated: 'Mobile' or 'Vehicle'")
+parser.add_argument('-D', '--department', type=str, required=True, help="Define department for which the dashboard should be updated: 'DE-3', 'DE-7', 'DE-4' or 'EC-DE'")
 args = parser.parse_args()

 def clone_repo_if_notexist(repourl, reponame):
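The new BUSINESS_LINES constant maps the -D/--department codes to the business-line display names used in the dashboard titles. As a standalone sketch (illustrative, not part of the commit): a plain BUSINESS_LINES[...] lookup raises KeyError for an unknown code, whereas dict.get() returns None, which is the kind of value a guard like the script's later if(blname and blvalue) can actually catch.

```python
# Hypothetical standalone sketch of the department-code lookup;
# mirrors the BUSINESS_LINES mapping added in this commit.
BUSINESS_LINES = {'DE-3': 'My Journey', 'DE-7': 'Connected Vehicle Platform',
                  'DE-4': 'My Life', 'EC-DE': 'China Services'}

def resolve_business_line(department: str):
    # .get() returns None for unknown codes instead of raising KeyError,
    # so callers can branch on the result.
    return BUSINESS_LINES.get(department)

print(resolve_business_line('DE-3'))   # My Journey
print(resolve_business_line('DE-9'))   # None -> caller can report the error
```

An alternative would be to validate at parse time with argparse's choices=BUSINESS_LINES.keys(), which rejects unknown codes before any lookup runs.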
@@ -100,7 +101,7 @@ def remove_dashboards(DTAPIToken, DTENV, dashboards):
         DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
         print(make_request(DTAPIURL,DTAPIToken,True,"delete",None))

-def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
+def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files, businessline):
     if(files):
         for index, filename in enumerate(files,start=1):
             with open('./'+filename) as file:
@@ -123,7 +124,7 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
                 print("Dashboard for file: "+filename + " not found.")
                 newdashboard = {
                     "dashboardMetadata":{
-                        "name": DASHBOARD_NAME+ args.touchpoint + "#" + str(index),
+                        "name": DASHBOARD_NAME+ businessline + " #" + str(index),
                         "owner": "PATRYK.GUDALEWICZ@partner.bmw.de"
                     },
                     "tiles":[]
@@ -260,18 +261,18 @@ def create_default_tiles():
     # EMEA HUB
     newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 7 , 20 , 2), "tileFilter": {}, "markdown": "# EMEA" })
     newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 7 , 4 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 11 , 12 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 23 , 4 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 11 , 12 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 23 , 4 , 1), "tileFilter": {} })

     # NORTH AMERICA HUB
-    newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 27 , 20 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" })
-    newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 27 , 4 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 31 , 12 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 43 , 4 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 28 , 20 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" })
+    newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 28 , 4 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 32 , 12 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 44 , 4 , 1), "tileFilter": {} })

     # CHINA HUB
-    newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 47 , 20 , 2), "tileFilter": {}, "markdown": "# CHINA" })
-    newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 47 , 4 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Reliability Graph (24 hrs)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 51 , 12 , 1), "tileFilter": {} })
-    newDashboardTiles.append({ "name": "Last 24 hrs" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 63 , 4 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 49 , 20 , 2), "tileFilter": {}, "markdown": "# CHINA" })
+    newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 49 , 4 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 53 , 12 , 1), "tileFilter": {} })
+    newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 65 , 4 , 1), "tileFilter": {} })
     return newDashboardTiles
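These header positions have to stay aligned with the per-hub "offset" values in main() (next hunk), which is the column-spacing adjustment this commit makes: against the shared base column 7, NORTH AMERICA moves to 7 + 21 = 28 and CHINA to 7 + 42 = 49. get_bounds() itself is not part of this diff; the following is only a plausible sketch, assuming (top, left, width, height) arguments in grid cells and the 38 px grid cell commonly used for classic Dynatrace dashboard tiles.

```python
# Plausible sketch of the get_bounds() helper (NOT from this diff).
# Assumption: (top, left, width, height) in grid cells, 38 px per cell.
GRID = 38  # assumed cell size in pixels

def get_bounds(top, left, width, height):
    # Convert grid coordinates to the pixel bounds object a tile expects.
    return {
        "top": top * GRID,
        "left": left * GRID,
        "width": width * GRID,
        "height": height * GRID,
    }

print(get_bounds(0, 7, 20, 2))   # EMEA header band
print(get_bounds(0, 28, 20, 2))  # NORTH AMERICA header band (base 7 + offset 21)
```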
@@ -292,83 +293,85 @@ def main(slo_path):
         },
         "naprod":
         {
-            "offset": 20,
+            "offset": 21,
             "remote_url": 'https://wgv50241.live.dynatrace.com'
         },
         "cnprod":
         {
-            "offset": 40,
+            "offset": 42,
             "remote_url": 'https://dynatrace-cn-int.bmwgroup.com:443/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b'
         }
     }
     timeframe_actual = "-1h"
-    timeframe_graph = "-24h"
+    timeframe_graph = "-3d"
     dahboardcount = 1
     rowcount = 0
-    currindex = 0
     boundindex = 1
     generatedfiles = []
     if(args.rows is not None):
         rowcount = args.rows
-    for slo_name, configuration in slo_doc.items():
-        if "TP_" + args.touchpoint in slo_name:
-            slo_index = configuration["index"]
-            currindex = slo_index
-            if rowcount > 0 and slo_index > rowcount:
-                dashboard_json = create_default_tiles()
-                rowcount = rowcount+args.rows
-                dahboardcount = dahboardcount+1
-                boundindex = 1
-            slo_display = configuration["displayname"]
-            slo_department = configuration["department"]
-            #timeframe_ytd = configuration["yearstart"] + " 00:00 to now"
-            timeframe_ytd = "-24h"
-            slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"])
-            slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"])
-            if len(configuration["hubs"]) > 0:
-                dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"]))
-                for hub,tiles in configuration["hubs"].items():
-                    if 'actual' in tiles["tiles"]:
-                        dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
-                    if "graph" in tiles["tiles"]:
-                        dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
-                    if "ytd" in tiles["tiles"]:
-                        dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
-                boundindex = boundindex+1
-            if rowcount > 0 and slo_index == rowcount:
-                with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
-                    json.dump(dashboard_json, file, indent=2)
-                generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
-    if rowcount == 0 or (args.rows is not None and currindex%args.rows != 0):
-        with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
-            json.dump(dashboard_json, file, indent=2)
-        generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
-    if args.auto_upload:
-        print("Getting existing STAGING dashboards from Dynatrace")
-        with open('./environment.yaml') as file:
-            doc = yaml.safe_load(file)
-            for item, doc in doc.items():
-                token = dict(doc[2])
-                url = dict(doc[1])
-                print("Crawling through: " + item)
-                print("Gather data, hold on a minute")
-                DTTOKEN = config(token.get('env-token-name'))
-                DTURL = url.get('env-url')
-                existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME + args.touchpoint)
-                print("Uploading STAGING dashboards to Dynatrace...")
-                backup_dashboards(DTTOKEN, DTURL, existingdashboards)
-                now=datetime.now()
-                strnowdate = now.strftime("%Y%m%d")
-                push_repo(archiverepo, strnowdate+"_Global dashboard as code auto-upload backup")
-                create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles)
+    blname = BUSINESS_LINES[args.department]
+    blvalue = args.department
+    if(blname and blvalue):
+        for slo_name, configuration in slo_doc.items():
+            if configuration['department'].startswith(blvalue):
+                print("Dashboard #"+str(dahboardcount)+" : Configurint SLO "+str(boundindex) +" of "+str(rowcount))
+                if rowcount > 0 and boundindex > rowcount:
+                    dashboard_json = create_default_tiles()
+                    dahboardcount = dahboardcount+1
+                    boundindex = 1
+                slo_display = configuration["displayname"]
+                slo_department = configuration["department"]
+                #timeframe_ytd = configuration["yearstart"] + " 00:00 to now"
+                timeframe_ytd = "-3d"
+                slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"])
+                slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"])
+                if len(configuration["hubs"]) > 0:
+                    dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"]))
+                    for hub,tiles in configuration["hubs"].items():
+                        if 'actual' in tiles["tiles"]:
+                            dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
+                        if "graph" in tiles["tiles"]:
+                            dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
+                        if "ytd" in tiles["tiles"]:
+                            dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
+                    boundindex = boundindex+1
+                if rowcount > 0 and boundindex > rowcount:
+                    with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
+                        json.dump(dashboard_json, file, indent=2)
+                    generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
+        if rowcount == 0 or (args.rows is not None and boundindex%args.rows != 0):
+            with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
+                json.dump(dashboard_json, file, indent=2)
+            generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
+        if args.auto_upload:
+            print("Getting existing STAGING dashboards from Dynatrace")
+            with open('./environment.yaml') as file:
+                doc = yaml.safe_load(file)
+                for item, doc in doc.items():
+                    token = dict(doc[2])
+                    url = dict(doc[1])
+                    print("Crawling through: " + item)
+                    print("Gather data, hold on a minute")
+                    DTTOKEN = config(token.get('env-token-name'))
+                    DTURL = url.get('env-url')
+                    print("Downloading STAGING dashboards to local repo ("+blname+")...")
+                    existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME +blname)
+                    print("Uploading STAGING dashboards to Dynatrace ("+blname+")...")
+                    backup_dashboards(DTTOKEN, DTURL, existingdashboards)
+                    now=datetime.now()
+                    strnowdate = now.strftime("%Y%m%d")
+                    push_repo(archiverepo, strnowdate+"_Global dashboard as code auto-upload backup")
+                    create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles, blname)
+    else:
+        print("ERROR: Could not find Business line for given department.")

 if __name__ == "__main__":
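Where the old loop keyed splitting off a per-SLO "index" field from the shared configuration, the rewritten loop filters SLOs by department prefix and counts matching rows itself via boundindex. Note that startswith is a prefix match, so any department string beginning with the given code is included. A standalone sketch with made-up sample data showing how the filter and the -R split now interact:

```python
# Standalone sketch with made-up sample data; not part of the commit.
slos = {
    'SLO_A': {'department': 'DE-3 My Journey'},
    'SLO_B': {'department': 'DE-3 My Journey'},
    'SLO_C': {'department': 'DE-7 CVP'},
}
blvalue = 'DE-3'
rows = 1  # -R / --rows

dahboardcount, boundindex = 1, 1
for name, cfg in slos.items():
    if cfg['department'].startswith(blvalue):  # prefix match, as in the commit
        if rows > 0 and boundindex > rows:     # row budget spent: start a fresh dashboard
            dahboardcount += 1
            boundindex = 1
        boundindex += 1
print(dahboardcount)  # 2 -> the two DE-3 SLOs split across 2 dashboards with -R 1
```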

File: README

@@ -10,7 +10,7 @@ While the PROD dashboard is adapted manually, the Staging dashboard is auto upda
 # Dashboard Splitting
 To enable flexibility for different screensizes, the script takes an -R or --ROWS parameter to define how many SLOs should be on one dedicated dashboard.
-If left empty, only one dashboard will be created, if entered a row the dashboard will be splitted to multiple dashboards and uploaded with the following name: [STAGING]Global Offboard Reliability - Touchpoint Mobile|Vehicle #1 ..#2..#3
+If left empty, only one dashboard will be created, if entered a row the dashboard will be splitted to multiple dashboards and uploaded with the following name: [STAGING]Global Offboard Reliability 2.0 - [Business line] #1 ..#2..#3

 # shared configuration
@@ -43,16 +43,16 @@ To provide authentication for API calls, create ".env" file in the script direct
 # Usage
-usage: createDash.py [-h] [-R ROWS] [--auto-upload] [-T TOUCHPOINT]
+usage: createDash.py [-h] [-R ROWS] [--auto-upload] -D DEPARTMENT

 Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for STAGING dashboard

-optional arguments:
+options:
   -h, --help            show this help message and exit
   -R ROWS, --rows ROWS  Number of rows per dashboard. If not specified, all rows will be added to single dashboard (default: None)
   --auto-upload         Auto upload to STAGING dashboard (default: False)
-  -T TOUCHPOINT, --touchpoint TOUCHPOINT
-                        Define touchpoint for which the dashboard should be updated: 'Mobile' or 'Vehicle' (default: None)
+  -D DEPARTMENT, --department DEPARTMENT
+                        Define department for which the dashboard should be updated: 'DE-3', 'DE-7', 'DE-4' or 'EC-DE' (default: None)

 # Files
 ## createDash.py
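Note that -D appears without brackets in the new usage line because the argument is now required. For reference, a typical staging run under the new interface would be `python createDash.py -D DE-3 -R 10 --auto-upload` (illustrative values): it writes dashboard_tiles_1.json, dashboard_tiles_2.json, ... and uploads them as "[STAGING]Global Offboard Reliability 2.0 - My Journey #1", "#2", and so on.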