Added splitting and auto-upload features. Note: the environment.yaml file is not in the repository; create your own!

OPMAAS-3253
Patryk Gudalewicz 2022-07-26 00:15:18 +02:00
parent 5e914ed992
commit f910b4306f
2 changed files with 158 additions and 10 deletions
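
The commit note says environment.yaml must be created locally. Judging from how the auto-upload step reads it (each top-level key names an environment; entry 1 of its list holds the env-url mapping and entry 2 the env-token-name mapping), a minimal sketch could look like this; the environment name, the unused first entry, and all values are placeholders:

# environment.yaml (sketch only; create your own)
staging:
  - env-name: staging                            # entry 0 is not read by the script
  - env-url: https://example.live.dynatrace.com/ # must end with a trailing slash
  - env-token-name: dt0c01.SAMPLE.TOKEN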

createDash.py

@@ -1,13 +1,115 @@
import yaml
from decouple import config
import json
import argparse
import requests

# set the STAGING global dashboard name
DASHBOARD_NAME = "[STAGING]Global Offboard Reliability - Touchpoint Mobile #"

parser = argparse.ArgumentParser(
    description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for the STAGING dashboard",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-R", "--rows", type=int, help="Number of rows per dashboard. If not specified, all rows will be added to a single dashboard")
parser.add_argument("--auto-upload", default=False, action="store_true", help="Auto upload to the STAGING dashboard")
args = parser.parse_args()
def load_slo_parameter(path):
    # the first part is to read a yaml and only select latest, valid config
    with open(path) as file:
        slo_doc = yaml.safe_load(file)
    return slo_doc
def make_request(url, headers, verify):
    # GET helper: returns the Response on success, or a descriptive
    # error string if the request fails
    try:
        response = requests.get(url, headers=headers, verify=verify)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred: " + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred: " + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred: " + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred: " + repr(err)
    return response

def make_put_request(url, headers, verify, jsondata):
    # PUT helper used to update an existing dashboard
    try:
        response = requests.put(url, headers=headers, verify=verify, data=jsondata)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred: " + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred: " + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred: " + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred: " + repr(err)
    return response

def make_post_request(url, headers, verify, jsondata):
    # POST helper used to create a new dashboard
    try:
        response = requests.post(url, headers=headers, verify=verify, data=jsondata)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred: " + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred: " + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred: " + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred: " + repr(err)
    return response
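
# The three helpers above differ only in the HTTP verb; a possible
# consolidation (an editor's sketch, not part of this commit; the name
# make_any_request is hypothetical) could route all of them through
# requests.request. RequestException is the base class of the errors
# caught above, so a single except clause covers them all.
def make_any_request(method, url, headers, verify, jsondata=None):
    # method is "GET", "PUT" or "POST"; mirrors the helpers above by
    # returning the Response on success and an error string on failure
    try:
        response = requests.request(method, url, headers=headers, verify=verify, data=jsondata)
        response.raise_for_status()
        return response
    except requests.exceptions.RequestException as err:
        return "A Request Error occurred: " + repr(err)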
def get_all_dashboards_withname(DTAPIToken, DTENV, name):
    # list every dashboard in the environment whose name starts with the given prefix
    DTAPIURL = DTENV + "api/config/v1/dashboards"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }
    r = make_request(DTAPIURL, headers, True)
    entityResponse = r.json()
    result = []
    if "dashboards" in entityResponse:
        for dashboard in entityResponse["dashboards"]:
            if dashboard["name"].startswith(name):
                result.append(dashboard)
    result = sorted(result, key=lambda x: x['name'])
    return result
def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
    if files:
        # shared headers for all dashboard API calls (hoisted so the
        # create branch can use them too)
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTAPIToken
        }
        for index, filename in enumerate(files, start=1):
            with open('./' + filename) as file:
                tilesjson = json.load(file)
            # dashboards in the split set are numbered "... #1", "... #2", ...
            existingdashboard = next((dashboard for dashboard in dashboards if dashboard["name"].endswith("#" + str(index))), None)
            if existingdashboard:
                print("Found dashboard for file: " + filename + ", Name: " + existingdashboard["name"])
                DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
                r = make_request(DTAPIURL, headers, True)
                entityResponse = r.json()
                print("Downloaded dashboard details from Dynatrace: " + entityResponse["dashboardMetadata"]["name"])
                entityResponse["tiles"] = tilesjson
                print("Updating dashboard: " + entityResponse["dashboardMetadata"]["name"])
                print(make_put_request(DTAPIURL, headers, True, json.dumps(entityResponse)))
            else:
                print("Dashboard for file: " + filename + " not found.")
                newdashboard = {
                    "dashboardMetadata": {
                        "name": DASHBOARD_NAME + str(index),
                        "owner": "PATRYK.GUDALEWICZ@partner.bmw.de"
                    },
                    "tiles": tilesjson
                }
                DTAPIURL = DTENV + "api/config/v1/dashboards"
                print("Creating dashboard: " + newdashboard["dashboardMetadata"]["name"])
                print(make_post_request(DTAPIURL, headers, True, json.dumps(newdashboard)))
def get_bounds(grid_row, grid_column, tile_columnwidth, tile_rowheight):
    grid_brick = 38
@@ -151,6 +253,7 @@ def create_default_tiles():
    return newDashboardTiles

def main(slo_path):
    print("Generating dashboard tiles...")
    slo_doc = load_slo_parameter(slo_path)
    dashboard_json = create_default_tiles()
@@ -176,28 +279,63 @@ def main(slo_path):
    timeframe_actual = "-1h"
    timeframe_graph = "-10d to now"

    dashboardcount = 1
    rowcount = 0
    currindex = 0
    boundindex = 1
    generatedfiles = []
    if args.rows is not None:
        rowcount = args.rows
    for slo_name, config in slo_doc.items():
        slo_index = config["index"]
        currindex = slo_index
        # row limit reached: flush the current dashboard to a file and start a new one
        if rowcount > 0 and slo_index > rowcount:
            with open("dashboard_tiles_" + str(dashboardcount) + ".json", "w") as file:
                json.dump(dashboard_json, file, indent=2)
            generatedfiles.append("dashboard_tiles_" + str(dashboardcount) + ".json")
            dashboard_json = create_default_tiles()
            rowcount += args.rows
            dashboardcount += 1
            boundindex = 1
        slo_display = config["displayname"]
        slo_department = config["department"]
        timeframe_ytd = config["yearstart"] + " 00:00 to now"
        slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(config["thresholds"]["single_value"])
        slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(config["thresholds"]["graph_value"])
        if len(config["hubs"]) > 0:
            dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(boundindex * 3, 0, 7, 3), config["ops_dashboard"]["emea"], config["ops_dashboard"]["na"], config["ops_dashboard"]["cn"], config["doc_url"]))
            for hub, tiles in config["hubs"].items():
                if 'actual' in tiles["tiles"]:
                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(boundindex * 3, 7 + hub_config[hub]["offset"], 4, 3), timeframe_actual, slo_graphThreshold_SingleValue))
                if "graph" in tiles["tiles"]:
                    dashboard_json.append(get_DataExplorerTile_Graph(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(boundindex * 3, 11 + hub_config[hub]["offset"], 12, 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
                if "ytd" in tiles["tiles"]:
                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(boundindex * 3, 23 + hub_config[hub]["offset"], 4, 3), timeframe_ytd, slo_graphThreshold_SingleValue))
        boundindex += 1
    # flush any rows accumulated since the last write; checking boundindex rather than
    # currindex % args.rows also covers the case where the SLO count is an exact multiple of --rows
    if boundindex > 1:
        with open("dashboard_tiles_" + str(dashboardcount) + ".json", "w") as file:
            json.dump(dashboard_json, file, indent=2)
        generatedfiles.append("dashboard_tiles_" + str(dashboardcount) + ".json")
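    # Illustration: with --rows 10 and 25 SLO entries, the loop flushes
    # dashboard_tiles_1.json and dashboard_tiles_2.json, and the block above
    # writes the remaining five rows to dashboard_tiles_3.json.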
    if args.auto_upload:
        print("Getting existing STAGING dashboards from Dynatrace")
        with open('./environment.yaml') as file:
            environments = yaml.safe_load(file)
        # environment.yaml maps each environment name to a list whose entries
        # hold the env-url and env-token-name mappings
        for item, envconfig in environments.items():
            token = dict(envconfig[2])
            url = dict(envconfig[1])
            print("Crawling through: " + item)
            print("Gathering data, hold on a minute")
            DTTOKEN = token.get('env-token-name')
            DTURL = url.get('env-url')
            existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL, DASHBOARD_NAME)
            print("Uploading STAGING dashboards to Dynatrace...")
            create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles)
if __name__ == "__main__":
    main('./slo_parameter.yaml')

@@ -7,6 +7,16 @@ The Global Dashboard splits in 2 different dashboards:
- PROD
While the PROD dashboard is adapted manually, the STAGING dashboard is auto-updated when this script runs.
# Usage
python createDash.py [-h] [-R ROWS] [--auto-upload]

options:
  -h, --help            show this help message and exit
  -R ROWS, --rows ROWS  Number of rows per dashboard. If not specified, all rows will be added to a single dashboard
                        (default: None)
  --auto-upload         Auto upload to the STAGING dashboard (default: False)
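
For example, to split the output into dashboards of 10 rows each and upload them straight to STAGING (the row count is only an illustration):

python createDash.py -R 10 --auto-upload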
# Files
## slo_parameter.yaml