Added auto-deletion of unused dashboards

OPMAAS-3253
Patryk Gudalewicz 2022-07-26 14:41:12 +02:00
parent f910b4306f
commit 558ab3ae6a
5 changed files with 94 additions and 61 deletions

.gitignore (new file)

@ -0,0 +1,3 @@
.env
dashboard_tiles_*
\[STAGING\]*
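These patterns keep generated artifacts out of version control: .env holds API tokens, dashboard_tiles_* matches the tile JSON files the generator writes, and \[STAGING\]* matches the timestamped backups created by backup_dashboards below (the brackets are escaped because [ opens a character class in gitignore glob syntax). Illustrative names of files these patterns catch:

dashboard_tiles_1.json
[STAGING]Global Offboard Reliability - Touchpoint Mobile #1_20220726_144112.json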

createDash.py

@ -3,6 +3,7 @@ from decouple import config
import json
import argparse
import requests
from datetime import datetime
# Set the STAGING global dashboard name
DASHBOARD_NAME = "[STAGING]Global Offboard Reliability - Touchpoint Mobile #"
parser = argparse.ArgumentParser(description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto-deployment works only for the STAGING dashboard",
@ -17,37 +18,20 @@ def load_slo_parameter(path):
slo_doc = yaml.safe_load(file)
return slo_doc
def make_request(url, headers,verify):
def make_request(url, DTAPIToken,verify, method, jsondata):
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
try:
response = requests.get(url, headers=headers,verify=verify)
response.raise_for_status()
except requests.exceptions.HTTPError as errh:
return "An Http Error occurred:" + repr(errh)
except requests.exceptions.ConnectionError as errc:
return "An Error Connecting to the API occurred:" + repr(errc)
except requests.exceptions.Timeout as errt:
return "A Timeout Error occurred:" + repr(errt)
except requests.exceptions.RequestException as err:
return "An Unknown Error occurred" + repr(err)
return response
def make_put_request(url, headers,verify, jsondata):
try:
response = requests.put(url, headers=headers,verify=verify, data=jsondata)
response.raise_for_status()
except requests.exceptions.HTTPError as errh:
return "An Http Error occurred:" + repr(errh)
except requests.exceptions.ConnectionError as errc:
return "An Error Connecting to the API occurred:" + repr(errc)
except requests.exceptions.Timeout as errt:
return "A Timeout Error occurred:" + repr(errt)
except requests.exceptions.RequestException as err:
return "An Unknown Error occurred" + repr(err)
return response
def make_post_request(url, headers,verify, jsondata):
try:
response = requests.post(url, headers=headers,verify=verify, data=jsondata)
if method == "get":
response = requests.get(url, headers=headers,verify=verify)
elif method == "post":
response = requests.post(url, headers=headers,verify=verify, data=jsondata)
elif method == "put":
response = requests.put(url, headers=headers,verify=verify, data=jsondata)
elif method == "delete":
response = requests.delete(url, headers=headers,verify=verify)
response.raise_for_status()
except requests.exceptions.HTTPError as errh:
return "An Http Error occurred:" + repr(errh)
@ -61,11 +45,7 @@ def make_post_request(url, headers,verify, jsondata):
return response
def get_all_dashboards_withname(DTAPIToken, DTENV,name):
DTAPIURL= DTENV + "api/config/v1/dashboards"
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
r = make_request(DTAPIURL,headers,True)
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
entityResponse = r.json()
result = []
if("dashboards" in entityResponse):
@ -74,7 +54,23 @@ def get_all_dashboards_withname(DTAPIToken, DTENV,name):
result.append(dashboard)
result = sorted(result, key=lambda x : x['name'], reverse=False)
return result
def backup_dashboards(DTAPIToken, DTENV, dashboards):
for dashboard in dashboards:
DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
entityResponse = r.json()
print("Downloaded dashboard from Dynatrace: "+entityResponse["dashboardMetadata"]["name"]+", creating backup...")
now=datetime.now()
strnow = now.strftime("%Y%m%d_%H%M%S")
with open(entityResponse["dashboardMetadata"]["name"]+"_"+strnow+".json", "w") as file:
json.dump(entityResponse, file, indent=2)
def remove_dashboards(DTAPIToken, DTENV, dashboards):
for dashboard in dashboards:
print("Removing STAGING dashboard from Dynatrace: "+dashboard["name"])
DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
print(make_request(DTAPIURL,DTAPIToken,True,"delete",None))
def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
backup_dashboards(DTAPIToken, DTENV, dashboards)
if(files):
for index, filename in enumerate(files,start=1):
with open('./'+filename) as file:
@ -84,16 +80,14 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
if existingdashboard:
print("Found dashboard for file: "+filename + ", Name: "+ existingdashboard["name"])
DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
r = make_request(DTAPIURL,headers,True)
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
entityResponse = r.json()
print("Downloaded dashboard details from Dynatrace: "+entityResponse["dashboardMetadata"]["name"])
entityResponse["tiles"] = tilesjson
print("Updating dashboard: "+entityResponse["dashboardMetadata"]["name"])
print(make_put_request(DTAPIURL,headers,True,json.dumps(entityResponse)))
print(make_request(DTAPIURL,DTAPIToken,True,"put",json.dumps(entityResponse)))
dashboards.remove(existingdashboard)
else:
print("Dashboard for file: "+filename + " not found.")
@ -107,7 +101,8 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
DTAPIURL = DTENV + "api/config/v1/dashboards"
newdashboard["tiles"] = tilesjson
print("Creating dashboard: "+newdashboard["dashboardMetadata"]["name"])
print(make_post_request(DTAPIURL,headers,True,json.dumps(newdashboard)))
print(make_request(DTAPIURL,DTAPIToken,True,"post",json.dumps(newdashboard)))
remove_dashboards(DTAPIToken, DTENV, dashboards)
@ -286,34 +281,36 @@ def main(slo_path):
generatedfiles = []
if(args.rows is not None):
rowcount = args.rows
for slo_name, config in slo_doc.items():
slo_index = config["index"]
for slo_name, configuration in slo_doc.items():
slo_index = configuration["index"]
currindex = slo_index
if rowcount > 0 and slo_index > rowcount:
with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
dashboard_json = create_default_tiles()
rowcount = rowcount+args.rows
dahboardcount = dahboardcount+1
boundindex = 1
slo_display = config["displayname"]
slo_department = config["department"]
timeframe_ytd = config["yearstart"] + " 00:00 to now"
slo_display = configuration["displayname"]
slo_department = configuration["department"]
timeframe_ytd = configuration["yearstart"] + " 00:00 to now"
slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(config["thresholds"]["single_value"])
slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(config["thresholds"]["graph_value"])
slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"])
slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"])
if len(config["hubs"]) > 0:
dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), config["ops_dashboard"]["emea"], config["ops_dashboard"]["na"], config["ops_dashboard"]["cn"],config["doc_url"]))
for hub,tiles in config["hubs"].items():
if len(configuration["hubs"]) > 0:
dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"]))
for hub,tiles in configuration["hubs"].items():
if 'actual' in tiles["tiles"]:
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
if "graph" in tiles["tiles"]:
dashboard_json.append(get_DataExplorerTile_Graph(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
if "ytd" in tiles["tiles"]:
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
boundindex = boundindex+1
if rowcount > 0 and slo_index == rowcount:
with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
if rowcount == 0 or (args.rows is not None and currindex%args.rows != 0):
with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
@ -329,7 +326,7 @@ def main(slo_path):
url = dict(doc[1])
print("Crawling through: " + item)
print("Gather data, hold on a minute")
DTTOKEN = token.get('env-token-name')
DTTOKEN = config(token.get('env-token-name'))
DTURL = url.get('env-url')
existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME)
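The three near-identical request wrappers (make_request, make_put_request, make_post_request) are consolidated into a single make_request that builds the auth header itself and dispatches on an HTTP method string, and create_or_update_dashboard now backs up every matching dashboard before touching it and deletes whatever remains unmatched at the end. A minimal sketch of the new call pattern (the URL and token below are placeholders; note that on failure the helper returns an error string instead of raising, so callers should type-check before calling .json()):

DTENV = "https://example.live.dynatrace.com/"   # placeholder environment URL
DTAPIToken = "dt0c01.EXAMPLE"                   # placeholder API token

r = make_request(DTENV + "api/config/v1/dashboards", DTAPIToken, True, "get", None)
if isinstance(r, str):       # one of the except branches fired
    print(r)                 # the error message
else:
    print(r.json())          # the parsed dashboard list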

environment.yaml (new file)

@ -0,0 +1,4 @@
globaldashboard:
- name: "globaldashboard"
- env-url: "https://jyy23483.live.dynatrace.com/"
- env-token-name: "GLOBAL_CONFIG_TOKEN"
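main() consumes this file as a mapping whose value is a list of single-key maps, which is why the visible hunk wraps entries in dict() before calling .get(). A sketch of the parsing under that assumption (the token lookup via doc[2] is inferred, since that line sits outside the shown hunk):

import yaml
from decouple import config  # python-decouple resolves names against the .env file

with open("environment.yaml") as f:
    environments = yaml.safe_load(f)

for item, doc in environments.items():
    # doc is a list of single-key maps: [{'name': ...}, {'env-url': ...}, {'env-token-name': ...}]
    url = dict(doc[1])                             # {'env-url': 'https://...'}
    token = dict(doc[2])                           # inferred: {'env-token-name': 'GLOBAL_CONFIG_TOKEN'}
    DTURL = url.get("env-url")
    DTTOKEN = config(token.get("env-token-name"))  # reads the token value from .env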

README.md

@ -6,6 +6,18 @@ The Global Dashboard splits into 2 different dashboards:
- STAGING
- PROD
While the PROD dashboard is adapted manually, the STAGING dashboard is auto-updated when this script runs.
# Prerequisites
## Python packages
Before executing the scripts, the Python requirements have to be satisfied. To do so, run the following command:
pip install -r requirements.txt
## .env file
To provide authentication for API calls, create a ".env" file in the script directory with the following definition:
<ENV NAME>=<ENV TOKEN>
<ENV NAME> is the name of an environment variable. This name should be referenced in the "environment.yaml" file as the "env-token-name" parameter.
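For the environment.yaml added in this commit, the .env file would contain a single line (the token value below is a placeholder, not a real token):

GLOBAL_CONFIG_TOKEN=dt0c01.EXAMPLE.TOKEN

python-decouple's config() resolves "GLOBAL_CONFIG_TOKEN" to this value at runtime, which is why .env is listed in the new .gitignore and must never be committed.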
# Usage
@ -76,3 +88,15 @@ Definition Description:
## createDash.py
This script generates the "tiles" section of a Dynatrace dashboard and takes slo_parameter.yaml as its input parameter (there is no need to pass it manually).
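A typical invocation might look like the following (--rows and --auto-upload appear in the script and the docs below; any other defaults are assumptions):

python createDash.py --rows 10 --auto-upload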
## environment.yaml
File containing the environments targeted by --auto-upload.
Each environment entry:
- name: string # name of the environment
- env-url: string # URL of the environment
- env-token-name: string # name of the environment variable containing the API token
## requirements.txt
File containing the Python packages required by the scripts (see Prerequisites above).

requirements.txt (new file)

@ -0,0 +1,4 @@
python-decouple
pyyaml
requests
# datetime and argparse ship with the Python standard library; no pip install needed
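A quick sanity check that the dependencies installed correctly (pyyaml imports as yaml and python-decouple as decouple):

pip install -r requirements.txt
python -c "import decouple, yaml, requests"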