Added auto-deletion of unused dashboards

parent f910b4306f
commit 558ab3ae6a
.gitignore
@@ -0,0 +1,3 @@
+.env
+dashboard_tiles_*
+\[STAGING\]*
createDash.py
@@ -3,6 +3,7 @@ from decouple import config
 import json
 import argparse
 import requests
+from datetime import datetime
 #set STAGING global dashboard name
 DASHBOARD_NAME = "[STAGING]Global Offboard Reliability - Touchpoint Mobile #"
 parser = argparse.ArgumentParser(description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for STAGING dashboard",
@@ -17,37 +18,20 @@ def load_slo_parameter(path):
         slo_doc = yaml.safe_load(file)
 
     return slo_doc
-def make_request(url, headers,verify):
+def make_request(url, DTAPIToken,verify, method, jsondata):
+    headers = {
+        'Content-Type': 'application/json',
+        'Authorization': 'Api-Token ' + DTAPIToken
+    }
     try:
-        response = requests.get(url, headers=headers,verify=verify)
-        response.raise_for_status()
-    except requests.exceptions.HTTPError as errh:
-        return "An Http Error occurred:" + repr(errh)
-    except requests.exceptions.ConnectionError as errc:
-        return "An Error Connecting to the API occurred:" + repr(errc)
-    except requests.exceptions.Timeout as errt:
-        return "A Timeout Error occurred:" + repr(errt)
-    except requests.exceptions.RequestException as err:
-        return "An Unknown Error occurred" + repr(err)
-
-    return response
-def make_put_request(url, headers,verify, jsondata):
-    try:
-        response = requests.put(url, headers=headers,verify=verify, data=jsondata)
-        response.raise_for_status()
-    except requests.exceptions.HTTPError as errh:
-        return "An Http Error occurred:" + repr(errh)
-    except requests.exceptions.ConnectionError as errc:
-        return "An Error Connecting to the API occurred:" + repr(errc)
-    except requests.exceptions.Timeout as errt:
-        return "A Timeout Error occurred:" + repr(errt)
-    except requests.exceptions.RequestException as err:
-        return "An Unknown Error occurred" + repr(err)
-
-    return response
-def make_post_request(url, headers,verify, jsondata):
-    try:
-        response = requests.post(url, headers=headers,verify=verify, data=jsondata)
+        if method == "get":
+            response = requests.get(url, headers=headers,verify=verify)
+        elif method == "post":
+            response = requests.post(url, headers=headers,verify=verify, data=jsondata)
+        elif method == "put":
+            response = requests.put(url, headers=headers,verify=verify, data=jsondata)
+        elif method == "delete":
+            response = requests.delete(url, headers=headers,verify=verify)
         response.raise_for_status()
     except requests.exceptions.HTTPError as errh:
         return "An Http Error occurred:" + repr(errh)
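Note: a quick sketch of how the consolidated helper is called (illustrative values; the helper returns a requests.Response on success but an error string from the except branches, and an unrecognized method would leave response unbound):

    r = make_request(DTURL + "api/config/v1/dashboards", DTAPIToken, True, "get", None)
    if isinstance(r, str):          # error message from an except branch
        print(r)
    else:
        print(r.json().get("dashboards", []))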
@@ -61,11 +45,7 @@ def make_post_request(url, headers,verify, jsondata):
     return response
 def get_all_dashboards_withname(DTAPIToken, DTENV,name):
     DTAPIURL= DTENV + "api/config/v1/dashboards"
-    headers = {
-        'Content-Type': 'application/json',
-        'Authorization': 'Api-Token ' + DTAPIToken
-    }
-    r = make_request(DTAPIURL,headers,True)
+    r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
     entityResponse = r.json()
     result = []
     if("dashboards" in entityResponse):
@@ -74,7 +54,23 @@ def get_all_dashboards_withname(DTAPIToken, DTENV,name):
             result.append(dashboard)
     result = sorted(result, key=lambda x : x['name'], reverse=False)
     return result
+def backup_dashboards(DTAPIToken, DTENV, dashboards):
+    for dashboard in dashboards:
+        DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
+        r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
+        entityResponse = r.json()
+        print("Downloaded dashboard from Dynatrace: "+entityResponse["dashboardMetadata"]["name"]+", creating backup...")
+        now=datetime.now()
+        strnow = now.strftime("%Y%m%d_%H%M%S")
+        with open(entityResponse["dashboardMetadata"]["name"]+"_"+strnow+".json", "w") as file:
+            json.dump(entityResponse, file, indent=2)
+def remove_dashboards(DTAPIToken, DTENV, dashboards):
+    for dashboard in dashboards:
+        print("Removing STAGING dashboard from Dynatrace: "+dashboard["name"])
+        DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
+        print(make_request(DTAPIURL,DTAPIToken,True,"delete",None))
 def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
+    backup_dashboards(DTAPIToken, DTENV, dashboards)
     if(files):
         for index, filename in enumerate(files,start=1):
             with open('./'+filename) as file:
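Note: the backup filename produced by backup_dashboards combines the dashboard name with a second-resolution timestamp; a minimal sketch with example values:

    from datetime import datetime
    name = "[STAGING]Global Offboard Reliability - Touchpoint Mobile #1"   # example dashboard name
    strnow = datetime.now().strftime("%Y%m%d_%H%M%S")                      # e.g. "20240131_093000"
    print(name + "_" + strnow + ".json")                                   # file the backup JSON is written to

These \[STAGING\]* backup files are what the new .gitignore pattern keeps out of version control.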
@@ -84,16 +80,14 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
                 if existingdashboard:
                     print("Found dashboard for file: "+filename + ", Name: "+ existingdashboard["name"])
                     DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
-                    headers = {
-                        'Content-Type': 'application/json',
-                        'Authorization': 'Api-Token ' + DTAPIToken
-                    }
-                    r = make_request(DTAPIURL,headers,True)
+                    r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
                     entityResponse = r.json()
-                    print("Downloaded dashboard details from Dynatrace: "+entityResponse["dashboardMetadata"]["name"])
                     entityResponse["tiles"] = tilesjson
                     print("Updating dashboard: "+entityResponse["dashboardMetadata"]["name"])
-                    print(make_put_request(DTAPIURL,headers,True,json.dumps(entityResponse)))
+                    print(make_request(DTAPIURL,DTAPIToken,True,"put",json.dumps(entityResponse)))
+                    dashboards.remove(existingdashboard)
 
                 else:
                     print("Dashboard for file: "+filename + " not found.")
@@ -107,7 +101,8 @@ def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files):
                     DTAPIURL = DTENV + "api/config/v1/dashboards"
                     newdashboard["tiles"] = tilesjson
                     print("Creating dashboard: "+newdashboard["dashboardMetadata"]["name"])
-                    print(make_post_request(DTAPIURL,headers,True,json.dumps(newdashboard)))
+                    print(make_request(DTAPIURL,DTAPIToken,True,"post",json.dumps(newdashboard)))
+    remove_dashboards(DTAPIToken, DTENV, dashboards)
 
 
 
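Note: together with the hunks above, this gives the auto-deletion flow named in the commit message; a condensed sketch of the assumed sequence, using names from the diff:

    dashboards = get_all_dashboards_withname(DTTOKEN, DTURL, DASHBOARD_NAME)
    backup_dashboards(DTTOKEN, DTURL, dashboards)     # snapshot every matching dashboard first
    # each generated file updates its match, and dashboards.remove(existingdashboard)
    # drops it from the list, so whatever remains has no generated file behind it:
    remove_dashboards(DTTOKEN, DTURL, dashboards)     # delete the stale leftovers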
@@ -286,34 +281,36 @@ def main(slo_path):
     generatedfiles = []
     if(args.rows is not None):
         rowcount = args.rows
-    for slo_name, config in slo_doc.items():
-        slo_index = config["index"]
+    for slo_name, configuration in slo_doc.items():
+        slo_index = configuration["index"]
         currindex = slo_index
         if rowcount > 0 and slo_index > rowcount:
-            with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
-                json.dump(dashboard_json, file, indent=2)
-            generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
             dashboard_json = create_default_tiles()
            rowcount = rowcount+args.rows
             dahboardcount = dahboardcount+1
             boundindex = 1
-        slo_display = config["displayname"]
-        slo_department = config["department"]
-        timeframe_ytd = config["yearstart"] + " 00:00 to now"
+        slo_display = configuration["displayname"]
+        slo_department = configuration["department"]
+        timeframe_ytd = configuration["yearstart"] + " 00:00 to now"
 
-        slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(config["thresholds"]["single_value"])
-        slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(config["thresholds"]["graph_value"])
+        slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"])
+        slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"])
 
-        if len(config["hubs"]) > 0:
-            dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), config["ops_dashboard"]["emea"], config["ops_dashboard"]["na"], config["ops_dashboard"]["cn"],config["doc_url"]))
-            for hub,tiles in config["hubs"].items():
+        if len(configuration["hubs"]) > 0:
+            dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"]))
+            for hub,tiles in configuration["hubs"].items():
                 if 'actual' in tiles["tiles"]:
-                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
+                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
                 if "graph" in tiles["tiles"]:
-                    dashboard_json.append(get_DataExplorerTile_Graph(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
+                    dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
                 if "ytd" in tiles["tiles"]:
-                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, config["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
+                    dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
                 boundindex = boundindex+1
+        if rowcount > 0 and slo_index == rowcount:
+            with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
+                json.dump(dashboard_json, file, indent=2)
+            generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
 
     if rowcount == 0 or (args.rows is not None and currindex%args.rows != 0):
         with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
             json.dump(dashboard_json, file, indent=2)
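Note: a worked example of the relocated split logic, assuming --rows 5 and 12 SLO entries with consecutive index values:

    # index 5  == rowcount (5)   -> dashboard_tiles_1.json written (rows 1-5)
    # index 6  >  rowcount (5)   -> fresh tile list, rowcount = 10, dahboardcount += 1
    # index 10 == rowcount (10)  -> dashboard_tiles_2.json written (rows 6-10)
    # after the loop: currindex % args.rows = 12 % 5 != 0
    #                              -> dashboard_tiles_3.json written (rows 11-12)

Flushing on slo_index == rowcount, after the row's tiles are appended, also covers the case where the final SLO exactly fills a page, which the old placement missed.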
@@ -329,7 +326,7 @@ def main(slo_path):
         url = dict(doc[1])
         print("Crawling through: " + item)
         print("Gather data, hold on a minute")
-        DTTOKEN = token.get('env-token-name')
+        DTTOKEN = config(token.get('env-token-name'))
         DTURL = url.get('env-url')
 
         existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME)
environment.yaml
@@ -0,0 +1,4 @@
+globaldashboard:
+- name: "globaldashboard"
+- env-url: "https://jyy23483.live.dynatrace.com/"
+- env-token-name: "GLOBAL_CONFIG_TOKEN"
readme.md
@@ -6,6 +6,18 @@ The Global Dashboard splits in 2 different dashboards:
 - STAGING
 - PROD
 While the PROD dashboard is adapted manually, the STAGING dashboard is auto-updated whenever this script runs.
+# Prerequisites
+
+## Python packages
+Before executing the scripts, the Python requirements have to be satisfied. To do so, execute the following command:
+    pip install -r requirements.txt
+
+## .env file
+
+To provide authentication for the API calls, create a ".env" file in the script directory with the following definition:
+
+    <ENV NAME>=<ENV TOKEN>
+<ENV NAME> is the name of an environment variable; the same name must be passed to the "environment.yaml" file as the "env-token-name" parameter.
+
 # Usage
 
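Note: the .env value is read through python-decouple, matching the DTTOKEN = config(...) call added to createDash.py above; a minimal sketch assuming the token name from environment.yaml:

    from decouple import config
    token_name = "GLOBAL_CONFIG_TOKEN"   # the "env-token-name" value in environment.yaml
    DTTOKEN = config(token_name)         # resolved from the .env line GLOBAL_CONFIG_TOKEN=<token>

This is also why the loop variable in main() was renamed from config to configuration: it no longer shadows the imported config function.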
@@ -76,3 +88,15 @@ Definition Description:
 ## createDash.py
 
 This script generates the "tiles" section of a Dynatrace dashboard and takes slo_parameter.yaml as its input parameter (no need to add it manually)
+
+## environment.yaml
+
+File containing the environments targeted by --auto-upload
+Each environment entry:
+- name: string #name of the environment
+- env-url: str #URL of the environment
+- env-token-name: str #name of the environment variable containing the API token
+
+## requirements.txt
+
+File listing the Python packages required by the scripts
requirements.txt
@@ -0,0 +1,5 @@
+python-decouple
+pyyaml
+requests
+datetime
+argparse