implementation for handling legacy dashboards that have been deleted in the past
parent
7d60b71141
commit
3c05cda4a6
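The commit replaces the old calculateDifference() step with an evaluate() function that compares the dashboards returned by the Dashboards API against the dashboard ids reported by the viewCount metric: ids that only appear in the metric belong to dashboards that were deleted in the past ("legacy"), dashboards that still exist and have recorded views are "available", and dashboards without any views are "obsolete" and are returned for further processing. A minimal sketch of that classification (illustrative only, not part of the commit; ids and names are made up):

    dashboard_ids = ["db-1", "db-2"]   # dashboards that currently exist
    viewed_ids = ["db-1", "db-3"]      # ids seen in the viewCount metric
    legacy = [i for i in viewed_ids if i not in dashboard_ids]     # ["db-3"]: deleted in the past
    available = [i for i in dashboard_ids if i in viewed_ids]      # ["db-1"]: exists and was viewed
    obsolete = [i for i in dashboard_ids if i not in viewed_ids]   # ["db-2"]: exists, 0 views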
162 main.py
@@ -4,6 +4,14 @@ from decouple import config
import yaml
from dynatrace import Dynatrace
import logging
import pandas as pd
import openpyxl


def writeToExcel(itme, results):
    df = pd.DataFrame(results)
    for d in df[0]:
        print(d)


def format_block(string, max):
@@ -11,51 +19,123 @@ def format_block(string, max):
    string = (f'{string}{" "*(max-string_length)}')
    return string


def calculateDifference(dashboards, viewCounts):

def writeToTxt(item, results):
    for result in results:
        id = getattr(result, "id")
        name = getattr(result, "name")
        owner = getattr(result, "owner")
        filename = os.path.join(".\log", item+"-log.txt")
        os.makedirs(os.path.dirname(filename), exist_ok=True)

        with open(filename, "a", encoding="utf-8") as f:
            f.write(f"{'id: ' + id + ' '} " +
                    f"{'name: ' + name + ' '} " +
                    f"{'owner: ' + owner}" +
                    "\n")
        # logging.info(f"{'id: %s '}"
        #              f"{'name: %s'}"
        #              f"{'owner: %s'}",
        #              format_block(id, 50),
        #              format_block(name, 70), owner)


def convertToList(results):
    dashboard = []
    dashboards = {}

    for x, result in enumerate(results):
        dashboard = [getattr(result, "id"),
                     getattr(result, "name"),
                     getattr(result, "owner")]
        dashboards[x] = dashboard
        dashboard = []

    return dashboards


def evaluate(env, dashboards, viewCounts):
    ids = []
    viewIdsTotal = []
    viewIds = []
    legacy = []
    obsolete = []
    available = []

    for stub in getattr(dashboards, "_PaginatedList__elements"):
        ids.append(getattr(stub, "id"))

    viewIds = []
    for metricSeries in getattr(viewCounts, "_PaginatedList__elements"):
        for metric in getattr(metricSeries, "data"):
            viewIdsTotal.append(getattr(metric, "dimension_map")["id"])
            viewIds.append(getattr(metric, "dimension_map")["id"])

    obsolete = []
    # for value in ids:
    #     if value not in viewIds:
    #         obsolete.append(value)
    for viewId in viewIdsTotal:
        if viewId not in ids:
            legacy.append(viewId)
            viewIds.remove(viewId)
    logging.debug("%s %s have been deleted in the past", str(env), len(legacy))
    logging.debug("%s %s dashboards with viewCount and active", str(env),
                  len(viewIds))

    for stub in getattr(dashboards, "_PaginatedList__elements"):
        if getattr(stub, "id") in viewIds:
            available.append(stub)
        if getattr(stub, "id") not in viewIds:
            obsolete.append(stub)
    logging.info("%s %s dashboards with viewCount!", str(env), len(available))
    logging.info("%s %s dashboards with 0 viewCount!", str(env), len(obsolete))

    # filename = os.path.join(".\log2", "ids.txt")
    # os.makedirs(os.path.dirname(filename), exist_ok=True)
    # with open(filename, "a", encoding="utf-8") as f:
    #     f.write(str(ids))

    # filename = os.path.join(".\log2", "legacy.txt")
    # os.makedirs(os.path.dirname(filename), exist_ok=True)
    # with open(filename, "a", encoding="utf-8") as f:
    #     f.write(str(legacy))

    return obsolete


def getDashboardsWithViewCount(DT_CLIENT, METRIC_SELECTOR, RESOLUTION,
def getDashboardsWithViewCount(env, client, METRIC_SELECTOR, RESOLUTION,
                               FROM_DATE, TO_DATE):
    metrics = DT_CLIENT.metrics.query(METRIC_SELECTOR, RESOLUTION, FROM_DATE,
    logging.debug("%s get dashboards with viewCount, resolution %s ...",
                  str(env), RESOLUTION)
    metric_query = client.metrics.query(METRIC_SELECTOR, RESOLUTION, FROM_DATE,
                                        TO_DATE)
    count = getattr(metrics, "_PaginatedList__total_count")
    return count, metrics
    n_metric_query = getattr(metric_query, "_PaginatedList__total_count")
    logging.debug("%s %s dashboards with viewCount and older than 6 Months",
                  str(env), str(n_metric_query))

    return metric_query


def getDashboards(DT_CLIENT):
    dashboards = DT_CLIENT.dashboards.list(owner=None, tags=None)
def getDashboards(env, client):
    logging.debug("%s get all dashboards...", str(env))
    dashboards = client.dashboards.list(owner=None, tags=None)
    n_dashboards = getattr(dashboards, "_PaginatedList__total_count")
    return n_dashboards, dashboards
    logging.info("%s %s total dashboards", str(env), str(n_dashboards))

    return dashboards


def init_dt_client(env, DT_URL, DT_TOKEN):
    logging.debug("%s init Dynatrace client...", str(env))
    DT_CLIENT = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"), None, None,
                          0, 10*1000)
    return DT_CLIENT


if __name__ == "__main__":

    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

    with open(os.path.basename("./environment.yaml")) as env_cfg:
        environment = yaml.safe_load(env_cfg)

    for item, doc in environment.items():
        logging.debug("%s checking token...", str(item))
    for env, doc in environment.items():
        logging.debug("%s checking token...", str(env))

        if config(dict(doc[2]).get("env-token-name"), default='') != "":
            DT_URL = dict(doc[1]).get("env-url")
@@ -65,43 +145,17 @@ if __name__ == "__main__":
            FROM_DATE= dict(doc[7]).get("fromDate")
            TO_DATE= dict(doc[8]).get("toDate")

            logging.debug("%s init Dynatrace client...", str(item))
            DT_CLIENT = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"),
                                  None, None, 0, 10*1000)
            client = init_dt_client(env, DT_URL, DT_TOKEN)
            dashboards = getDashboards(env, client)
            metric_query = getDashboardsWithViewCount(env, client,
                                                      METRIC_SELECTOR,
                                                      RESOLUTION, FROM_DATE,
                                                      TO_DATE)
            results = evaluate(env, dashboards, metric_query)

            logging.debug("%s get all dashboards...", str(item))
            n_dashboards, dashboards = getDashboards(DT_CLIENT)
            logging.info("%s %s total dashboards", str(item), str(n_dashboards))
            converted_results = convertToList(results)
            print(len(converted_results))
            # writeToTxt(item, results)
            # writeToExcel(item, results)

            logging.debug("%s get dashboards with viewCount, resolution %s ...",
                          str(item), RESOLUTION)
            count, viewCounts = getDashboardsWithViewCount(DT_CLIENT,
                                                           METRIC_SELECTOR,
                                                           RESOLUTION,
                                                           FROM_DATE,
                                                           TO_DATE)
            logging.info("%s %s dashboards with viewCount and older than 6 "
                         "Months", str(item), str(count))

            logging.debug("%s store ids of obsolete dashboards...", str(item))
            results = calculateDifference(dashboards, viewCounts)
            logging.info("%s %s dashboards with 0 viewCount!",
                         str(item), n_dashboards - count)
            for result in results:
                id = getattr(result, "id")
                name = getattr(result, "name")
                owner = getattr(result, "owner")
                filename = os.path.join(".\log", item+"-log.txt")
                os.makedirs(os.path.dirname(filename), exist_ok=True)

                with open(filename, "a", encoding="utf-8") as f:
                    f.write(f"{'id: ' + id + ' '} " +
                            f"{'name: ' + name + ' '} " +
                            f"{'owner: ' + owner}" +
                            "\n")
                # logging.info(f"{'id: %s '}"
                #              f"{'name: %s'}"
                #              f"{'owner: %s'}",
                #              format_block(id, 50),
                #              format_block(name, 70), owner)
            print("finished")
print("finished")
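For reference, the __main__ block reads environment.yaml as a mapping of environment name to a list of one-key dicts and indexes that list by position: doc[1] holds "env-url", doc[2] holds "env-token-name" (the setting name that decouple's config() looks up for the API token), doc[7] holds "fromDate" and doc[8] holds "toDate"; the remaining entries (for example the metric selector and resolution used above) are read in lines this diff does not show. A sketch of the structure yaml.safe_load() is expected to return, with made-up values and placeholders for the entries the diff does not show:

    environment = {
        "prod": [                                                 # one entry per Dynatrace environment
            {"placeholder": "..."},                               # doc[0]: not referenced in the shown hunks
            {"env-url": "https://abc12345.live.dynatrace.com"},   # doc[1]: made-up URL
            {"env-token-name": "PROD_DT_TOKEN"},                  # doc[2]: made-up setting name
            {"placeholder": "..."},                               # doc[3]
            {"placeholder": "..."},                               # doc[4]  entries not shown in this diff,
            {"placeholder": "..."},                               # doc[5]  e.g. metric selector and resolution
            {"placeholder": "..."},                               # doc[6]
            {"fromDate": "now-6M"},                               # doc[7]: made-up value
            {"toDate": "now"},                                    # doc[8]: made-up value
        ],
    }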