diff --git a/.gitignore b/.gitignore
index b7464da..accaf47 100644
--- a/.gitignore
+++ b/.gitignore
@@ -148,4 +148,5 @@
 metricexpressions.json
 failed_requests.txt
 # other
-*.txt
\ No newline at end of file
+*.txt
+log/
\ No newline at end of file
diff --git a/main.py b/main.py
index 8872299..8afdc41 100644
--- a/main.py
+++ b/main.py
@@ -5,98 +5,93 @@
 import yaml
 from dynatrace import Dynatrace
 import logging
 import pandas as pd
-import openpyxl
+import copy
+import time
 
 
-def writeToExcel(itme, results):
-    df = pd.DataFrame(results)
-    for d in df[0]:
-        print(d)
+def writeToExcel(env, t, result):
+    list_available = []
+    list_legacy = []
+    list_obsolete = []
+
+    for category in ["available", "legacy", "obsolete"]:
+        for vi in result[category].values():
+            if category == "available":
+                list_available.append([vi["id"], vi["name"], vi["owner"]])
+            if category == "legacy":
+                list_legacy.append([vi["id"], vi["name"], vi["owner"]])
+            if category == "obsolete":
+                list_obsolete.append([vi["id"], vi["name"], vi["owner"]])
+
+    df_available = pd.DataFrame(list_available, columns=['id', 'name', 'owner'])
+    df_legacy = pd.DataFrame(list_legacy, columns=['id', 'name', 'owner'])
+    df_obsolete = pd.DataFrame(list_obsolete, columns=['id', 'name', 'owner'])
+
+    filename = os.path.join("log",
+                            str(t) + "_" + str(env) + '_dashboards.xlsx')
+    os.makedirs(os.path.dirname(filename), exist_ok=True)
+
+    with pd.ExcelWriter(filename) as writer:
+        df_available.to_excel(writer, sheet_name='available')
+        df_legacy.to_excel(writer, sheet_name='legacy')
+        df_obsolete.to_excel(writer, sheet_name='obsolete')
 
 
-def format_block(string, max):
-    string_length = len(string)
-    string = (f'{string}{" "*(max-string_length)}')
-    return string
+def evaluate(env, data):
+    legacy = {}
+    available = {}
+    obsolete = {}
+    dict_dashboards = data[0]
+    list_dashboard_ids = data[1]
+    dict_metric_queries = data[2]
+    list_metric_query_ids = data[3]
+    dict_metric_queries_copy = copy.deepcopy(dict_metric_queries)
+    list_metric_query_copy_ids = copy.deepcopy(list_metric_query_ids)
-
-def writeToTxt(item, results):
-    for result in results:
-        id = getattr(result, "id")
-        name = getattr(result, "name")
-        owner = getattr(result, "owner")
-        filename = os.path.join(".\log", item+"-log.txt")
-        os.makedirs(os.path.dirname(filename), exist_ok=True)
-
-        with open(filename, "a", encoding="utf-8") as f:
-            f.write(f"{'id: ' + id + ' '} " +
-                    f"{'name: ' + name + ' '} " +
-                    f"{'owner: ' + owner}" +
-                    "\n")
-        # logging.info(f"{'id: %s '}"
-        #              f"{'name: %s'}"
-        #              f"{'owner: %s'}",
-        #              format_block(id, 50),
-        #              format_block(name, 70), owner)
-
-
-def convertToList(results):
-    dashboard = []
-    dashboards = {}
-
-    for x, result in enumerate(results):
-        dashboard = [getattr(result, "id"),
-                     getattr(result, "name"),
-                     getattr(result, "owner")]
-        dashboards[x] = dashboard
-        dashboard = []
-
-    return dashboards
-
-
-def evaluate(env, dashboards, viewCounts):
-    ids = []
-    viewIdsTotal = []
-    viewIds = []
-    legacy = []
-    obsolete = []
-    available = []
-
-    for stub in getattr(dashboards, "_PaginatedList__elements"):
-        ids.append(getattr(stub, "id"))
-
-    for metricSeries in getattr(viewCounts, "_PaginatedList__elements"):
-        for metric in getattr(metricSeries, "data"):
-            viewIdsTotal.append(getattr(metric, "dimension_map")["id"])
-            viewIds.append(getattr(metric, "dimension_map")["id"])
-
-    for viewId in viewIdsTotal:
-        if viewId not in ids:
-            legacy.append(viewId)
-            viewIds.remove(viewId)
+
+    for x, (m, metric_query) in enumerate(dict_metric_queries.items()):
+        if metric_query["id"] not in list_dashboard_ids:
+            legacy[x] = {"id" : metric_query["id"],
+                         "name" : metric_query["name"],
+                         "owner" : metric_query["owner"]}
+            del dict_metric_queries_copy[m]
+            list_metric_query_copy_ids.remove(metric_query["id"])
 
     logging.debug("%s %s have been deleted in the past", str(env),
                   len(legacy))
     logging.debug("%s %s dashboards with viewCount and active", str(env),
-                  len(viewIds))
+                  len(dict_metric_queries_copy))
 
-    for stub in getattr(dashboards, "_PaginatedList__elements"):
-        if getattr(stub, "id") in viewIds:
-            available.append(stub)
-        if getattr(stub, "id") not in viewIds:
-            obsolete.append(stub)
+    for i, (d, dashboard) in enumerate(dict_dashboards.items()):
+        if dashboard["id"] in list_metric_query_copy_ids:
+            available[i] = dashboard
+        if dashboard["id"] not in list_metric_query_copy_ids:
+            obsolete[i] = dashboard
 
     logging.info("%s %s dashboards with viewCount!", str(env), len(available))
     logging.info("%s %s dashboards with 0 viewCount!", str(env), len(obsolete))
 
-    # filename = os.path.join(".\log2", "ids.txt")
-    # os.makedirs(os.path.dirname(filename), exist_ok=True)
-    # with open(filename, "a", encoding="utf-8") as f:
-    #     f.write(str(ids))
+    return {"available" : available, "legacy" : legacy, "obsolete" : obsolete}
 
-    # filename = os.path.join(".\log2", "legacy.txt")
-    # os.makedirs(os.path.dirname(filename), exist_ok=True)
-    # with open(filename, "a", encoding="utf-8") as f:
-    #     f.write(str(legacy))
-
-    return obsolete
+
+def adaptDataStructure(dashboards, metric_queries):
+    dict_dashboards = {}
+    list_dashboard_ids = []
+    dict_metric_queries = {}
+    list_metric_query_ids = []
+
+    for s, stub in enumerate(getattr(dashboards, "_PaginatedList__elements")):
+        dict_dashboards[s] = {"id" : getattr(stub, "id"),
+                              "name" : getattr(stub, "name"),
+                              "owner" : getattr(stub, "owner")}
+        list_dashboard_ids.append(getattr(stub, "id"))
+
+    for collection in getattr(metric_queries, "_PaginatedList__elements"):
+        for m, query in enumerate(getattr(collection, "data"), start=len(dict_metric_queries)):
+            dict_metric_queries[m] = {"id" : getattr(query, "dimension_map")["id"],
+                                      "name" : None,
+                                      "owner" : None}
+            list_metric_query_ids.append(getattr(query, "dimension_map")["id"])
+
+    return [dict_dashboards, list_dashboard_ids, dict_metric_queries,
+            list_metric_query_ids]
 
 
 def getDashboardsWithViewCount(env, client, METRIC_SELECTOR, RESOLUTION,
@@ -121,7 +116,7 @@ def getDashboards(env, client):
     return dashboards
 
 
-def init_dt_client(env, DT_URL, DT_TOKEN):
+def initDtClient(env, DT_URL, DT_TOKEN):
    logging.debug("%s init Dynatrace client...", str(env))
    DT_CLIENT = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"),
                          None, None, 0, 10*1000)
@@ -129,6 +124,8 @@ def init_dt_client(env, DT_URL, DT_TOKEN):
 
 
 if __name__ == "__main__":
+    t = time.strftime("%Y%m%d-%H%M%S")
+
     logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
 
     with open(os.path.basename("./environment.yaml")) as env_cfg:
@@ -145,17 +142,14 @@ if __name__ == "__main__":
         FROM_DATE= dict(doc[7]).get("fromDate")
         TO_DATE= dict(doc[8]).get("toDate")
 
-        client = init_dt_client(env, DT_URL, DT_TOKEN)
+        client = initDtClient(env, DT_URL, DT_TOKEN)
         dashboards = getDashboards(env, client)
-        metric_query = getDashboardsWithViewCount(env, client,
-                                                  METRIC_SELECTOR,
-                                                  RESOLUTION, FROM_DATE,
-                                                  TO_DATE)
-        results = evaluate(env, dashboards, metric_query)
+        metric_queries = getDashboardsWithViewCount(env, client,
+                                                    METRIC_SELECTOR,
+                                                    RESOLUTION, FROM_DATE,
+                                                    TO_DATE)
+        data = adaptDataStructure(dashboards, metric_queries)
+        result = evaluate(env, data)
+        writeToExcel(env, t, result)
 
-        converted_results = convertToList(results)
-        print(len(converted_results))
-        # writeToTxt(item, results)
-        # writeToExcel(item, results)
-
-        print("finished")
\ No newline at end of file
+        print("finished")