204 lines
7.7 KiB
Python
204 lines
7.7 KiB
Python
import copy
|
|
import git
|
|
import glob
|
|
import logging
|
|
import os
|
|
import pandas as pd
|
|
import time;
|
|
import yaml
|
|
|
|
from decouple import config
|
|
from dynatrace import Dynatrace
|
|
from pathlib import Path
|
|
|
|
|
|
# Timestamp used to tag generated artifacts (e.g. the Excel report filename).
t = time.strftime("%Y%m%d-%H%M%S")
# Root logger: INFO level, minimal "LEVEL: message" format for console output.
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
|
|
|
|
|
|
def fetch_branches(repo):
    """Return the names of all remote branches of *repo*.

    Parameters:
        repo: a git.Repo instance with at least one configured remote.

    Returns:
        list[str]: remote head names (the "origin/" prefix already stripped
        by gitpython, e.g. ["HEAD", "master", ...]).
    """
    logging.info("fetching branches...")
    # remote().refs enumerates the remote-tracking refs; remote_head is the
    # branch name without the remote prefix.
    return [ref.remote_head for ref in repo.remote().refs]
|
|
|
|
|
|
def fetch_repository(REPOSITORY_URL, REPOSITORY_PATH):
    """Clone the onboarding repository, or reuse an existing local clone.

    Parameters:
        REPOSITORY_URL: URL to clone from.
        REPOSITORY_PATH: local destination path. Previously this parameter
            was ignored and the destination was hard-coded to
            "../coco_apm_terraform_onboarding".

    Returns:
        git.Repo: handle on the local working copy.
    """
    logging.info("fetching repository %s", str(REPOSITORY_URL))
    destination = Path(REPOSITORY_PATH)
    if destination.exists():
        # clone_from raises if the target directory already exists and is
        # non-empty — fall back to opening the existing checkout instead,
        # so repeated runs do not crash.
        return git.Repo(destination)
    return git.Repo.clone_from(REPOSITORY_URL, destination)
|
|
|
|
|
|
def writeToExcel(env, t, result):
    """Export the evaluation result to a timestamped Excel workbook.

    Parameters:
        env: environment label, embedded in the output filename.
        t: timestamp string, embedded in the output filename.
        result: dict with "available"/"legacy"/"obsolete" keys, each mapping
            an index to a {"id", "name", "owner"} record (the shape produced
            by evaluate()).

    Writes ./log/<t>_<env>_dashboards.xlsx with one sheet per category.
    """
    frames = {}
    for category in ("available", "legacy", "obsolete"):
        # One row per dashboard record, preserving insertion order.
        rows = [[entry["id"], entry["name"], entry["owner"]]
                for entry in result[category].values()]
        frames[category] = pd.DataFrame(rows, columns=['id', 'name', 'owner'])

    # os.path.join instead of the old ".\log" literal, which only resolved as
    # a relative directory on Windows (on POSIX it named a dir ".\log").
    filename = os.path.join(".", "log",
                            str(t) + "_" + str(env) + '_dashboards.xlsx')
    os.makedirs(os.path.dirname(filename), exist_ok=True)

    with pd.ExcelWriter(filename) as writer:
        for category, frame in frames.items():
            frame.to_excel(writer, sheet_name=category)
|
|
|
|
|
|
def evaluate(env, data):
    """Classify dashboards into available / legacy / obsolete buckets.

    Parameters:
        env: environment label, used only in log messages.
        data: the four-element list produced by adaptDataStructure():
            [dict_dashboards, list_dashboard_ids,
             dict_metric_queries, list_metric_query_ids].

    Returns:
        dict with keys "available", "legacy", "obsolete". "legacy" maps the
        metric-query enumeration index to an {"id", "name", "owner"} record
        (viewCount exists but the dashboard was deleted); "available" and
        "obsolete" map the dashboard enumeration index to the dashboard
        record, split by whether the dashboard has a surviving viewCount.
    """
    dict_dashboards, list_dashboard_ids, \
        dict_metric_queries, list_metric_query_ids = data

    # Sets give O(1) membership tests; the original scanned lists and did
    # O(n) list.remove() per hit, and deep-copied both structures only to
    # count survivors.
    dashboard_id_set = set(list_dashboard_ids)

    legacy = {}
    surviving_ids = set()
    for x, metric_query in enumerate(dict_metric_queries.values()):
        if metric_query["id"] not in dashboard_id_set:
            # viewCount recorded, but the dashboard itself no longer exists.
            legacy[x] = {"id": metric_query["id"],
                         "name": metric_query["name"],
                         "owner": metric_query["owner"]}
        else:
            surviving_ids.add(metric_query["id"])

    logging.debug("%s %s have been deleted in the past", str(env), len(legacy))
    logging.debug("%s %s dashboards with viewCount and active", str(env),
                  len(dict_metric_queries) - len(legacy))

    available = {}
    obsolete = {}
    for i, dashboard in enumerate(dict_dashboards.values()):
        if dashboard["id"] in surviving_ids:
            available[i] = dashboard
        else:
            obsolete[i] = dashboard

    logging.info("%s %s dashboards with viewCount!", str(env), len(available))
    logging.info("%s %s dashboards with 0 viewCount!", str(env), len(obsolete))

    return {"available": available, "legacy": legacy, "obsolete": obsolete}
|
|
|
|
|
|
def adaptDataStructure(dashboards, metric_queries):
    """Flatten the paginated Dynatrace responses into plain dicts and lists.

    Parameters:
        dashboards: paginated dashboard stubs (objects with id/name/owner).
        metric_queries: paginated metric collections, each carrying a "data"
            list of series whose dimension_map holds the dashboard id.

    Returns:
        [dict_dashboards, list_dashboard_ids,
         dict_metric_queries, list_metric_query_ids]
        where the dicts map a running index to {"id", "name", "owner"}.
    """
    dict_dashboards = {}
    list_dashboard_ids = []
    dict_metric_queries = {}
    list_metric_query_ids = []

    # Reaching into the private _PaginatedList__elements avoids re-fetching
    # pages — fragile, but matches how the rest of this script reads totals.
    for s, stub in enumerate(getattr(dashboards, "_PaginatedList__elements")):
        dict_dashboards[s] = {"id": getattr(stub, "id"),
                              "name": getattr(stub, "name"),
                              "owner": getattr(stub, "owner")}
        list_dashboard_ids.append(getattr(stub, "id"))

    # BUG FIX: the index must run across *all* collections. Previously
    # enumerate() restarted at 0 for every collection, so entries from later
    # collections overwrote earlier ones in dict_metric_queries while
    # list_metric_query_ids kept growing.
    idx = 0
    for collection in getattr(metric_queries, "_PaginatedList__elements"):
        for q in getattr(collection, "data"):
            dict_metric_queries[idx] = {"id": getattr(q, "dimension_map")["id"],
                                        "name": None,
                                        "owner": None}
            list_metric_query_ids.append(getattr(q, "dimension_map")["id"])
            idx += 1

    return [dict_dashboards, list_dashboard_ids, dict_metric_queries,
            list_metric_query_ids]
|
|
|
|
|
|
def getDashboardsWithViewCount(env, client, METRIC_SELECTOR, RESOLUTION,
                               FROM_DATE, TO_DATE):
    """Run the viewCount metric query and return the paginated result.

    Parameters:
        env: environment label, used only in log messages.
        client: Dynatrace API client.
        METRIC_SELECTOR, RESOLUTION, FROM_DATE, TO_DATE: passed straight
            through to client.metrics.query().

    Returns:
        The paginated metric-query result object.
    """
    logging.debug("%s get dashboards with viewCount, resolution %s ...",
                  str(env), RESOLUTION)
    result = client.metrics.query(METRIC_SELECTOR, RESOLUTION, FROM_DATE,
                                  TO_DATE)
    # Total count lives on the private pagination attribute of the result.
    total = getattr(result, "_PaginatedList__total_count")
    logging.debug("%s %s dashboards with viewCount and older than 6 Months",
                  str(env), str(total))
    return result
|
|
|
|
|
|
def getDashboards(env, client):
    """List every dashboard in the environment and return the paginated result.

    Parameters:
        env: environment label, used only in log messages.
        client: Dynatrace API client.

    Returns:
        The paginated dashboard list object (no owner/tag filtering).
    """
    logging.debug("%s get all dashboards...", str(env))
    listing = client.dashboards.list(owner=None, tags=None)
    # Total count lives on the private pagination attribute of the result.
    total = getattr(listing, "_PaginatedList__total_count")
    logging.info("%s %s total dashboards", str(env), str(total))
    return listing
|
|
|
|
|
|
def initDtClient(env, DT_URL, DT_TOKEN):
    """Create a Dynatrace API client for the given environment.

    Parameters:
        env: environment label, used only in log messages.
        DT_URL: base URL of the Dynatrace environment.
        DT_TOKEN: API token.

    Returns:
        A configured Dynatrace client (retries=0, timeout 10*1000).
    """
    logging.debug("%s init Dynatrace client...", str(env))
    # NOTE(review): logging.Logger("ERROR") creates a logger *named* "ERROR",
    # not one filtered at ERROR level — confirm whether that was the intent.
    client = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"), None, None,
                       0, 10*1000)
    return client
|
|
|
|
|
|
if __name__ == "__main__":
    # Collected per-environment evaluation results — currently unused while
    # the Dynatrace evaluation block below stays commented out.
    all_data = {}

    # Per-environment Dynatrace connection/query settings.
    with open(Path("./environment.yaml")) as env_cfg:
        environment = yaml.safe_load(env_cfg)

    # --- Dynatrace dashboard evaluation pipeline (disabled) -----------------
    # for env, doc in environment.items():
    #     logging.debug("%s checking token...", str(env))
    #
    #     if config(dict(doc[2]).get("env-token-name"), default='') != "":
    #         DT_URL = dict(doc[1]).get("env-url")
    #         DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
    #         METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
    #         RESOLUTION = dict(doc[6]).get("resolution")
    #         FROM_DATE= dict(doc[7]).get("fromDate")
    #         TO_DATE= dict(doc[8]).get("toDate")
    #
    #         client = initDtClient(env, DT_URL, DT_TOKEN)
    #         dashboards = getDashboards(env, client)
    #         metric_queries = getDashboardsWithViewCount(env, client,
    #                                                     METRIC_SELECTOR,
    #                                                     RESOLUTION, FROM_DATE,
    #                                                     TO_DATE)
    #         data = adaptDataStructure(dashboards, metric_queries)
    #         result = evaluate(env, data)
    #         # writeToExcel(env, t, result)
    #
    #         all_data[env] = result

    # Region identifiers — NOTE(review): unused below; presumably meant to
    # filter branches or directories. "NA_PPREPROD" looks like a typo for
    # "NA_PREPROD" — confirm against the repository's branch names.
    target_dirs = ["EMEA_PROD", "EMEA_PREPROD", "NA_PROD", "NA_PPREPROD",
                   "CN_PROD", "CN_PREPROD"]
    repo = fetch_repository(config("REPOSITORY_URL"), config("REPOSITORY"))
    list_branches = fetch_branches(repo)
    # Drop the symbolic HEAD ref; raises ValueError if it is absent from the
    # remote refs — TODO confirm that is acceptable here.
    list_branches.remove("HEAD")

    # Make sure we start from master before walking the branches.
    # NOTE(review): gitpython's active_branch raises TypeError on a detached
    # HEAD, and Head objects have no is_detached attribute (that lives on
    # repo.head) — both arms check out master anyway; verify this guard.
    if repo.active_branch.name != repo.heads.master.name:
        if not repo.active_branch.is_detached:
            repo.git.checkout("master")
        else:
            # repo.heads.master.checkout()
            repo.git.checkout("master")

    try:
        # Visit every remote branch; the .tf file inventory below is disabled.
        for i, branch in enumerate(list_branches):
            repo.git.checkout(branch)
            print("i: ", i, " ", branch)
            # with open("out.txt", "a+") as f:
            #     for file in Path(repo.git.working_dir).glob('**/dashboard/*.tf'):
            #         f.write("%s | %s\n" % (branch, file))
    except Exception as e:
        # Best-effort: report the first checkout failure and stop iterating.
        print(e)

    print("finished")
|