diff --git a/main.py b/main.py
index 9f6b099..dd3c3aa 100644
--- a/main.py
+++ b/main.py
@@ -1,12 +1,36 @@
-import os
-import logging
-from decouple import config
-import yaml
-from dynatrace import Dynatrace
-import logging
-import pandas as pd
 import copy
+import git
+import glob
+import logging
+import os
+import pandas as pd
 import time;
+import yaml
+
+from decouple import config
+from dynatrace import Dynatrace
+from pathlib import Path
+
+
+t = time.strftime("%Y%m%d-%H%M%S")
+logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+
+
+def fetch_branches(repo):
+    # branches = [repo.git.branch("-r").replace("origin/", "").split("\n ")]
+    # branches = repo.remotes.origin.fetch()
+    branch_list = [r.remote_head for r in repo.remote().refs]
+
+    return branch_list
+
+
+def fetch_repository(REPOSITORY_URL, REPOSITORY_PATH):
+    logging.debug("fetching repository %s", str(REPOSITORY_URL))
+    # git.Repo(Path(config(REPOSITORY_PATH)))
+    repo = git.Repo.clone_from(REPOSITORY_URL,
+                               Path("../coco_apm_terraform_onboarding"))
+
+    return repo
 
 
 def writeToExcel(env, t, result):
@@ -124,32 +148,55 @@ def initDtClient(env, DT_URL, DT_TOKEN):
 
 
 if __name__ == "__main__":
-    t = time.strftime("%Y%m%d-%H%M%S")
+    all_data = {}
 
-    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+    with open(Path("./environment.yaml")) as env_cfg:
+        environment = yaml.safe_load(env_cfg)
 
-    with open(os.path.basename("./environment.yaml")) as env_cfg:
-        environment = yaml.safe_load(env_cfg)
+    # for env, doc in environment.items():
+    #     logging.debug("%s checking token...", str(env))
 
-    for env, doc in environment.items():
-        logging.debug("%s checking token...", str(env))
+    #     if config(dict(doc[2]).get("env-token-name"), default='') != "":
+    #         DT_URL = dict(doc[1]).get("env-url")
+    #         DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
+    #         METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
+    #         RESOLUTION = dict(doc[6]).get("resolution")
+    #         FROM_DATE= dict(doc[7]).get("fromDate")
+    #         TO_DATE= dict(doc[8]).get("toDate")
 
-        if config(dict(doc[2]).get("env-token-name"), default='') != "":
-            DT_URL = dict(doc[1]).get("env-url")
-            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
-            METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
-            RESOLUTION = dict(doc[6]).get("resolution")
-            FROM_DATE= dict(doc[7]).get("fromDate")
-            TO_DATE= dict(doc[8]).get("toDate")
+    #         client = initDtClient(env, DT_URL, DT_TOKEN)
+    #         dashboards = getDashboards(env, client)
+    #         metric_queries = getDashboardsWithViewCount(env, client,
+    #                                                     METRIC_SELECTOR,
+    #                                                     RESOLUTION, FROM_DATE,
+    #                                                     TO_DATE)
+    #         data = adaptDataStructure(dashboards, metric_queries)
+    #         result = evaluate(env, data)
+    #         # writeToExcel(env, t, result)
+
+    #         all_data[env] = result
 
-            client = initDtClient(env, DT_URL, DT_TOKEN)
-            dashboards = getDashboards(env, client)
-            metric_queries = getDashboardsWithViewCount(env, client,
-                                                        METRIC_SELECTOR,
-                                                        RESOLUTION, FROM_DATE,
-                                                        TO_DATE)
-            data = adaptDataStructure(dashboards, metric_queries)
-            result = evaluate(env, data)
-            writeToExcel(env, t, result)
+    target_dirs = ["EMEA_PROD", "EMEA_PREPROD", "NA_PROD", "NA_PPREPROD",
+                   "CN_PROD", "CN_PREPROD"]
+    repo = fetch_repository(config("REPOSITORY_URL"), config("REPOSITORY"))
+    list_branches = fetch_branches(repo)
+    list_branches.remove("HEAD")
 
-    print("finished")
+    if repo.active_branch.name != repo.heads.master.name:
+        if not repo.active_branch.is_detached:
+            repo.git.checkout("master")
+        else:
+            # repo.heads.master.checkout()
+            repo.git.checkout("master")
+
+    try:
+        for i, branch in enumerate(list_branches):
+            repo.git.checkout(branch)
+            print("i: ", i, " ", branch)
+            # with open("out.txt", "a+") as f:
+            #     for file in Path(repo.git.working_dir).glob('**/dashboard/*.tf'):
+            #         f.write("%s | %s\n" % (branch, file))
+    except Exception as e:
+        print(e)
+
+    print("finished")
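Note (not part of the diff above): the commented-out block inside the branch loop suggests the eventual goal is to collect the Terraform dashboard definitions (**/dashboard/*.tf) for each branch. The sketch below is one way that idea could be factored into a helper, assuming GitPython (git.Repo) and the region folders listed in target_dirs; the helper name collect_dashboard_files is hypothetical and not defined anywhere in this change.

# Illustrative sketch only, under the assumptions stated above.
from pathlib import Path

import git


def collect_dashboard_files(repo: git.Repo, branches, target_dirs):
    """Check out each branch and list the *.tf files found under any dashboard/ folder."""
    files_by_branch = {}
    for branch in branches:
        repo.git.checkout(branch)
        files_by_branch[branch] = [
            str(tf_file)
            for tf_file in Path(repo.working_dir).glob("**/dashboard/*.tf")
            # keep only files that sit under one of the known region folders
            if any(part in target_dirs for part in tf_file.parts)
        ]
    return files_by_branch

Called after the existing checkout setup, e.g. all_data = collect_dashboard_files(repo, list_branches, target_dirs), it could also give the currently unused all_data dict a purpose.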