First prototype with pre-deleting capability

master
SLW\ARNAUA 2023-07-14 18:46:12 +02:00
parent a8557b1fcc
commit 95d6cbfdcb
2 changed files with 90 additions and 34 deletions

.gitignore vendored

@@ -112,6 +112,7 @@ venv/
ENV/
env.bak/
venv.bak/
venv2/

# Spyder project settings
.spyderproject

main.py

@@ -17,6 +17,44 @@ from pathlib import Path
t = time.strftime("%Y%m%d-%H%M%S")
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
def check_metadata(file):
    with open(file, "r") as f:
        lines = [next(f) for _ in range(2)]
    if "LEGACY" in str(lines[1]):
        # l_stripped = lines[1].strip().replace("\n", "").replace("#", "").strip().split(" ")[1].strip()
        # l_replaced = l_stripped.replace("\n", "")
        # l_rr = l_replaced.replace("#", "")
        # l_s = l_rr.strip()
        # l_splitted = l_s.split(" ")
        # legacy_id = l_splitted[1].strip()
        id = lines[1].strip().replace("\n", "").replace("#", "").strip().split(" ")[1].strip()
    else:
        # stripped = lines[0].strip().replace("\n", "").split("=")[1].strip()
        # replaced = stripped.replace("\n", "")
        # splitted = replaced.split("=")
        # id = splitted[1].strip()
        id = lines[0].strip().replace("\n", "").split("=")[1].strip()
    with open(file, "r") as f:
        num_lines = sum(1 for _ in f)
    with open(file, "r") as f:
        lines = [next(f) for _ in range(int(num_lines))]
    for x, line in enumerate(lines):
        if "dashboard_metadata {" in line:
            metadata = lines[x:x+5]
            for md in metadata:
                if "name" in md:
                    name = md.strip().replace("\n", "").split("=")[1].strip().replace('"', "").strip()
                if "owner" in md:
                    owner = md.strip().replace("\n", "").split("=")[1].strip().replace('"', "").strip()
    return [id, name, owner]
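
# A minimal sketch (hypothetical, inferred from the parsing in check_metadata
# above, not copied from the repository) of the dashboard *.tf layout it assumes:
#
#   line 1:  dashboard_id = "<id>"        # regular file: id taken after "="
#   line 2:  # LEGACY <id>                # legacy file: "#" stripped, id after " "
#   ...
#   dashboard_metadata {                  # name/owner read from the next 5 lines
#       name  = "<dashboard name>"
#       owner = "<owner>"
#   }
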
def format_block(string, max):
    string_length = len(string)
    string = (f'{" "*(max-string_length)}{string}')
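
# Usage sketch (assumption; the call sites below pass a branch name and a
# width, and the rest of the function lies outside this hunk, where it
# presumably returns the padded string):
#   format_block("master", 50)  # right-aligns "master" in a 50-character field
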
@@ -220,56 +258,73 @@ def initDtClient(env, DT_URL, DT_TOKEN):
if __name__ == "__main__":
    all_data = {}
    dictionary_dashboards = {}
    list_environments = []
    # do it manually for CD_TS-CMS
    list_exclude_branches = ["HEAD", "master", "template", "CD_TS-CMS"]
    list_exclude_files = ["providers.tf", "data_source.tf"]
    with open(Path("./environment.yaml")) as env_cfg:
        environment = yaml.safe_load(env_cfg)
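
    # Hedged sketch of the environment.yaml shape this loop assumes, inferred
    # from the positional doc[i] lookups below (only the indices read here are
    # shown; key names are the ones used in the code, the environment name and
    # all values are placeholders):
    #
    #   EMEA_PROD:
    #     - ...                                  # doc[0], not read here
    #     - env-url: <tenant URL>                # doc[1]
    #     - env-token-name: <token env var>      # doc[2]
    #     - ...                                  # doc[3], not read here
    #     - ...                                  # doc[4], not read here
    #     - metricSelector: <metric selector>    # doc[5]
    #     - resolution: <resolution>             # doc[6]
    #     - fromDate: <from>                     # doc[7]
    #     - toDate: <to>                         # doc[8]
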
    # for env, doc in environment.items():
    #     logging.debug("%s checking token...", str(env))
    for env, doc in environment.items():
        logging.debug("%s checking token...", str(env))
        # if config(dict(doc[2]).get("env-token-name"), default='') != "":
        #     DT_URL = dict(doc[1]).get("env-url")
        #     DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
        #     METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
        #     RESOLUTION = dict(doc[6]).get("resolution")
        #     FROM_DATE = dict(doc[7]).get("fromDate")
        #     TO_DATE = dict(doc[8]).get("toDate")
        if config(dict(doc[2]).get("env-token-name"), default='') != "":
            DT_URL = dict(doc[1]).get("env-url")
            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
            METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
            RESOLUTION = dict(doc[6]).get("resolution")
            FROM_DATE = dict(doc[7]).get("fromDate")
            TO_DATE = dict(doc[8]).get("toDate")
            # client = initDtClient(env, DT_URL, DT_TOKEN)
            # dashboards = getDashboards(env, client)
            # metric_queries = getDashboardsWithViewCount(env, client,
            #                                             METRIC_SELECTOR,
            #                                             RESOLUTION, FROM_DATE,
            #                                             TO_DATE)
            # data = adaptDataStructure(dashboards, metric_queries)
            # result = evaluate(env, data)
            # # writeToExcel(env, t, result)
            client = initDtClient(env, DT_URL, DT_TOKEN)
            dashboards = getDashboards(env, client)
            metric_queries = getDashboardsWithViewCount(env, client,
                                                        METRIC_SELECTOR,
                                                        RESOLUTION, FROM_DATE,
                                                        TO_DATE)
            data = adaptDataStructure(dashboards, metric_queries)
            result = evaluate(env, data)
            # writeToExcel(env, t, result)
            # all_data[env] = result
            dictionary_dashboards[env] = result
            list_environments.append(env)
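
    # Assumed shape of the per-environment result collected above, inferred from
    # the dictionary_dashboards[e]["obsolete"].items() lookup further down, whose
    # values are compared against {"id": ..., "name": ..., "owner": ...} dicts:
    #
    #   result = {"obsolete": {<key>: {"id": ..., "name": ..., "owner": ...}, ...}, ...}
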
    target_dirs = ["EMEA_PROD", "EMEA_PREPROD", "NA_PROD", "NA_PPREPROD",
                   "CN_PROD", "CN_PREPROD"]
    repo = fetch_repository(config("REPOSITORY_URL"), config("REPOSITORY_PATH"))
    list_branches = fetch_branches(repo)
    list_branches.remove("HEAD")
    list_branches.remove("master")
    list_branches.remove("template")
    list_branches.remove("CD_TS-CMS")  # do it manually
    for b in list_exclude_branches:
        list_branches.remove(b)
    # repo_ = checkout_master(repo)
    repo_ = repo
    wd = Path(repo_.git.working_dir)
    list_excluded_files = ["providers.tf", "data_source.tf"]
    try:
        with open(Path("./dashboards.txt"), "a+", encoding="utf-8") as f:
            for i, branch in enumerate(list_branches):
                repo_.git.checkout(branch)
                logging.info("%d - branch: %s", i, str(branch))
                for file in glob.glob(str(wd) + '/**/dashboard/*.tf', recursive=True):
                    if os.path.basename(file) not in list_excluded_files:
                        f.write("%s | %s\n" % (format_block(branch, 50), file))
        # with open(Path("./dashboards.txt"), "a+", encoding="utf-8") as f:
        for i, branch in enumerate(list_branches):
            repo_.git.checkout(branch)
            logging.info("%d - branch: %s", i, str(branch))
            for file in glob.glob(str(wd) + '/**/dashboard/*.tf', recursive=True):
                if os.path.basename(file) not in list_exclude_files:
                    # f.write("%s | %s\n" % (format_block(branch, 50), file))
                    id, name, owner = check_metadata(file)
                    current_db = {"id": id, "name": name, "owner": owner}
                    for e in list_environments:
                        for k, v in dictionary_dashboards[e]["obsolete"].items():
                            if current_db == v:
                                print(current_db)
                                print(v)
                                print("DELETING", "BRANCH:", str(branch), "FILE:", file)
                                print("")
                            else:
                                print(current_db)
                                print(v)
                                print("")
    except Exception as e:
        print("Exception:", e)