Compare commits


10 Commits

Author SHA1 Message Date
Arnel Arnautovic 32174adda7 ToDo: Create Pull Request and Merge automated or manually? 2023-07-18 17:02:16 +02:00
Arnel Arnautovic ff32507e77 not tested yet and pull request via atlassian needs to be implemented 2023-07-18 16:58:59 +02:00
SLW\ARNAUA 95d6cbfdcb First prototype with pre-deleting capability 2023-07-14 18:46:12 +02:00
SLW\ARNAUA a8557b1fcc dashboards.txt 2023-07-13 21:23:59 +02:00
SLW\ARNAUA e06353151e updated 2023-07-12 19:35:37 +02:00
SLW\ARNAUA d505b0df16 updated 2023-07-12 19:34:51 +02:00
SLW\ARNAUA eef660de21 updated 2023-07-12 19:32:53 +02:00
SLW\ARNAUA 88a51df08b slight change 2023-07-07 08:30:05 +02:00
SLW\ARNAUA 2701320c08 writing to excel + code refactoring and increased speed-up 2023-07-06 21:01:48 +02:00
SLW\ARNAUA 3c05cda4a6 implementation for handling legacy dashboards that have been deleted in the past 2023-07-06 15:11:55 +02:00
4 changed files with 638 additions and 324 deletions

.gitignore (vendored, 304 changed lines)

@@ -1,151 +1,153 @@
.vscode
.idea

# Byte-compiled / optimized / DLL files
__pycache__/
more_utils/__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
+venv2/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

### Terraform stuff
**/.terraform/*
crash.log
*.tfvars

#excel reports
*.xlsx
*.csv

# for dev
slo_parameter.yaml
metricexpressions.json
*.bak
*.json
failed_requests.txt

# other
*.txt
+log/

environment.yaml

@ -1,61 +1,61 @@
--- ---
euprod-coco: euprod-coco:
- name: "euprod" - name: "euprod"
- env-url: "https://xxu26128.live.dynatrace.com" - env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN_VAR" - env-token-name: "EUPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/" - jenkins: "https://jaws.bmwgroup.net/opapm/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"
eupreprod-coco: eupreprod-coco:
- name: "eupreprod" - name: "eupreprod"
- env-url: "https://qqk70169.live.dynatrace.com" - env-url: "https://qqk70169.live.dynatrace.com"
- env-token-name: "EUPREPROD_TOKEN_VAR" - env-token-name: "EUPREPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/" - jenkins: "https://jaws.bmwgroup.net/opapm/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"
naprod-coco: naprod-coco:
- name: "naprod" - name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com" - env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN_VAR" - env-token-name: "NAPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/" - jenkins: "https://jaws.bmwgroup.net/opapm/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"
napreprod-coco: napreprod-coco:
- name: "napreprod" - name: "napreprod"
- env-url: "https://onb44935.live.dynatrace.com" - env-url: "https://onb44935.live.dynatrace.com"
- env-token-name: "NAPREPROD_TOKEN_VAR" - env-token-name: "NAPREPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/" - jenkins: "https://jaws.bmwgroup.net/opapm/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"
cnprod-coco: cnprod-coco:
- name: "cnprod" - name: "cnprod"
- env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b" - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN_VAR" - env-token-name: "CNPROD_TOKEN_VAR"
- jenkins: "https://jaws-china.bmwgroup.net/opmaas/" - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"
cnpreprod-coco: cnpreprod-coco:
- name: "cnpreprod" - name: "cnpreprod"
- env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35" - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
- env-token-name: "CNPREPROD_TOKEN_VAR" - env-token-name: "CNPREPROD_TOKEN_VAR"
- jenkins: "https://jaws-china.bmwgroup.net/opmaas/" - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
- type: "coco" - type: "coco"
- metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))" - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
- resolution: "1M" - resolution: "1M"
- fromDate: "now-6M" - fromDate: "now-6M"
- toDate: "now" - toDate: "now"

main.py (532 changed lines)

@@ -1,107 +1,425 @@
import copy
import git
import glob
import logging
import os
import pandas as pd
import shutil
import time
import yaml

from decouple import config
from dynatrace import Dynatrace
from pathlib import Path

# timestamp used to tag log files and Excel reports
t = time.strftime("%Y%m%d-%H%M%S")
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
MAX_RETRIES = 3


def git_push(repo, origin, branch, message):
    # commit the working tree and push; retry a few times on failure
    for attempt in range(MAX_RETRIES):
        try:
            repo.git.add(all=True)
            repo.git.commit("-m", message)
            # set_origin = repo.remote(name=origin)
            repo.git.push(origin, branch)
            break
        except Exception as e:
            logging.info(e)
            logging.info("retry attempt %d/%d" % (attempt + 1, MAX_RETRIES))


def delete_dashboard(dt_client, environment, current_db):
    response = None
    try:
        response = dt_client.dashboards.delete(str(current_db["id"]))
        logging.info("Deletion of dashboard %s (%s) in %s successful" %
                     (str(current_db["name"]), current_db["id"],
                      str(environment)))
    except Exception as e:
        logging.info("During deletion of dashboard the following exception "
                     "has been encountered: %s", e)
    return response


def get_credentials(e, environment):
    # look up URL and API token for the environment named e
    for env, doc in environment.items():
        if str(e) == str(env):
            DT_URL = dict(doc[1]).get("env-url")
            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
            return [DT_URL, DT_TOKEN]


def delete_file(branch, file):
    is_deleted = False
    try:
        os.remove(file)
        with open(Path("./log_deleted_" + str(t) + ".txt"), "a+",
                  encoding="utf-8") as f:
            f.write("File on branch %s in %s has been deleted\n" % (branch,
                                                                    file))
        logging.debug("File on branch %s in %s has been deleted" % (branch,
                                                                    file))
        is_deleted = True
    except OSError as e:
        logging.info("Error: %s - %s." % (e.filename, e.strerror))
        is_deleted = False
    return is_deleted


# delete based only by id! if there is no id, delete not possible!
def check_dashboard(branch, file, current_db, list_environments,
                    dict_dashboards):
    # remove the Terraform file if its dashboard id is obsolete in any
    # environment; returns the deletion flag and the environment checked last
    is_deleted = False
    is_stop = False
    for e in list_environments:
        for k, v in dict_dashboards[e]["obsolete"].items():
            if current_db["id"] == v["id"]:
                is_stop = True
                logging.debug("Obsolete dashboard on branch %s in %s",
                              str(branch), str(file))
                # return True
                is_deleted = delete_file(branch, Path(file))
                break
        if is_stop:
            break
    return is_deleted, e


def check_metadata(file):
    # extract [id, name, owner] from a dashboard .tf file
    id = None
    name = None
    owner = None

    with open(file, "r") as f:
        lines = [next(f) for _ in range(2)]

    if "LEGACY" in str(lines[1]):
        id = lines[1].strip().replace("\n", "").replace("#", "").strip()\
            .split(" ")[1].strip()
    elif "ID" in str(lines[0]):
        id = lines[0].strip().replace("\n", "").replace("#", "").strip()\
            .split(" ")[1].strip()
    elif "DEFINE" in str(lines[0]):
        id = lines[0].strip().replace("\n", "").split("=")[1].strip()
    else:
        id = None

    with open(file, "r") as f:
        num_lines = sum(1 for _ in f)
    with open(file, "r") as f:
        lines = [next(f) for _ in range(int(num_lines))]

    for x, line in enumerate(lines):
        if "dashboard_metadata {" in line:
            metadata = lines[x:x+5]
            for md in metadata:
                if "name" in md:
                    name = md.strip().replace("\n", "").split("=")[1].strip()\
                        .replace('"', "").strip()
                if "owner" in md:
                    owner = md.strip().replace("\n", "").split("=")[1].strip()\
                        .replace('"', "").strip()
        elif "dashboardName" in line:
            name = line.strip().replace("\n", "").split("=")[1].strip()\
                .replace('"', "").strip()
            owner = None

    return [id, name, owner]


def format_block(string, max):
    string_length = len(string)
    string = (f'{" "*(max-string_length)}{string}')
    return string


def onerror(func, path, exc_info):
    """
    Error handler for ``shutil.rmtree``.

    If the error is due to an access error (read only file)
    it attempts to add write permission and then retries.

    If the error is for another reason it re-raises the error.

    Usage : ``shutil.rmtree(path, onerror=onerror)``
    """
    import stat
    # Is the error an access error?
    if not os.access(path, os.W_OK):
        os.chmod(path, stat.S_IWUSR)
        func(path)
    else:
        raise


def delete_dir(path):
    logging.info("cleaning up...")
    try:
        shutil.rmtree(path, onerror=onerror)
        logging.info("%s successfully deleted", str(path))
    except OSError as e:
        logging.info("Error: %s - %s." % (e.filename, e.strerror))


def checkout_master(repo):
    logging.info("master branch name is: %s", str(repo.heads.master.name))
    logging.info("checking active branch ...")
    if repo.active_branch.name != repo.heads.master.name:
        logging.info("active branch name is: %s", str(repo.active_branch.name))
        if repo.active_branch.is_detached:
            logging.info("active branch (%s) is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
            logging.info("checking out master...")
            repo.git.checkout("master")
            logging.info("checkout to master successful")
            logging.info("active branch is %s and is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
        else:
            # repo.heads.master.checkout()
            logging.info("active branch (%s) is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
            logging.info("checking out master...")
            repo.git.checkout("master")
            logging.info("checkout to master successful")
            logging.info("active branch is %s and is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
    else:
        logging.info("active branch is already master (%s) and is detached: %s",
                     str(repo.active_branch.name),
                     str(repo.active_branch.is_detached))
    return repo


def fetch_branches(repo):
    logging.info("fetching branches...")
    # branches = [repo.git.branch("-r").replace("origin/", "").split("\n ")]
    # branches = repo.remotes.origin.fetch()
    branch_list = [r.remote_head for r in repo.remote().refs]
    return branch_list


def fetch_repository(REPOSITORY_URL, REPOSITORY_PATH):
    # logging.info("fetching repository %s", str(REPOSITORY_URL))
    # repo = git.Repo.clone_from(REPOSITORY_URL,
    #                            Path("../coco_apm_terraform_onboarding"))
    logging.info("repository path %s", str(REPOSITORY_PATH))
    repo = git.Repo(Path(REPOSITORY_PATH))
    return repo


def writeToExcel(env, t, result):
    # one sheet per category: available, legacy, obsolete
    list_available = []
    list_legacy = []
    list_obsolete = []
    for type in ["available", "legacy", "obsolete"]:
        for i, (ki, vi) in enumerate(result[type].items()):
            if type == "available":
                list_available.append([vi["id"], vi["name"], vi["owner"]])
            if type == "legacy":
                list_legacy.append([vi["id"], vi["name"], vi["owner"]])
            if type == "obsolete":
                list_obsolete.append([vi["id"], vi["name"], vi["owner"]])

    df_available = pd.DataFrame(list_available, columns=['id', 'name', 'owner'])
    df_legacy = pd.DataFrame(list_legacy, columns=['id', 'name', 'owner'])
    df_obsolete = pd.DataFrame(list_obsolete, columns=['id', 'name', 'owner'])

    filename = os.path.join("./log",
                            str(t) + "_" + str(env) + '_dashboards.xlsx')
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    with pd.ExcelWriter(filename) as writer:
        df_available.to_excel(writer, sheet_name='available')
        df_legacy.to_excel(writer, sheet_name='legacy')
        df_obsolete.to_excel(writer, sheet_name='obsolete')


def evaluate(env, data):
    # split dashboards into legacy (deleted in the past but still reporting
    # a viewCount), available (viewCount > 0) and obsolete (viewCount == 0)
    legacy = {}
    available = {}
    obsolete = {}

    dict_dashboards = data[0]
    list_dashboard_ids = data[1]
    dict_metric_queries = data[2]
    list_metric_query_ids = data[3]

    dict_metric_queries_copy = copy.deepcopy(dict_metric_queries)
    list_metric_query_copy_ids = copy.deepcopy(list_metric_query_ids)

    for x, (m, metric_query) in enumerate(dict_metric_queries.items()):
        if metric_query["id"] not in list_dashboard_ids:
            legacy[x] = {"id": metric_query["id"],
                         "name": metric_query["name"],
                         "owner": metric_query["owner"]}
            del dict_metric_queries_copy[m]
            list_metric_query_copy_ids.remove(metric_query["id"])
    logging.debug("%s %s have been deleted in the past", str(env), len(legacy))
    logging.debug("%s %s dashboards with viewCount and active", str(env),
                  len(dict_metric_queries_copy))

    for i, (d, dashboard) in enumerate(dict_dashboards.items()):
        if dashboard["id"] in list_metric_query_copy_ids:
            available[i] = dashboard
        if dashboard["id"] not in list_metric_query_copy_ids:
            obsolete[i] = dashboard
    logging.info("%s %s dashboards with viewCount!", str(env), len(available))
    logging.info("%s %s dashboards with 0 viewCount!", str(env), len(obsolete))

    return {"available": available, "legacy": legacy, "obsolete": obsolete}


def adaptDataStructure(dashboards, metric_queries):
    # flatten the paginated API results into plain dicts and id lists
    dict_dashboards = {}
    list_dashboard_ids = []
    dict_metric_queries = {}
    list_metric_query_ids = []

    for s, stub in enumerate(getattr(dashboards, "_PaginatedList__elements")):
        dict_dashboards[s] = {"id": getattr(stub, "id"),
                              "name": getattr(stub, "name"),
                              "owner": getattr(stub, "owner")}
        list_dashboard_ids.append(getattr(stub, "id"))

    for collection in getattr(metric_queries, "_PaginatedList__elements"):
        for m, q in enumerate(getattr(collection, "data")):
            dict_metric_queries[m] = {"id": getattr(q, "dimension_map")["id"],
                                      "name": None,
                                      "owner": None}
            list_metric_query_ids.append(getattr(q, "dimension_map")["id"])

    return [dict_dashboards, list_dashboard_ids, dict_metric_queries,
            list_metric_query_ids]


def getDashboardsWithViewCount(env, client, METRIC_SELECTOR, RESOLUTION,
                               FROM_DATE, TO_DATE):
    logging.debug("%s get dashboards with viewCount, resolution %s ...",
                  str(env), RESOLUTION)
    metric_query = client.metrics.query(METRIC_SELECTOR, RESOLUTION, FROM_DATE,
                                        TO_DATE)
    n_metric_query = getattr(metric_query, "_PaginatedList__total_count")
    logging.debug("%s %s dashboards with viewCount and older than 6 Months",
                  str(env), str(n_metric_query))
    return metric_query


def getDashboards(env, client):
    logging.debug("%s get all dashboards...", str(env))
    dashboards = client.dashboards.list(owner=None, tags=None)
    n_dashboards = getattr(dashboards, "_PaginatedList__total_count")
    logging.info("%s %s total dashboards", str(env), str(n_dashboards))
    return dashboards


def initDtClient(env, DT_URL, DT_TOKEN):
    logging.debug("%s init Dynatrace client...", str(env))
    DT_CLIENT = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"), None, None,
                          0, 10*1000)
    return DT_CLIENT


if __name__ == "__main__":
    dict_dashboards = {}
    list_environments = []
    # do it manually for CD_TS-CMS
    list_exclude_branches = ["HEAD", "master", "template", "CD_TS-CMS"]
    list_exclude_files = ["providers.tf", "data_source.tf"]

    with open(Path("./environment.yaml")) as env_cfg:
        environments = yaml.safe_load(env_cfg)

    # classify the dashboards of every environment that has a token configured
    for env, doc in environments.items():
        logging.debug("%s checking token...", str(env))
        if config(dict(doc[2]).get("env-token-name"), default='') != "":
            DT_URL = dict(doc[1]).get("env-url")
            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
            METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
            RESOLUTION = dict(doc[6]).get("resolution")
            FROM_DATE = dict(doc[7]).get("fromDate")
            TO_DATE = dict(doc[8]).get("toDate")

            client = initDtClient(env, DT_URL, DT_TOKEN)
            dashboards = getDashboards(env, client)
            metric_queries = getDashboardsWithViewCount(env, client,
                                                        METRIC_SELECTOR,
                                                        RESOLUTION, FROM_DATE,
                                                        TO_DATE)
            data = adaptDataStructure(dashboards, metric_queries)
            result = evaluate(env, data)
            # writeToExcel(env, t, result)
            dict_dashboards[env] = result
            list_environments.append(env)

    # walk every onboarding branch and drop the Terraform files of
    # obsolete dashboards
    repo = fetch_repository(config("REPOSITORY_URL"), config("REPOSITORY_PATH"))
    list_branches = fetch_branches(repo)
    for b in list_exclude_branches:
        list_branches.remove(b)
    # repo_ = checkout_master(repo)
    repo_ = repo
    wd = Path(repo_.git.working_dir)

    # try:
    #     with open(Path("./dashboards.txt"), "a+", encoding="utf-8") as f:
    for i, branch in enumerate(list_branches):
        is_commit = False
        repo_.git.checkout(branch)
        logging.info("%d - branch: %s", i, str(branch))
        files = glob.glob(str(wd) + '/**/dashboard/*.tf', recursive=True)
        for file in files:
            is_deleted = False
            if os.path.basename(file) not in list_exclude_files:
                # f.write("%s | %s\n" % (format_block(branch, 50), file))
                iid, nname, oowner = check_metadata(file)
                current_db = {"id": iid, "name": nname, "owner": oowner}
                is_deleted, environment = check_dashboard(branch, file,
                                                          current_db,
                                                          list_environments,
                                                          dict_dashboards)
                if is_deleted:
                    is_commit = True
                    dt_url, dt_token = get_credentials(environment,
                                                       environments)
                    dt_client = initDtClient(environment, dt_url, dt_token)
                    # NOTE: the deletion itself has not been tested yet
                    # delete_dashboard(dt_client, environment, current_db)
        if is_commit:
            git_push(repo_, "origin", branch, "Dashboard cleanup")
            # ToDo: Create Pull Request and Merge --> manually or via code?
    # except Exception as e:
    #     print("FINAL Exception:", e)
    # delete_dir(Path(config("REPOSITORY_PATH")))

    logging.info("finished")

(deleted file)

@@ -1,6 +0,0 @@
-format_block("SCHSHSHSHSHSHSHSCHSHSHSHSHSHSH")
-format_block("SCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSH")
-format_block("SCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSH")
-format_block("SCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSH")
-format_block("SCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSHSCHSHSHSHSHSHSH")