not tested yet; pull request creation via Atlassian still needs to be implemented
parent 95d6cbfdcb
commit ff32507e77
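
The commit message notes that pull-request creation via Atlassian is still missing. As a rough sketch of what that step could look like against the Bitbucket Server REST API (this is not part of the commit; BITBUCKET_URL, BITBUCKET_PROJECT_KEY, BITBUCKET_REPO_SLUG, and BITBUCKET_TOKEN are placeholder config names, and the target branch is assumed to be master as in main.py):

import requests
from decouple import config

def create_pull_request(branch):
    # Bitbucket Server: POST /rest/api/1.0/projects/{key}/repos/{slug}/pull-requests
    base = config("BITBUCKET_URL").rstrip("/")
    project_key = config("BITBUCKET_PROJECT_KEY")
    repo_slug = config("BITBUCKET_REPO_SLUG")
    url = "%s/rest/api/1.0/projects/%s/repos/%s/pull-requests" % (
        base, project_key, repo_slug)
    # fromRef/toRef identify the source and target branches in this repository
    ref = {"repository": {"slug": repo_slug, "project": {"key": project_key}}}
    payload = {
        "title": "Dashboard cleanup",
        "description": "Automated cleanup of obsolete dashboards",
        "fromRef": dict(ref, id="refs/heads/" + branch),
        "toRef": dict(ref, id="refs/heads/master"),
    }
    response = requests.post(url, json=payload, timeout=30,
                             headers={"Authorization": "Bearer " +
                                      config("BITBUCKET_TOKEN")})
    response.raise_for_status()
    return response.json()

A call like this would follow git_push() in main.py once the cleanup branch has been pushed.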

@@ -1,153 +1,153 @@
.vscode
.idea
# Byte-compiled / optimized / DLL files
__pycache__/
more_utils/__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
venv2/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

### Terraform stuff
**/.terraform/*
crash.log
*.tfvars

#excel reports
*.xlsx
*.csv

# for dev
slo_parameter.yaml
metricexpressions.json
*.bak
*.json
failed_requests.txt

# other
*.txt
log/

120 environment.yaml
@@ -1,61 +1,61 @@
---
euprod-coco:
  - name: "euprod"
  - env-url: "https://xxu26128.live.dynatrace.com"
  - env-token-name: "EUPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
eupreprod-coco:
  - name: "eupreprod"
  - env-url: "https://qqk70169.live.dynatrace.com"
  - env-token-name: "EUPREPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
naprod-coco:
  - name: "naprod"
  - env-url: "https://wgv50241.live.dynatrace.com"
  - env-token-name: "NAPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
napreprod-coco:
  - name: "napreprod"
  - env-url: "https://onb44935.live.dynatrace.com"
  - env-token-name: "NAPREPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
cnprod-coco:
  - name: "cnprod"
  - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
  - env-token-name: "CNPROD_TOKEN_VAR"
  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
cnpreprod-coco:
  - name: "cnpreprod"
  - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
  - env-token-name: "CNPREPROD_TOKEN_VAR"
  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
  - type: "coco"
  - metricSelector: "builtin:dashboards.viewCount:splitBy(id):sort(value(auto,ascending))"
  - resolution: "1M"
  - fromDate: "now-6M"
  - toDate: "now"
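
Each environment key maps to a list of single-key mappings, which is why main.py (below) indexes the list positionally, e.g. dict(doc[1]).get("env-url"). A minimal sketch of reading one field per environment under that assumption (variable names illustrative):

import yaml
from pathlib import Path

with open(Path("./environment.yaml")) as env_cfg:
    environments = yaml.safe_load(env_cfg)

for env, doc in environments.items():
    # doc is a list of single-key dicts; position 1 holds "env-url"
    print(env, dict(doc[1]).get("env-url"))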

757 main.py
@@ -1,333 +1,424 @@
import copy
import git
import glob
import logging
import os
import pandas as pd
import shutil
import time
import yaml

from decouple import config
from dynatrace import Dynatrace
from pathlib import Path


t = time.strftime("%Y%m%d-%H%M%S")
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
MAX_RETRIES = 3


def git_push(repo, origin, branch, message):
    for _ in range(MAX_RETRIES):
        try:
            repo.git.add(all=True)
            repo.git.commit("-m", message)
            # set_origin = repo.remote(name=origin)
            repo.git.push(origin, branch)
            break
        except Exception as e:
            logging.info(e)
            logging.info("retry attempt %d/%d" % (_ + 1, MAX_RETRIES))


def delete_dashboard(dt_client, environment, current_db):
    response = None  # stays None if the delete call raises
    try:
        response = dt_client.dashboards.delete(str(current_db["id"]))
        logging.info("Deletion of dashboard %s (%s) in %s successful" %
                     (str(current_db["name"]), current_db["id"],
                      str(environment)))
    except Exception as e:
        logging.info("During deletion of dashboard the following exception has"
                     " been encountered: %s", e)

    return response


def get_credentials(e, environment):
    DT_URL = None
    DT_TOKEN = None
    for env, doc in environment.items():
        if str(e) == str(env):
            DT_URL = dict(doc[1]).get("env-url")
            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')

    return [DT_URL, DT_TOKEN]


def delete_file(branch, file):
    is_deleted = False

    try:
        os.remove(file)
        with open(Path("./log_deleted_" + str(t) + ".txt"), "a+",
                  encoding="utf-8") as f:
            f.write("File on branch %s in %s has been deleted\n" % (branch,
                                                                    file))
        logging.debug("File on branch %s in %s has been deleted" % (branch,
                                                                    file))
        is_deleted = True
    except OSError as e:
        logging.info("Error: %s - %s." % (e.filename, e.strerror))
        is_deleted = False

    return is_deleted


# delete based only on id! if there is no id, deletion is not possible!
def check_dashboard(branch, file, current_db, list_environments,
                    dict_dashboards):
    is_deleted = False
    is_stop = False
    e = None  # environment in which the obsolete dashboard was found

    for e in list_environments:
        for k, v in dict_dashboards[e]["obsolete"].items():
            if current_db["id"] == v["id"]:
                is_stop = True
                logging.debug("Obsolete dashboard on branch %s in %s",
                              str(branch), str(file))
                # return True
                is_deleted = delete_file(branch, Path(file))
                break
        if is_stop == True:
            break

    return is_deleted, e


def check_metadata(file):
    id = None
    name = None
    owner = None

    with open(file, "r") as f:
        lines = [next(f) for _ in range(2)]

    if "LEGACY" in str(lines[1]):
        id = lines[1].strip().replace("\n", "").replace("#", "").strip()\
            .split(" ")[1].strip()
    elif "ID" in str(lines[0]):
        id = lines[0].strip().replace("\n", "").replace("#", "").strip()\
            .split(" ")[1].strip()
    elif "DEFINE" in str(lines[0]):
        id = lines[0].strip().replace("\n", "").split("=")[1].strip()
    else:
        id = None

    with open(file, "r") as f:
        num_lines = sum(1 for _ in f)

    with open(file, "r") as f:
        lines = [next(f) for _ in range(int(num_lines))]

    for x, line in enumerate(lines):
        if "dashboard_metadata {" in line:
            metadata = lines[x:x+5]
            for md in metadata:
                if "name" in md:
                    name = md.strip().replace("\n", "").split("=")[1].strip()\
                        .replace('"', "").strip()
                if "owner" in md:
                    owner = md.strip().replace("\n", "").split("=")[1].strip()\
                        .replace('"', "").strip()
        elif "dashboardName" in line:
            name = line.strip().replace("\n", "").split("=")[1].strip()\
                .replace('"', "").strip()
            owner = None

    return [id, name, owner]


def format_block(string, max):
    string_length = len(string)
    string = (f'{" "*(max-string_length)}{string}')

    return string


def onerror(func, path, exc_info):
    """
    Error handler for ``shutil.rmtree``.

    If the error is due to an access error (read only file)
    it attempts to add write permission and then retries.

    If the error is for another reason it re-raises the error.

    Usage : ``shutil.rmtree(path, onerror=onerror)``
    """
    import stat
    # Is the error an access error?
    if not os.access(path, os.W_OK):
        os.chmod(path, stat.S_IWUSR)
        func(path)
    else:
        raise


def delete_dir(path):
    logging.info("cleaning up...")
    try:
        shutil.rmtree(path, onerror=onerror)
        logging.info("%s successfully deleted", str(path))
    except OSError as e:
        logging.info("Error: %s - %s." % (e.filename, e.strerror))


def checkout_master(repo):
    logging.info("master branch name is: %s", str(repo.heads.master.name))
    logging.info("checking active branch ...")
    if repo.active_branch.name != repo.heads.master.name:
        logging.info("active branch name is: %s", str(repo.active_branch.name))
        if repo.active_branch.is_detached:
            logging.info("active branch (%s) is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
            logging.info("checking out master...")
            repo.git.checkout("master")
            logging.info("checkout to master successful")
            logging.info("active branch is %s and is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
        else:
            # repo.heads.master.checkout()
            logging.info("active branch (%s) is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
            logging.info("checking out master...")
            repo.git.checkout("master")
            logging.info("checkout to master successful")
            logging.info("active branch is %s and is detached: %s",
                         str(repo.active_branch.name),
                         str(repo.active_branch.is_detached))
    else:
        logging.info("active branch is already master (%s) and is detached: %s",
                     str(repo.active_branch.name),
                     str(repo.active_branch.is_detached))

    return repo


def fetch_branches(repo):
    logging.info("fetching branches...")
    # branches = [repo.git.branch("-r").replace("origin/", "").split("\n ")]
    # branches = repo.remotes.origin.fetch()
    branch_list = [r.remote_head for r in repo.remote().refs]

    return branch_list


def fetch_repository(REPOSITORY_URL, REPOSITORY_PATH):
    # logging.info("fetching repository %s", str(REPOSITORY_URL))
    # repo = git.Repo.clone_from(REPOSITORY_URL,
    #                            Path("../coco_apm_terraform_onboarding"))
    logging.info("repository path %s", str(REPOSITORY_PATH))
    repo = git.Repo(Path(REPOSITORY_PATH))

    return repo


def writeToExcel(env, t, result):
    list_available = []
    list_legacy = []
    list_obsolete = []

    for type in ["available", "legacy", "obsolete"]:
        for i, (ki, vi) in enumerate(result[type].items()):
            if type == "available":
                list_available.append([vi["id"], vi["name"], vi["owner"]])
            if type == "legacy":
                list_legacy.append([vi["id"], vi["name"], vi["owner"]])
            if type == "obsolete":
                list_obsolete.append([vi["id"], vi["name"], vi["owner"]])

    df_available = pd.DataFrame(list_available, columns=['id', 'name', 'owner'])
    df_legacy = pd.DataFrame(list_legacy, columns=['id', 'name', 'owner'])
    df_obsolete = pd.DataFrame(list_obsolete, columns=['id', 'name', 'owner'])

    filename = os.path.join(".", "log",
                            str(t) + "_" + str(env) + '_dashboards.xlsx')
    os.makedirs(os.path.dirname(filename), exist_ok=True)

    with pd.ExcelWriter(filename) as writer:
        df_available.to_excel(writer, sheet_name='available')
        df_legacy.to_excel(writer, sheet_name='legacy')
        df_obsolete.to_excel(writer, sheet_name='obsolete')


def evaluate(env, data):
    legacy = {}
    available = {}
    obsolete = {}
    dict_dashboards = data[0]
    list_dashboard_ids = data[1]
    dict_metric_queries = data[2]
    list_metric_query_ids = data[3]
    dict_metric_queries_copy = copy.deepcopy(dict_metric_queries)
    list_metric_query_copy_ids = copy.deepcopy(list_metric_query_ids)

    for x, (m, metric_query) in enumerate(dict_metric_queries.items()):
        if metric_query["id"] not in list_dashboard_ids:
            legacy[x] = {"id": metric_query["id"],
                         "name": metric_query["name"],
                         "owner": metric_query["owner"]}
            del dict_metric_queries_copy[m]
            list_metric_query_copy_ids.remove(metric_query["id"])
    logging.debug("%s %s have been deleted in the past", str(env), len(legacy))
    logging.debug("%s %s dashboards with viewCount and active", str(env),
                  len(dict_metric_queries_copy))

    for i, (d, dashboard) in enumerate(dict_dashboards.items()):
        if dashboard["id"] in list_metric_query_copy_ids:
            available[i] = dashboard
        if dashboard["id"] not in list_metric_query_copy_ids:
            obsolete[i] = dashboard
    logging.info("%s %s dashboards with viewCount!", str(env), len(available))
    logging.info("%s %s dashboards with 0 viewCount!", str(env), len(obsolete))

    return {"available": available, "legacy": legacy, "obsolete": obsolete}


def adaptDataStructure(dashboards, metric_queries):
    dict_dashboards = {}
    list_dashboard_ids = []
    dict_metric_queries = {}
    list_metric_query_ids = []

    for s, stub in enumerate(getattr(dashboards, "_PaginatedList__elements")):
        dict_dashboards[s] = {"id": getattr(stub, "id"),
                              "name": getattr(stub, "name"),
                              "owner": getattr(stub, "owner")}
        list_dashboard_ids.append(getattr(stub, "id"))

    for collection in getattr(metric_queries, "_PaginatedList__elements"):
        for m, q in enumerate(getattr(collection, "data")):
            dict_metric_queries[m] = {"id": getattr(q, "dimension_map")["id"],
                                      "name": None,
                                      "owner": None}
            list_metric_query_ids.append(getattr(q, "dimension_map")["id"])

    return [dict_dashboards, list_dashboard_ids, dict_metric_queries,
            list_metric_query_ids]


def getDashboardsWithViewCount(env, client, METRIC_SELECTOR, RESOLUTION,
                               FROM_DATE, TO_DATE):
    logging.debug("%s get dashboards with viewCount, resolution %s ...",
                  str(env), RESOLUTION)
    metric_query = client.metrics.query(METRIC_SELECTOR, RESOLUTION, FROM_DATE,
                                        TO_DATE)
    n_metric_query = getattr(metric_query, "_PaginatedList__total_count")
    logging.debug("%s %s dashboards with viewCount and older than 6 Months",
                  str(env), str(n_metric_query))

    return metric_query


def getDashboards(env, client):
    logging.debug("%s get all dashboards...", str(env))
    dashboards = client.dashboards.list(owner=None, tags=None)
    n_dashboards = getattr(dashboards, "_PaginatedList__total_count")
    logging.info("%s %s total dashboards", str(env), str(n_dashboards))

    return dashboards


def initDtClient(env, DT_URL, DT_TOKEN):
    logging.debug("%s init Dynatrace client...", str(env))
    DT_CLIENT = Dynatrace(DT_URL, DT_TOKEN, logging.Logger("ERROR"), None, None,
                          0, 10*1000)
    return DT_CLIENT


if __name__ == "__main__":
    dict_dashboards = {}
    list_environments = []

    # do it manually for CD_TS-CMS
    list_exclude_branches = ["HEAD", "master", "template", "CD_TS-CMS"]
    list_exclude_files = ["providers.tf", "data_source.tf"]

    with open(Path("./environment.yaml")) as env_cfg:
        environments = yaml.safe_load(env_cfg)

    for env, doc in environments.items():
        logging.debug("%s checking token...", str(env))

        if config(dict(doc[2]).get("env-token-name"), default='') != "":
            DT_URL = dict(doc[1]).get("env-url")
            DT_TOKEN = config(dict(doc[2]).get("env-token-name"), default='')
            METRIC_SELECTOR = dict(doc[5]).get("metricSelector")
            RESOLUTION = dict(doc[6]).get("resolution")
            FROM_DATE = dict(doc[7]).get("fromDate")
            TO_DATE = dict(doc[8]).get("toDate")

            client = initDtClient(env, DT_URL, DT_TOKEN)
            dashboards = getDashboards(env, client)
            metric_queries = getDashboardsWithViewCount(env, client,
                                                        METRIC_SELECTOR,
                                                        RESOLUTION, FROM_DATE,
                                                        TO_DATE)
            data = adaptDataStructure(dashboards, metric_queries)
            result = evaluate(env, data)
            # writeToExcel(env, t, result)

            dict_dashboards[env] = result
            list_environments.append(env)

    repo = fetch_repository(config("REPOSITORY_URL"), config("REPOSITORY_PATH"))
    list_branches = fetch_branches(repo)

    for b in list_exclude_branches:
        list_branches.remove(b)

    # repo_ = checkout_master(repo)
    repo_ = repo
    wd = Path(repo_.git.working_dir)

    # try:
    # with open(Path("./dashboards.txt"), "a+", encoding="utf-8") as f:
    for i, branch in enumerate(list_branches):
        is_commit = False
        repo_.git.checkout(branch)
        logging.info("%d - branch: %s", i, str(branch))
        files = glob.glob(str(wd) + '/**/dashboard/*.tf', recursive=True)
        for file in files:
            is_deleted = False
            if os.path.basename(file) not in list_exclude_files:
                # f.write("%s | %s\n" % (format_block(branch, 50), file))
                iid, nname, oowner = check_metadata(file)
                current_db = {"id": iid, "name": nname, "owner": oowner}
                is_deleted, environment = check_dashboard(branch, file,
                                                          current_db,
                                                          list_environments,
                                                          dict_dashboards)
                if is_deleted == True:
                    is_commit = True
                    dt_url, dt_token = get_credentials(environment,
                                                       environments)
                    # initDtClient expects the environment name first
                    dt_client = initDtClient(environment, dt_url, dt_token)
                    # I have not tested the deletion yet!
                    # delete_dashboard(dt_client, environment, current_db)

        if is_commit == True:
            git_push(repo_, "origin", branch, "Dashboard cleanup")

    # except Exception as e:
    #     print("FINAL Exception:", e)

    # delete_dir(Path(config("REPOSITORY_PATH")))

    logging.info("finished")