Initial commit

master
Patryk Gudalewicz 2022-10-27 13:06:35 +02:00
commit 8fb5af1285
6 changed files with 2132 additions and 0 deletions

.gitignore vendored Normal file

@@ -0,0 +1,143 @@
dashboard_tiles_*
\[STAGING\]*
shared_configuration/
archive/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
### Terraform stuff
**/.terraform/*
crash.log
*.tfvars

createDash.py Normal file

@@ -0,0 +1,129 @@
import yaml
from decouple import config
import json
import argparse
import requests

DASHBOARD_NAME = " - Kubernetes cluster overview"

parser = argparse.ArgumentParser(description="Generate and deploy the Kubernetes Overview Dashboard as Code.",
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-C", "--cluster", type=str, required=True, help="Name of the Kubernetes cluster")
parser.add_argument("--remove", default=False, action="store_true",
                    help="Remove the dashboard for the given cluster. If not specified, the dashboard will be created or updated")
parser.add_argument("-E", "--environment", type=str,
                    help="Name of the environment (the same as in the environment.yaml file). Used to upload the dashboard "
                         "to a specific Dynatrace environment. If not specified, all environments in the file will be used")
args = parser.parse_args()


def make_request(url, DTAPIToken, verify, method, jsondata):
    """Send one request to the Dynatrace API; return the response object,
    or an error description string if the request failed."""
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Api-Token " + DTAPIToken
    }
    try:
        if method == "get":
            response = requests.get(url, headers=headers, verify=verify)
        elif method == "post":
            response = requests.post(url, headers=headers, verify=verify, data=jsondata)
        elif method == "put":
            response = requests.put(url, headers=headers, verify=verify, data=jsondata)
        elif method == "delete":
            response = requests.delete(url, headers=headers, verify=verify)
        else:
            return "Unsupported HTTP method: " + method
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An HTTP error occurred: " + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An error occurred connecting to the API: " + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A timeout error occurred: " + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An unknown error occurred: " + repr(err)
    return response


def get_all_dashboards_withname(DTAPIToken, DTENV, name):
    """Return all dashboards whose name starts with the given prefix, sorted by name."""
    DTAPIURL = DTENV + "api/config/v1/dashboards"
    r = make_request(DTAPIURL, DTAPIToken, True, "get", None)
    print(r)
    if isinstance(r, str):  # make_request returned an error description
        return []
    entityResponse = r.json()
    result = []
    if "dashboards" in entityResponse:
        for dashboard in entityResponse["dashboards"]:
            if dashboard["name"].startswith(name):
                result.append(dashboard)
    return sorted(result, key=lambda x: x["name"])


def remove_dashboards(DTAPIToken, DTENV, dashboards):
    """Delete every dashboard in the list from the Dynatrace environment."""
    for dashboard in dashboards:
        print("Removing dashboard from Dynatrace: " + dashboard["name"])
        DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
        print(make_request(DTAPIURL, DTAPIToken, True, "delete", None))


def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, templatename, dashname):
    """Render the tile template for the requested cluster, then update the
    matching existing dashboard or create a new one."""
    with open("./" + templatename) as file:
        data = file.read()
    data = data.replace("<CLUSTERNAME>", args.cluster)
    tilesjson = json.loads(data)
    if not tilesjson:
        return
    existingdashboard = next((dashboard for dashboard in dashboards if dashboard["name"] == dashname), None)
    if existingdashboard:
        print("Found dashboard, Name: " + existingdashboard["name"])
        DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
        r = make_request(DTAPIURL, DTAPIToken, True, "get", None)
        if isinstance(r, str):
            print(r)
            return
        entityResponse = r.json()
        entityResponse["tiles"] = tilesjson
        print("Updating dashboard: " + entityResponse["dashboardMetadata"]["name"])
        print(make_request(DTAPIURL, DTAPIToken, True, "put", json.dumps(entityResponse)))
    else:
        newdashboard = {
            "dashboardMetadata": {
                "name": dashname,
                "owner": config("DASHBOARD_OWNER"),
                "tags": ["Kubernetes"],
                "preset": True,
                "shared": True
            },
            "tiles": tilesjson
        }
        DTAPIURL = DTENV + "api/config/v1/dashboards"
        print("Creating dashboard: " + newdashboard["dashboardMetadata"]["name"])
        print(make_request(DTAPIURL, DTAPIToken, True, "post", json.dumps(newdashboard)))


def main():
    print("Generating dashboard...")
    FULL_DASHBOARD_NAME = args.cluster + DASHBOARD_NAME
    print("Getting existing dashboards from Dynatrace")
    with open("./environment.yaml") as file:
        environments = yaml.safe_load(file)
    for item, envdef in environments.items():
        # Each environment is a list of single-key mappings; the URL is the
        # second entry and the token name the third (see environment.yaml).
        token = dict(envdef[2])
        url = dict(envdef[1])
        print("Crawling through: " + item)
        print("Gathering data, hold on a minute")
        DTTOKEN = config(token.get("env-token-name"))
        DTURL = url.get("env-url")
        existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL, FULL_DASHBOARD_NAME)
        if not args.environment:
            if not args.remove:
                print("Uploading dashboard to Dynatrace...")
                create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, "kubernetes_tiles_template.json", FULL_DASHBOARD_NAME)
            else:
                remove_dashboards(DTTOKEN, DTURL, existingdashboards)
        elif args.environment == item:
            if not args.remove:
                print("Uploading dashboard to Dynatrace (" + item + ")...")
                create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, "kubernetes_tiles_template.json", FULL_DASHBOARD_NAME)
            else:
                remove_dashboards(DTTOKEN, DTURL, existingdashboards)
            break


if __name__ == "__main__":
    main()

environment.yaml Normal file

@@ -0,0 +1,4 @@
eupreprod:
- name: "eupreprod"
- env-url: "https://qqk70169.live.dynatrace.com/"
- env-token-name: "EMEA_PREPROD_CONFIG_TOKEN"

kubernetes_tiles_template.json Normal file (file diff suppressed because it is too large)

readme.md Normal file

@@ -0,0 +1,57 @@
# Kubernetes dashboard as code

This repository holds the code to generate the Kubernetes overview dashboard as code.
The dashboard is generated for a specific cluster, provided as a parameter to the script.
The dashboard naming convention is "[PROVIDED CLUSTER NAME] - Kubernetes cluster overview", e.g. a cluster passed as `-C mycluster` gets the dashboard "mycluster - Kubernetes cluster overview".

# Prerequisites

## Python packages

Before executing the scripts, the Python requirements have to be satisfied. To do so, execute the following command:

```
pip install -r requirements.txt
```

## .env file

To provide authentication for the API calls, create a ".env" file in the script directory with the following definition:

```
<ENV NAME>=<ENV TOKEN>
```

`<ENV NAME>` is the name of an environment variable. This name should be passed to the "environment.yaml" file as the "env-token-name" parameter.

Example:

- environment.yaml file: `- env-token-name: "GLOBAL_CONFIG_TOKEN"`
- .env file: `GLOBAL_CONFIG_TOKEN=XXXXXXXXXXX`
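createDash.py also reads a `DASHBOARD_OWNER` variable via python-decouple when it creates a new dashboard, so define it in ".env" as well. A complete ".env" might look like this (the values are placeholders):

```
GLOBAL_CONFIG_TOKEN=XXXXXXXXXXX
DASHBOARD_OWNER=owner@example.com
```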
# Usage

```
usage: createDash.py [-h] -C CLUSTER [--remove] [-E ENVIRONMENT]

Generate and deploy the Kubernetes Overview Dashboard as Code.

options:
  -h, --help            show this help message and exit
  -C CLUSTER, --cluster CLUSTER
                        Name of the Kubernetes cluster (default: None)
  --remove              Remove the dashboard for the given cluster. If not specified, the dashboard will be created or updated (default: False)
  -E ENVIRONMENT, --environment ENVIRONMENT
                        Name of the environment (the same as in the environment.yaml file). Used to upload the dashboard to a specific Dynatrace environment. If not specified, all environments in the file will be used (default: None)
```
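For example (`mycluster` is a placeholder for any cluster name; `eupreprod` is the environment defined in environment.yaml):

```
# Create or update the dashboard on every environment in environment.yaml
python createDash.py -C mycluster

# Create or update the dashboard only on the "eupreprod" environment
python createDash.py -C mycluster -E eupreprod

# Remove the dashboard for the cluster from all environments
python createDash.py -C mycluster --remove
```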
# Files

## createDash.py
This script generates the dashboard and uploads it to Dynatrace.
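Under the hood it calls the Dynatrace Configuration API (all four calls appear in the script):

```
GET    {env-url}api/config/v1/dashboards        # list dashboards, filtered by name prefix
PUT    {env-url}api/config/v1/dashboards/{id}   # update the tiles of an existing dashboard
POST   {env-url}api/config/v1/dashboards        # create a new dashboard
DELETE {env-url}api/config/v1/dashboards/{id}   # remove a dashboard (--remove)
```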
## environment.yaml
File containing the environments used to connect to the Dynatrace tenants. Each entry has the form:

```
Environment name:
  - name: string            # name of the environment
  - env-url: string         # URL of the environment
  - env-token-name: string  # name of the environment variable containing the API token
```
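A concrete entry, matching the environment.yaml in this repository:

```yaml
eupreprod:
  - name: "eupreprod"
  - env-url: "https://qqk70169.live.dynatrace.com/"
  - env-token-name: "EMEA_PREPROD_CONFIG_TOKEN"
```

Note that each entry is a list and createDash.py reads the URL and token name by position (second and third item), so keep the keys in this order.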
## requirements.txt
File containing the required Python packages.

## kubernetes_tiles_template.json
File containing the tile template for the dashboard.
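The template is a JSON array of Dynatrace tile definitions; every occurrence of the `<CLUSTERNAME>` placeholder is replaced with the value of `-C/--cluster` before upload. A minimal illustrative sketch (the tile below is hypothetical, not taken from the actual template):

```json
[
  {
    "name": "Markdown",
    "tileType": "MARKDOWN",
    "configured": true,
    "bounds": { "top": 0, "left": 0, "width": 304, "height": 152 },
    "markdown": "## <CLUSTERNAME> cluster overview"
  }
]
```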

requirements.txt Normal file

@@ -0,0 +1,6 @@
python-decouple
pyyaml
requests