Compare commits

..

No commits in common. "master" and "OPMAAS-3988" have entirely different histories.

12 changed files with 400 additions and 657 deletions

5
.gitignore vendored
View File

@@ -1,9 +1,9 @@
dashboard_tiles_*
[STAGING]*
<<<<<<< HEAD
shared_configuration/
archive/
tiles/
.vscode/
=======
# Byte-compiled / optimized / DLL files
__pycache__/
@@ -140,3 +140,4 @@ dmypy.json
crash.log
*.tfvars
>>>>>>> 746e496e7a7c5e8134cda7921311f6a9ba22f8d3

View File

@@ -1,69 +0,0 @@
---
metadata:
basename: "Global Offboard Reliability 2.1"
stagingname: "[STAGING]Global Offboard Reliability 2.1"
owner: "PATRYK.GUDALEWICZ@partner.bmw.de"
#actual tile
single_value_header_1: "Last 1 h"
single_value_timeframe_1: "-1h"
#graph tile
graph_header: "Graph (3 days)"
graph_timeframe: "-3d"
#ytd tile
single_value_header_2: "Last 3 d"
single_value_timeframe_2: "-3d"
timeframe_ytd: "-3d"
repo:
config_repo_url: "atc.bmwgroup.net/bitbucket/scm/opapm/shared_configuration.git"
config_repo_name: "shared_configuration"
archive_repo_url: "atc.bmwgroup.net/bitbucket/scm/opapm/archive.git"
archive_repo_name: "archive"
slo_path: "./shared_configuration/slo_parameter.yaml"
#business lines are configured with scheme: DEPARTMENT PREFIX : "BUSINESS LINE NAME"
businesslines:
DE-3: "My Journey"
DE-7: "Connected Vehicle Platform"
DE-4: "My Life"
EC-DE: "China Services"
FG-6: "GCDM"
#Changing the width will cause automatic recalculation of columns position
visualconfig:
brick_size: 38
rowheight: 3
rowheight_wall: 3
singlevalue_width: 4
graph_width: 8
singlevalue_width_wall: 4
graph_width_wall: 12
description_tile_width: 7
spacing: 1
spacing_wall: 1
slo_line_color: "#9cd575"
request_count_color: "#74cff7"
response_time_color: "#c396e0"
image_width: 76
image_height: 114
image_data: "data:image/webp;base64,UklGRlYHAABXRUJQVlA4WAoAAAAQAAAAOQAAcAAAQUxQSAQBAAABgGNt2zHn2cDYUzM2O6tyNWVsLCJZAav5+2QWEHZONmBbpTHW95zvpExEoG3bNlS7FdmaIxCVylExPCKo4UqHCvFqXzp8Cgjr6XCpLZbe9x0Q3LdPH339AUL+qKMEKClhje8cb7XATIDUFHQrLIsa9xHLicd5wHLo+js59nnKb3kGAjzDUuogyjplDpJRwSVPv4SD9h1DNC+1PIERokGmYaLmVx7nPo/rkKdXyidDMhoi6uO5LSP+bmcPcaPknsu4OZ0H/5VxH7EcewxrLGt6zLHMAc2fHB9NABQOBWH2PYYtawRKF8Sbz0d0usmNs3dx3s/WJ3SIkyXdOzYqqHFvugWRAFZQOCAsBgAAkBoAnQEqOgBxAD5tMJNHJCKhoSYVXdCADYlpDbwAiVj9T4g+Rr19KlpQ3/9rj/S8IrYT0C+8HEJxzeAh9f5w/Pv9WewZ0rPRuDyMFLG6crgiIWlUyjpmL5HPrpvWBMvukVcE5YNx8ruVhPIdFYzcMcSKDt6WbR2pWVaPGgSC2qlIjC2BEG5M26HdCGzRgIB5zaaZ07nGiRZzz9vXMi/llb00XYLoUdkiCDl3QGQsoImUReZw/BdBhECojaxfssBPjD/e0Byn4skcilQkqjWBGiPtxxnxE2z2ij64eQAA/vk3/iQ3RiQi73vf1jyEzqxtdpC9c6W7f0YHI7IjGBZt8lJwFvqD0/RyKRTUZ4ca6J/cHX43YtfnES4IYCXsiBf7vE+FGn33JO2eppuFhHytXY+aZs2uPZIhEMc5ySwjBTgqO5YxzWiJWr6hciKsoE2DmjaW7ZPSVnPRABOAfnCyEZztvarofuVv87+Sextupq3XeaMmu7H//YGKpaVe/aFeYtQmF10uveMCMvSNBR5QMuewXj+DnANxqkjd00aBzK1cv0C5BR8mfGumw7T2+Yrprn8/SHljjxQPFpYNcJaS3fUH0UiT0VIjf4//6rP5chINAQ3XlqIpAI+js8ruy0zBjhtm1VmnmYY+apBmGU/nhYtgjVjUOm8eqbiRmB7224QhQ+ietF+iSxQOmZj8tpWItmbetKYu5fziovwyp6p3UwZS9bkm0EReYpdBOa71yeXOnryJyfsGm3Gjy1DO5XLYBYpv6/8qn2X3FLf+uH413VN/lTw3Xxq/HkZ8cVY7HNjnnDfqTMgsqhRqgD72PimwBmUVXjqbxnwEEsx/7u094xjgOP7o0NyO1i3Y55hgsBO0O3B0TzfxtARJ+gZ2HL1/P/rhre+AHTZflfJTOpY1tPVKbLwTcChEP+whaFs9njwG85AjgrVOKUWTGG9nIcDxB+vm9v89XxNHM9lZzP3z9rfdjK2AR8DUrCRWYQFaOx86Iu/OoUPVljX/HI+Z93EjoPz+SKT/AfaRtxr2N3OWeOA1X98dFmOhQkrdk5wZ9uedHQGRgHx0Wn1zQuMvV4vcquNcaWAbB8ZQaqiivy/X23br4HyvLDCc9m20dMXRSgyppfst2eCGWjysA+QengG61mxN+qFG8Bu3nRI+JX8Mzcbd5J1rmxydrnVvqO0NHcwF8i/MnRavxxoWfHbcimUOPOF6kPL3GEXV6n+5HNvTIjQmbPMWqlaBguD9P66FUyX3/kWBhPgIK5zh0CvLM4XZHIfjrqFe/AtvswFrtImSr03rjnqwnW2Hi1wvQ/31jUXwjTC0IXDD9xym6rk0FWxJpbdEapIYXqV+a5dSyJ6fTz+da8DYcF6T49O5dOSxB1//jPh6YOF5nqdGp8kJEdzOIvpOAlVY0IlHeRHo1RjWnkf5/AUGU5e+U9mZisn16llZslRbu+7wszY2HNDg8Am1OKUXeqUPrhoZk9XeN7e4IBkz4UxRDlR4AFjdguVyHqFYnRD9kSl+6LqSjNs+o3lQclsjtqAdomU57RrASjAK
YfdFeyVDh+ZaWuANksKPrtNXV8ot8BUnu952F0PU7Zq7Vj+jLBw3myGlA6swuz+0bKItmuttXGxhx/Go+wAtOzWFWwjB2SdKNNpL/ovEF5ibZz+5Fzibio4yW8apoq+UkpCcLbkd5abaPjdrjIXelYG8EDHz402pdyJW8vk49MRRFOvKqyGHJVZY1JZON2Y2oosF+xNq96ekOv09l4mW4zQrUCBiq1MHeB6bTk3Ujc6/9W5W3YpjxPrkQyIykHM0imBojob2/II37UldcLDY8MuG3Dci8rbo8TASEZ/6vAYAvtE16IFVn2Yft/fM1p/aBQQh/6Auz+OqKcSZ4GaCPzmOYHsgIieWAsZJ93o6gdU/cl9NTD/m3ZqtX1n/kCwYHEXOAX9KyvqjXqg55cTK4GnWZzFtojcfG0Fd30O9AlVl0yzSwxwxFA128xJmPoCrDV339n/l1jnaYx3ivNfCCZU7qhzUpecgzq4p/VsMQX3Pb8elQ40J68H/2n/2/Nl4O/sRgFXm9Y3Qmj0Bs0wbYY7/3//aU7iSogAAAAA="
graph_axis_min: "97"
graph_axis_max: "100.1"
hubs:
- name: "EMEA"
remote_url: "https://xxu26128.live.dynatrace.com"
remote_url_gcdm: "https://moh22956.live.dynatrace.com"
name_short: "emea"
name_env: "euprod"
- name: "NORTH AMERICA"
remote_url: "https://wgv50241.live.dynatrace.com"
remote_url_gcdm: "https://pcj77768.live.dynatrace.com"
name_short: "na"
name_env: "naprod"
- name: "CHINA"
remote_url: "https://dynatrace-cn-int.bmwgroup.com:443/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
remote_url_gcdm: ""
name_short: "cn"
name_env: "cnprod"

View File

@@ -1,105 +1,419 @@
#Main dashboard generation script
import yaml
from decouple import config
import json
from datetime import datetime
import argparse
from tileFactory import createHeaderTiles
from tileFactory import createImageTile
from tileFactory import createSloTileRow
from repoConfig import clone_repo_if_notexist
from repoConfig import pull_repo
from repoConfig import push_repo
from sloHelper import load_slo_parameter
from sloConfigLoader import getSloConfigurations
from remoteDashboard import get_all_dashboards_withname
from remoteDashboard import backup_dashboards
from remoteDashboard import create_or_update_dashboard
import warnings
warnings.filterwarnings("ignore")
#Script args definition
import requests
from datetime import datetime
from git import Repo
import os
#set STAGING global dashboard name
DASHBOARD_NAME = "[STAGING]Global Offboard Reliability 2.0"
AUTHSTRING = config("BITBUCKET_USERNAME")+":"+config("BITBUCKET_TOKEN")
CONFIG_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/shared_configuration.git"
CONFIG_REPO_NAME = "shared_configuration"
ARCHIVE_REPO_URL = "https://"+AUTHSTRING+"@atc.bmwgroup.net/bitbucket/scm/opapm/archive.git"
ARCHIVE_REPO_NAME = "archive"
BUSINESS_LINES = {'DE-3':'My Journey','DE-7':'Connected Vehicle Platform','DE-4':'My Life','EC-DE':'China Services'}
parser = argparse.ArgumentParser(description="Generate and deploy the Dynatrace Global Dashboard as Code. Auto deployment works only for STAGING dashboard",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-R", "--rows", type=int, help="Number of rows per dashboard. If not specified, all rows will be added to single dashboard")
parser.add_argument('--auto-upload', default=False, action='store_true', help="Auto upload to STAGING dashboard")
parser.add_argument('-D', '--department', type=str,default="ALL", required=False, help="Define department for which the dashboard should be updated: 'DE-3', 'DE-7', 'DE-4' or 'EC-DE'. Leave empty or use 'ALL' if you want to generate 1 cumulated dashboard")
parser.add_argument('--wall', default=False, action='store_true', help="By default script is generating desktop version. Use parameter to set dashboard generation to type 'Wall'.")
args = parser.parse_args()
def clone_repo_if_notexist(repourl, reponame):
if(not os.path.isdir(reponame)):
repo = Repo.clone_from(repourl, reponame)
return repo
return Repo(reponame)
def pull_repo(repo):
origin = repo.remotes.origin
origin.pull()
#Loads script config file
def loadConfig():
with open('./config.yaml') as file:
return yaml.safe_load(file)
def push_repo(repo, message):
repo.git.add(all=True)
repo.index.commit(message)
origin = repo.remotes.origin
origin.push()
def main():
#Load script config file
script_config = loadConfig()
def load_slo_parameter(path):
# the first part is to read a yaml and only select latest, valid config
with open(path) as file:
slo_doc = yaml.safe_load(file)
#Pull shared repositories
configrepo = clone_repo_if_notexist(script_config["repo"]["config_repo_url"], script_config["repo"]["config_repo_name"])
pull_repo(configrepo)
archiverepo = clone_repo_if_notexist(script_config["repo"]["archive_repo_url"], script_config["repo"]["archive_repo_name"])
pull_repo(archiverepo)
#Load env file
with open('./environment.yaml') as file:
doc = yaml.safe_load(file)
return slo_doc
def make_request(url, DTAPIToken,verify, method, jsondata):
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
try:
if method == "get":
response = requests.get(url, headers=headers,verify=verify)
elif method == "post":
response = requests.post(url, headers=headers,verify=verify, data=jsondata)
elif method == "put":
response = requests.put(url, headers=headers,verify=verify, data=jsondata)
elif method == "delete":
response = requests.delete(url, headers=headers,verify=verify)
response.raise_for_status()
except requests.exceptions.HTTPError as errh:
return "An Http Error occurred:" + repr(errh)
except requests.exceptions.ConnectionError as errc:
return "An Error Connecting to the API occurred:" + repr(errc)
except requests.exceptions.Timeout as errt:
return "A Timeout Error occurred:" + repr(errt)
except requests.exceptions.RequestException as err:
return "An Unknown Error occurred" + repr(err)
#Load SLO yaml file
slo_doc = load_slo_parameter(script_config["repo"]["slo_path"])
return response
#Create empty dashboards and fill with default headers
dashboard_json = []
dashboard_json.append(createImageTile(script_config))
dashboard_json = dashboard_json + createHeaderTiles(script_config,args.wall)
def get_all_dashboards_withname(DTAPIToken, DTENV,name):
DTAPIURL= DTENV + "api/config/v1/dashboards"
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
print(r)
entityResponse = r.json()
result = []
if("dashboards" in entityResponse):
for dashboard in entityResponse["dashboards"]:
if(dashboard["name"]).startswith(name):
result.append(dashboard)
result = sorted(result, key=lambda x : x['name'], reverse=False)
return result
def backup_dashboards(DTAPIToken, DTENV, dashboards):
for dashboard in dashboards:
DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
entityResponse = r.json()
print("Downloaded dashboard from Dynatrace: "+entityResponse["dashboardMetadata"]["name"]+", creating backup...")
now=datetime.now()
strnow = now.strftime("%Y%m%d_%H%M%S")
strnowdate = now.strftime("%Y%m%d")
if not os.path.isdir("./archive/"+strnowdate):
os.makedirs("./archive/"+strnowdate)
with open("./archive/"+strnowdate+"/"+entityResponse["dashboardMetadata"]["name"]+"_"+strnow+".json", "w") as file:
json.dump(entityResponse, file, indent=2)
def remove_dashboards(DTAPIToken, DTENV, dashboards):
for dashboard in dashboards:
print("Removing STAGING dashboard from Dynatrace: "+dashboard["name"])
DTAPIURL = DTENV + "api/config/v1/dashboards/" + dashboard["id"]
print(make_request(DTAPIURL,DTAPIToken,True,"delete",None))
def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files, businessline):
if(files):
for index, filename in enumerate(files,start=1):
with open('./'+filename) as file:
tilesjson = json.load(file)
if any(dashboard["name"].endswith("#"+str(index)) for dashboard in dashboards):
existingdashboard = next((dashboard for dashboard in dashboards if dashboard["name"].endswith("#"+str(index))), None)
if existingdashboard:
print("Found dashboard for file: "+filename + ", Name: "+ existingdashboard["name"])
DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
entityResponse = r.json()
entityResponse["tiles"] = tilesjson
print("Updating dashboard: "+entityResponse["dashboardMetadata"]["name"])
print(make_request(DTAPIURL,DTAPIToken,True,"put",json.dumps(entityResponse)))
dashboards.remove(existingdashboard)
else:
print("Dashboard for file: "+filename + " not found.")
if(args.department == "ALL"):
dashfullname = DASHBOARD_NAME
else:
dashfullname = DASHBOARD_NAME + " - " + businessline + " #" + str(index)
newdashboard = {
"dashboardMetadata":{
"name": dashfullname,
"owner": "PATRYK.GUDALEWICZ@partner.bmw.de"
},
"tiles":[]
}
DTAPIURL = DTENV + "api/config/v1/dashboards"
newdashboard["tiles"] = tilesjson
print("Creating dashboard: "+newdashboard["dashboardMetadata"]["name"])
creationresult = make_request(DTAPIURL,DTAPIToken,True,"post",json.dumps(newdashboard))
print(creationresult)
remove_dashboards(DTAPIToken, DTENV, dashboards)
def get_bounds (grid_row, grid_column, tile_columnwidth, tile_rowheight):
grid_brick = 38
grid_top = 0 if grid_row == 0 else grid_row * grid_brick
grod_left = 0 if grid_column == 0 else grid_column * grid_brick
grod_width = 0 if tile_columnwidth == 0 else tile_columnwidth * grid_brick
grod_height = 0 if tile_rowheight == 0 else tile_rowheight * grid_brick
bounds = { "top": grid_top, "left": grod_left, "width": grod_width, "height": grod_height }
return bounds
def get_dataExplorerTileSloThreshold(sloThresholdValuesAndColor):
value1 = int(str(sloThresholdValuesAndColor).split("|")[0].split("_")[0])
value2 = int(str(sloThresholdValuesAndColor).split("|")[1].split("_")[0])
value3 = int(str(sloThresholdValuesAndColor).split("|")[2].split("_")[0])
color1 = str(sloThresholdValuesAndColor).split("|")[0].split("_")[1]
color2 = str(sloThresholdValuesAndColor).split("|")[1].split("_")[1]
color3 = str(sloThresholdValuesAndColor).split("|")[2].split("_")[1]
dataExplorerTileThreshold = [ { "value": value1, "color": color1 }, { "value": value2, "color": color2 }, { "value": value3, "color": color3 } ]
return dataExplorerTileThreshold
def get_DataExplorerTile_Markdown(name_short, department, bounds, detailDashboardUrl_EMEA,detailDashboardUrl_NA, detailDashboardUrl_CN, docURL, slourl_EMEA, slourl_NA, slourl_CN,slorelevant, wall ):
# dataExplorerTile_Markdown = {
# "name": "Markdown",
# "tileType": "MARKDOWN",
# "configured": "true",
# "bounds": bounds,
# "tileFilter": {},
# "markdown": "___________\n## " + name_short + "\n\n" + department + " | --> [EMEA](" + detailDashboardUrl_EMEA + ") [NA](" + detailDashboardUrl_NA + ") [CN](" + detailDashboardUrl_CN + ")\n [Documentation](" + docURL + ")"
# }
#without team links
if(not wall):
markdown = "___________\n## " + name_short + "\n\n" + department + " [Documentation](" + docURL + ")"
else:
markdown = "___________\n## " + name_short + "\n\n" + department
if(slorelevant):
markdown = markdown + " [QM-Report] \n"
else:
markdown = markdown + " \n"
if(slourl_EMEA and not wall):
markdown = markdown + "[EMEA]("+slourl_EMEA+") "
if(slourl_NA and not wall):
markdown = markdown + "[NA]("+slourl_NA+") "
if(slourl_CN and not wall):
markdown = markdown + "[CN]("+slourl_CN+") "
dataExplorerTile_Markdown = {
"name": "Markdown",
"tileType": "MARKDOWN",
"configured": "true",
"bounds": bounds,
"tileFilter": {},
"markdown": markdown
}
return dataExplorerTile_Markdown
def get_DataExplorerTile_SingleValue(customName, metricSelector, remoteEnvironmentUrl, bounds, timeframe, graphThreshold):
dataExplorerTile_SingleValue = {
"name": "",
"tileType": "DATA_EXPLORER",
"configured": "true",
"bounds": bounds,
"tileFilter": { "timeframe": timeframe },
"remoteEnvironmentUri": remoteEnvironmentUrl,
"customName": metricSelector,
"queries": [
{
"id": "A",
"timeAggregation": "DEFAULT",
"metricSelector": metricSelector,
"foldTransformation": "TOTAL",
"enabled": "true"
}
],
"visualConfig": {
"type": "SINGLE_VALUE", "global": { "seriesType": "LINE", "hideLegend": "true" },
"rules": [ { "matcher": "A:", "properties": { "color": "DEFAULT", "seriesType": "LINE", "alias": "SLO" }, "seriesOverrides": [{"name": metricSelector, "color": "#ffffff"}] } ],
"axes": { "xAxis": { "visible": "true" }, "yAxes": [] },
"heatmapSettings": {},
"singleValueSettings": { "showTrend": "false", "showSparkLine": "false", "linkTileColorToThreshold": "true" },
"thresholds": [ { "axisTarget": "LEFT", "rules": graphThreshold, "queryId": "", "visible": "true" } ],
"tableSettings": { "isThresholdBackgroundAppliedToCell": "false" },
"graphChartSettings": { "connectNulls": "false" } },
"queriesSettings": { "resolution": "" }
}
return dataExplorerTile_SingleValue
def get_DataExplorerTile_Graph(customName, metricSelector, metricName, remoteEnvironmentUrl, bounds, timeframe, axisTargetMin, axisTargetMax, graphThreshold ):
dataExplorerTile_Graph = {
"name": "",
"tileType": "DATA_EXPLORER",
"configured": "true",
"bounds": bounds,
"tileFilter": { "timeframe": timeframe },
"remoteEnvironmentUri": remoteEnvironmentUrl,
"customName": metricSelector,
"queries": [
{
"id": "A",
"timeAggregation": "DEFAULT",
"metricSelector": metricSelector,
"foldTransformation": "TOTAL",
"enabled": "true"
}
],
"visualConfig": {
"type": "GRAPH_CHART", "global": { "seriesType": "LINE", "hideLegend": "true" },
"rules": [ { "matcher": "A:", "properties": { "color": "DEFAULT", "seriesType": "LINE", "alias": "SLO" }, "seriesOverrides": [{"name": customName, "color": "#ffffff"}] } ],
"axes": { "xAxis": { "visible": "true" }, "yAxes": [{ "displayName": "", "visible": "true", "min": axisTargetMin, "max": axisTargetMax, "position": "LEFT", "queryIds": [ "A" ], "defaultAxis": "true" }] },
"heatmapSettings": {},
"singleValueSettings": { "showTrend": "false", "showSparkLine": "false", "linkTileColorToThreshold": "true" },
"thresholds": [ { "axisTarget": "LEFT", "rules": graphThreshold, "queryId": "", "visible": "true" } ],
"tableSettings": { "isThresholdBackgroundAppliedToCell": "false" },
"graphChartSettings": { "connectNulls": "false" } },
"queriesSettings": { "resolution": "" }
}
return dataExplorerTile_Graph
def create_default_tiles():
newDashboardTiles = []
# HEADER TILES
# Picture Tile
newDashboardTiles.append(
{
"name": "Image", "tileType": "IMAGE", "configured": "true", "bounds": {"top": 0, "left": 0, "width": 76, "height": 114}, "tileFilter": {},
"image": "data:image/webp;base64,UklGRlYHAABXRUJQVlA4WAoAAAAQAAAAOQAAcAAAQUxQSAQBAAABgGNt2zHn2cDYUzM2O6tyNWVsLCJZAav5+2QWEHZONmBbpTHW95zvpExEoG3bNlS7FdmaIxCVylExPCKo4UqHCvFqXzp8Cgjr6XCpLZbe9x0Q3LdPH339AUL+qKMEKClhje8cb7XATIDUFHQrLIsa9xHLicd5wHLo+js59nnKb3kGAjzDUuogyjplDpJRwSVPv4SD9h1DNC+1PIERokGmYaLmVx7nPo/rkKdXyidDMhoi6uO5LSP+bmcPcaPknsu4OZ0H/5VxH7EcewxrLGt6zLHMAc2fHB9NABQOBWH2PYYtawRKF8Sbz0d0usmNs3dx3s/WJ3SIkyXdOzYqqHFvugWRAFZQOCAsBgAAkBoAnQEqOgBxAD5tMJNHJCKhoSYVXdCADYlpDbwAiVj9T4g+Rr19KlpQ3/9rj/S8IrYT0C+8HEJxzeAh9f5w/Pv9WewZ0rPRuDyMFLG6crgiIWlUyjpmL5HPrpvWBMvukVcE5YNx8ruVhPIdFYzcMcSKDt6WbR2pWVaPGgSC2qlIjC2BEG5M26HdCGzRgIB5zaaZ07nGiRZzz9vXMi/llb00XYLoUdkiCDl3QGQsoImUReZw/BdBhECojaxfssBPjD/e0Byn4skcilQkqjWBGiPtxxnxE2z2ij64eQAA/vk3/iQ3RiQi73vf1jyEzqxtdpC9c6W7f0YHI7IjGBZt8lJwFvqD0/RyKRTUZ4ca6J/cHX43YtfnES4IYCXsiBf7vE+FGn33JO2eppuFhHytXY+aZs2uPZIhEMc5ySwjBTgqO5YxzWiJWr6hciKsoE2DmjaW7ZPSVnPRABOAfnCyEZztvarofuVv87+Sextupq3XeaMmu7H//YGKpaVe/aFeYtQmF10uveMCMvSNBR5QMuewXj+DnANxqkjd00aBzK1cv0C5BR8mfGumw7T2+Yrprn8/SHljjxQPFpYNcJaS3fUH0UiT0VIjf4//6rP5chINAQ3XlqIpAI+js8ruy0zBjhtm1VmnmYY+apBmGU/nhYtgjVjUOm8eqbiRmB7224QhQ+ietF+iSxQOmZj8tpWItmbetKYu5fziovwyp6p3UwZS9bkm0EReYpdBOa71yeXOnryJyfsGm3Gjy1DO5XLYBYpv6/8qn2X3FLf+uH413VN/lTw3Xxq/HkZ8cVY7HNjnnDfqTMgsqhRqgD72PimwBmUVXjqbxnwEEsx/7u094xjgOP7o0NyO1i3Y55hgsBO0O3B0TzfxtARJ+gZ2HL1/P/rhre+AHTZflfJTOpY1tPVKbLwTcChEP+whaFs9njwG85AjgrVOKUWTGG9nIcDxB+vm9v89XxNHM9lZzP3z9rfdjK2AR8DUrCRWYQFaOx86Iu/OoUPVljX/HI+Z93EjoPz+SKT/AfaRtxr2N3OWeOA1X98dFmOhQkrdk5wZ9uedHQGRgHx0Wn1zQuMvV4vcquNcaWAbB8ZQaqiivy/X23br4HyvLDCc9m20dMXRSgyppfst2eCGWjysA+QengG61mxN+qFG8Bu3nRI+JX8Mzcbd5J1rmxydrnVvqO0NHcwF8i/MnRavxxoWfHbcimUOPOF6kPL3GEXV6n+5HNvTIjQmbPMWqlaBguD9P66FUyX3/kWBhPgIK5zh0CvLM4XZHIfjrqFe/AtvswFrtImSr03rjnqwnW2Hi1wvQ/31jUXwjTC0IXDD9xym6rk0FWxJpbdEapIYXqV+a5dSyJ6fTz+da8DYcF6T49O5dOSxB1//jPh6YOF5nqdGp8kJEdzOIvpOAlVY0IlHeRHo1RjWnkf5/AUGU5e+U9mZisn16llZslRbu+7wszY2HNDg8Am1OKUXeqUPrhoZk9XeN7e4IBkz4UxRDlR4AFjdguVyHqFYnRD9kSl+6LqSjNs+o3lQclsjtqAdomU57RrASjAKYfd
FeyVDh+ZaWuANksKPrtNXV8ot8BUnu952F0PU7Zq7Vj+jLBw3myGlA6swuz+0bKItmuttXGxhx/Go+wAtOzWFWwjB2SdKNNpL/ovEF5ibZz+5Fzibio4yW8apoq+UkpCcLbkd5abaPjdrjIXelYG8EDHz402pdyJW8vk49MRRFOvKqyGHJVZY1JZON2Y2oosF+xNq96ekOv09l4mW4zQrUCBiq1MHeB6bTk3Ujc6/9W5W3YpjxPrkQyIykHM0imBojob2/II37UldcLDY8MuG3Dci8rbo8TASEZ/6vAYAvtE16IFVn2Yft/fM1p/aBQQh/6Auz+OqKcSZ4GaCPzmOYHsgIieWAsZJ93o6gdU/cl9NTD/m3ZqtX1n/kCwYHEXOAX9KyvqjXqg55cTK4GnWZzFtojcfG0Fd30O9AlVl0yzSwxwxFA128xJmPoCrDV339n/l1jnaYx3ivNfCCZU7qhzUpecgzq4p/VsMQX3Pb8elQ40J68H/2n/2/Nl4O/sRgFXm9Y3Qmj0Bs0wbYY7/3//aU7iSogAAAAA="
}
)
if(args.wall):
# EMEA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 7 , 20 , 2), "tileFilter": {}, "markdown": "# EMEA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 7 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 11 , 12 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 23 , 4 , 1), "tileFilter": {} })
# NORTH AMERICA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 28 , 20 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 28 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 32 , 12 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 44 , 4 , 1), "tileFilter": {} })
# CHINA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 49 , 20 , 2), "tileFilter": {}, "markdown": "# CHINA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 49 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 53 , 12 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 65 , 4 , 1), "tileFilter": {} })
else:
# EMEA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 7 , 14 , 2), "tileFilter": {}, "markdown": "# EMEA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 7 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 11 , 6 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 17 , 4 , 1), "tileFilter": {} })
# NORTH AMERICA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 22 , 14 , 2), "tileFilter": {}, "markdown": "# NORTH AMERICA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 22 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 26 , 6 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 32 , 4 , 1), "tileFilter": {} })
# CHINA HUB
newDashboardTiles.append({ "name": "Header" ,"tileType": "MARKDOWN" , "configured": "true" , "bounds": get_bounds(0 , 37 , 14 , 2), "tileFilter": {}, "markdown": "# CHINA" })
newDashboardTiles.append({ "name": "Last 1 h" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 37 , 4 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Reliability Graph (3 days)" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 41 , 6 , 1), "tileFilter": {} })
newDashboardTiles.append({ "name": "Last 3 days" ,"tileType": "HEADER" , "configured": "true" , "bounds": get_bounds(2 , 47 , 4 , 1), "tileFilter": {} })
return newDashboardTiles
def main(slo_path):
configrepo = clone_repo_if_notexist(CONFIG_REPO_URL, CONFIG_REPO_NAME)
pull_repo(configrepo)
archiverepo = clone_repo_if_notexist(ARCHIVE_REPO_URL, ARCHIVE_REPO_NAME)
pull_repo(archiverepo)
print("Generating dashboard tiles...")
#Configure counters for SLO loop
dahboardcount = boundindex = 1
slo_doc = load_slo_parameter(slo_path)
dashboard_json = create_default_tiles()
if(args.wall):
offsets = [0,21,42]
else:
offsets = [0,15,30]
hub_config = {
"euprod":
{
"offset": offsets[0],
"remote_url": 'https://xxu26128.live.dynatrace.com'
},
"naprod":
{
"offset": offsets[1],
"remote_url": 'https://wgv50241.live.dynatrace.com'
},
"cnprod":
{
"offset": offsets[2],
"remote_url": 'https://dynatrace-cn-int.bmwgroup.com:443/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b'
}
}
timeframe_actual = "-1h"
timeframe_graph = "-3d"
dahboardcount = 1
rowcount = 0
boundindex = 1
generatedfiles = []
if(args.rows is not None):
rowcount = args.rows
if(args.department == "ALL"):
blname = "ALL"
else:
blname = script_config["businesslines"][args.department]
slorelevant = False
blname = BUSINESS_LINES[args.department]
blvalue = args.department
#SLO loop
for slo_name, configuration in slo_doc.items():
if configuration['department'].startswith(blvalue) or blvalue == "ALL":
if(configuration['selector_var'] == "CoCo-QM-Report_Mobile"):
slorelevant = False
if(blname and blvalue):
for slo_name, configuration in slo_doc.items():
if configuration['department'].startswith(blvalue) or blvalue == "ALL":
if(configuration['selector_var'] == "CoCo-QM-Report_Mobile"):
slorelevant = True
print("Dashboard #"+str(dahboardcount)+" : Configurint SLO "+slo_name)
print("Dashboard #"+str(dahboardcount)+" : Configurint SLO "+str(boundindex) +" of "+str(rowcount))
if rowcount > 0 and boundindex > rowcount:
dashboard_json = create_default_tiles()
dahboardcount = dahboardcount+1
boundindex = 1
slo_display = configuration["displayname"]
slo_department = configuration["department"]
#timeframe_ytd = configuration["yearstart"] + " 00:00 to now"
timeframe_ytd = "-3d"
slo_graphThreshold_SingleValue = get_dataExplorerTileSloThreshold(configuration["thresholds"]["single_value"])
slo_graphThreshold_Graph = get_dataExplorerTileSloThreshold(configuration["thresholds"]["graph_value"])
emeaslourl = ""
naslourl = ""
cnslourl = ""
if len(configuration["hubs"]) > 0:
if(configuration["ids"]["emea"]):
emeaslourl = hub_config["euprod"]["remote_url"] + "/ui/slo?id="+configuration["ids"]["emea"]+"&sloexp="+configuration["ids"]["emea"]+"&slovis=Table"
if(configuration["ids"]["na"]):
naslourl = hub_config["naprod"]["remote_url"] + "/ui/slo?id="+configuration["ids"]["na"]+"&sloexp="+configuration["ids"]["na"]+"&slovis=Table"
if(configuration["ids"]["cn"]):
cnslourl = hub_config["cnprod"]["remote_url"] + "/ui/slo?id="+configuration["ids"]["cn"]+"&sloexp="+configuration["ids"]["cn"]+"&slovis=Table"
dashboard_json.append(get_DataExplorerTile_Markdown(slo_display, slo_department, get_bounds(((boundindex)*(3)) , 0 , 7 , 3), configuration["ops_dashboard"]["emea"], configuration["ops_dashboard"]["na"], configuration["ops_dashboard"]["cn"],configuration["doc_url"],emeaslourl,naslourl,cnslourl,slorelevant,args.wall))
for hub,tiles in configuration["hubs"].items():
if 'actual' in tiles["tiles"]:
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 7 + hub_config[hub]["offset"] , 4 , 3), timeframe_actual, slo_graphThreshold_SingleValue))
if "graph" in tiles["tiles"]:
if(args.wall):
dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 12 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
else:
dashboard_json.append(get_DataExplorerTile_Graph(slo_name, configuration["metric"], configuration["selector_var"].replace("~",""), hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 11 + hub_config[hub]["offset"] , 6 , 3), timeframe_graph, "97", "102", slo_graphThreshold_Graph))
if "ytd" in tiles["tiles"]:
if(args.wall):
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 23 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
else:
dashboard_json.append(get_DataExplorerTile_SingleValue(slo_name, configuration["metric"], hub_config[hub]["remote_url"], get_bounds(((boundindex)*(3)) , 17 + hub_config[hub]["offset"] , 4 , 3), timeframe_ytd, slo_graphThreshold_SingleValue))
boundindex = boundindex+1
if rowcount > 0 and boundindex > rowcount:
with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
#When row splitting is configured, generate default tiles for each partial dashboard
if rowcount > 0 and boundindex > rowcount:
dashboard_json = []
dahboardcount = dahboardcount+1
dashboard_json.append(createImageTile(config))
dashboard_json = dashboard_json + createHeaderTiles(config,args.wall)
boundindex = 1
#Load SLO config as object from yaml definition
sloconfigs = getSloConfigurations(configuration, script_config)
#Generate tile row including description, single value and graph tiles
dashboard_json = dashboard_json + createSloTileRow(boundindex,script_config,args.wall,sloconfigs,doc)
#Increment row index
boundindex = boundindex+1
#Save tile JSON to file
if rowcount == 0 or (args.rows is not None and boundindex%args.rows != 0):
with open("./tiles/dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
#Upload staging dashboard if auto upload is specified
if args.auto_upload:
print("Getting existing STAGING dashboards from Dynatrace")
for item, doc in doc.items():
if(item == "globaldashboard"):
if rowcount == 0 or (args.rows is not None and boundindex%args.rows != 0):
with open("dashboard_tiles_"+str(dahboardcount)+".json", "w") as file:
json.dump(dashboard_json, file, indent=2)
generatedfiles.append("dashboard_tiles_"+str(dahboardcount)+".json")
if args.auto_upload:
print("Getting existing STAGING dashboards from Dynatrace")
with open('./environment.yaml') as file:
doc = yaml.safe_load(file)
for item, doc in doc.items():
token = dict(doc[2])
url = dict(doc[1])
print("Crawling through: " + item)
@ -107,13 +421,16 @@ def main():
DTTOKEN = config(token.get('env-token-name'))
DTURL = url.get('env-url')
print("Downloading STAGING dashboards to local repo ("+blname+")...")
existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,script_config["metadata"]["stagingname"]+" - " +blname)
existingdashboards = get_all_dashboards_withname(DTTOKEN, DTURL,DASHBOARD_NAME +blname)
print("Uploading STAGING dashboards to Dynatrace ("+blname+")...")
backup_dashboards(DTTOKEN, DTURL, existingdashboards)
now=datetime.now()
strnowdate = now.strftime("%Y%m%d")
push_repo(archiverepo, strnowdate+"_Global dashboard as code auto-upload backup")
create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles, blname,script_config,args.department)
print("DONE")
create_or_update_dashboard(DTTOKEN, DTURL, existingdashboards, generatedfiles, blname)
else:
print("ERROR: Could not find Business line for given department.")
if __name__ == "__main__":
main()
main('./shared_configuration/slo_parameter.yaml')

View File

@ -1,86 +0,0 @@
#Single value tile definition (for json parse purpose)
class SingleValueTile:
    """Dynatrace Data Explorer SINGLE_VALUE dashboard tile.

    Instances are serialized with vars(...) directly into the dashboard
    JSON, so every attribute name mirrors a Data Explorer tile JSON field.
    Boolean-like values are the strings "true"/"false" as the dashboard
    JSON files in this project use string booleans throughout.
    """
    def __init__(self,name,bounds,timeframe,remoteEnvironmentUrl,metricSelector,graphThreshold):
        """Build the tile definition.

        name -- tile display name
        bounds -- pixel bounds dict ({"top","left","width","height"})
        timeframe -- tile-local timeframe string (e.g. "-1h")
        remoteEnvironmentUrl -- URL of the remote Dynatrace environment holding the metric
        metricSelector -- metric selector queried as series "A"
        graphThreshold -- list of {"value","color"} rules used for tile coloring
        """
        self.name = name
        self.tileType = "DATA_EXPLORER"
        self.configured = "true"
        self.bounds = bounds
        self.tileFilter = { "timeframe": timeframe }
        self.remoteEnvironmentUri = remoteEnvironmentUrl
        # NOTE(review): customName is set to the metric selector rather than the
        # display name — looks intentional (selector shown on hover) but confirm.
        self.customName = metricSelector
        # Single query "A": the SLO metric, folded to a TOTAL single value.
        self.queries = [
            {
                "id": "A",
                "timeAggregation": "DEFAULT",
                "metricSelector": metricSelector,
                "foldTransformation": "TOTAL",
                "enabled": "true"
            }
        ]
        # Tile color is linked to the threshold rules (linkTileColorToThreshold).
        self.visualConfig = {
            "type": "SINGLE_VALUE", "global": { "seriesType": "LINE", "hideLegend": "true" },
            "rules": [ { "matcher": "A:", "properties": { "color": "DEFAULT", "seriesType": "LINE", "alias": "SLO" }, "seriesOverrides": [{"name": metricSelector, "color": "#ffffff"}] } ],
            "axes": { "xAxis": { "visible": "true" }, "yAxes": [] },
            "heatmapSettings": {},
            "singleValueSettings": { "showTrend": "false", "showSparkLine": "false", "linkTileColorToThreshold": "true" },
            "thresholds": [ { "axisTarget": "LEFT", "rules": graphThreshold, "queryId": "", "visible": "true" } ],
            "tableSettings": { "isThresholdBackgroundAppliedToCell": "false" },
            "graphChartSettings": { "connectNulls": "false" } }
        self.queriesSettings = { "resolution": "","foldAggregation": "AVG" }
#Graph tile definition (for json parse purpose)
class GraphTile:
    """Dynatrace Data Explorer GRAPH_CHART dashboard tile.

    Plots three series: the SLO metric (A, line, left axis), request count
    (B, column, right axis) and key-request response time (C, line, left
    axis). Instances are serialized with vars(...) into dashboard JSON, so
    attribute names mirror the tile JSON fields.
    """
    def __init__(self,name,bounds,remoteEnvironmentUrl,metricSelector, countMetricSelector, responseMetricSelector,graphThreshold,customName,axisTargetMin,axisTargetMax,config):
        """Build the tile definition.

        name -- tile display name
        bounds -- pixel bounds dict ({"top","left","width","height"})
        remoteEnvironmentUrl -- URL of the remote Dynatrace environment
        metricSelector -- SLO metric selector (series "A")
        countMetricSelector -- request-count/traffic selector (series "B")
        responseMetricSelector -- response-time/latency selector (series "C")
        graphThreshold -- list of {"value","color"} threshold rules (kept hidden here)
        customName -- alias used for series "A" in the legend/rules
        axisTargetMin, axisTargetMax -- min/max for the left (SLO) axis
        config -- parsed script configuration (timeframe and series colors)
        """
        self.name = name
        self.tileType = "DATA_EXPLORER"
        self.configured = "true"
        self.bounds = bounds
        self.tileFilter = { "timeframe": config["metadata"]["graph_timeframe"] }
        self.remoteEnvironmentUri = remoteEnvironmentUrl
        # NOTE(review): customName parameter exists but the JSON field is set to
        # metricSelector — confirm whether customName was meant here.
        self.customName = metricSelector
        self.queries = [
            {
                "id": "A",
                "timeAggregation": "DEFAULT",
                "metricSelector": metricSelector,
                "foldTransformation": "TOTAL",
                "enabled": "true"
            },
            {
                "id": "B",
                "timeAggregation": "DEFAULT",
                "metricSelector": countMetricSelector,
                "foldTransformation": "TOTAL",
                "enabled": "true"
            },
            {
                "id": "C",
                "timeAggregation": "DEFAULT",
                "metricSelector": responseMetricSelector,
                "foldTransformation": "TOTAL",
                "enabled": "true"
            }
        ]
        # Series colors come from the script config; thresholds are attached but
        # invisible ("visible": "false") so they only drive tile coloring.
        self.visualConfig = {
            "type": "GRAPH_CHART", "global": { "seriesType": "LINE", "hideLegend": "true" },
            "rules": [ { "matcher": "A:", "properties": { "color": "GREEN", "seriesType": "LINE", "alias": customName }, "seriesOverrides": [{"name": customName, "color": config["visualconfig"]["slo_line_color"]}] },
            { "matcher": "B:", "properties": { "color": "BLUE", "seriesType": "COLUMN", "alias": "Request count - server" }, "seriesOverrides": [{"name": "Request count - server", "color": config["visualconfig"]["request_count_color"]}] },
            { "matcher": "C:", "properties": { "color": "PURPLE", "seriesType": "LINE", "alias": "Key request response time" }, "seriesOverrides": [{"name": "Key request response time", "color": config["visualconfig"]["response_time_color"]}] } ],
            "axes": { "xAxis": { "visible": "true" }, "yAxes": [
            { "displayName": "", "visible": "true", "min": axisTargetMin, "max": axisTargetMax, "position": "LEFT", "queryIds": [ "A" ], "defaultAxis": "true" },
            { "displayName": "", "visible": "true", "min": "AUTO", "max": "AUTO", "position": "RIGHT", "queryIds": [ "B" ], "defaultAxis": "true" },
            { "displayName": "", "visible": "true", "min": "AUTO", "max": "AUTO", "position": "LEFT", "queryIds": [ "C" ], "defaultAxis": "true" }
            ] },
            "heatmapSettings": {},
            "singleValueSettings": { "showTrend": "false", "showSparkLine": "false", "linkTileColorToThreshold": "true" },
            "thresholds": [ { "axisTarget": "LEFT", "rules": graphThreshold, "queryId": "", "visible": "false" } ],
            "tableSettings": { "isThresholdBackgroundAppliedToCell": "false" },
            "graphChartSettings": { "connectNulls": "false" } }
        self.queriesSettings = { "resolution": "" }

View File

@ -1,18 +1,4 @@
globaldashboard:
- name: "globaldashboard"
- env-url: "https://jyy23483.live.dynatrace.com/"
- env-token-name: "GLOBAL_CONFIG_TOKEN"
euprod:
- name: "euprod"
- env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN"
- gcdm-token-name: "EUPROD_GCDM_TOKEN"
naprod:
- name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN"
- gcdm-token-name: "NAPROD_GCDM_TOKEN"
cnprod:
- name: "cnprod"
- env-url: "https://dynatrace-cn-int.bmwgroup.com:443/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN"
- env-token-name: "GLOBAL_CONFIG_TOKEN"

View File

@ -68,26 +68,6 @@ File containing environments to execute --auto-upload
name: string #name of environment
env-url: str #url of environment
env-token-name: str #name of environment variable containing API token
## config.yaml
File containing script configuration, like visual config, metadata etc.
## dataExplorerTile.py
Library containing tile object definition for JSON parse purposes
## remoteDashboard.py
Library containing functions used to interact with Dynatrace Dashboard API
## repoConfig.py
Library containing functions used to interact with git repo
## sloConfigLoader.py
Library containing functions used to parse SLO yaml definition into object
## sloHelper.py
Library containing functions used to interact with SLO definitions
## tileFactory.py
Library containing functions used to generate tiles JSON definitions
## requirements.txt

View File

@ -1,103 +0,0 @@
#Library includes functions used for interacting with Dashboard API
import requests
from datetime import datetime
import os
import json
#HTTP request definition
def make_request(url, DTAPIToken, verify, method, jsondata):
    """Perform an HTTP call against the Dynatrace API.

    url -- full API endpoint URL
    DTAPIToken -- Dynatrace API token, sent as an "Api-Token" Authorization header
    verify -- TLS certificate verification flag, passed through to requests
    method -- one of "get", "post", "put", "delete"
    jsondata -- already-serialized JSON request body for post/put (ignored otherwise)

    Returns the requests.Response on success, or a human-readable error
    string on failure (existing callers print it, or call .json() on success).
    Raises ValueError for an unsupported method.
    """
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }
    try:
        # Timeout added so a stalled connection cannot hang the script forever;
        # a triggered timeout is reported through the existing Timeout branch.
        if method == "get":
            response = requests.get(url, headers=headers, verify=verify, timeout=60)
        elif method == "post":
            response = requests.post(url, headers=headers, verify=verify, data=jsondata, timeout=60)
        elif method == "put":
            response = requests.put(url, headers=headers, verify=verify, data=jsondata, timeout=60)
        elif method == "delete":
            response = requests.delete(url, headers=headers, verify=verify, timeout=60)
        else:
            # Previously an unknown method crashed with a NameError on "response";
            # fail explicitly instead (ValueError is not swallowed by the handlers below).
            raise ValueError("Unsupported HTTP method: " + method)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred:" + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred:" + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred:" + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred" + repr(err)
    return response
#Downloads dashboards from provided environment with given name
def get_all_dashboards_withname(DTAPIToken, DTENV, name):
    """Return all dashboards on the tenant whose name starts with *name*, sorted by name."""
    api_url = DTENV + "api/config/v1/dashboards"
    response = make_request(api_url, DTAPIToken, True, "get", None)
    print(response)
    payload = response.json()
    matching = [
        dash
        for dash in payload.get("dashboards", [])
        if dash["name"].startswith(name)
    ]
    return sorted(matching, key=lambda dash: dash["name"])
#Stores current dashboard json files in archive repo
def backup_dashboards(DTAPIToken, DTENV, dashboards):
    """Download each dashboard's full JSON and archive it under ./archive/<yyyymmdd>/."""
    for dash in dashboards:
        api_url = DTENV + "api/config/v1/dashboards/" + dash["id"]
        body = make_request(api_url, DTAPIToken, True, "get", None).json()
        title = body["dashboardMetadata"]["name"]
        print("Downloaded dashboard from Dynatrace: " + title + ", creating backup...")
        now = datetime.now()
        # One folder per calendar day; the timestamp in the filename keeps
        # multiple backups of the same dashboard on the same day distinct.
        day_dir = "./archive/" + now.strftime("%Y%m%d")
        if not os.path.isdir(day_dir):
            os.makedirs(day_dir)
        target = day_dir + "/" + title + "_" + now.strftime("%Y%m%d_%H%M%S") + ".json"
        with open(target, "w") as out:
            json.dump(body, out, indent=2)
#Deletes dashboards from remote tenant
def remove_dashboards(DTAPIToken, DTENV, dashboards):
    """Delete every given dashboard from the remote tenant, printing each API result."""
    for dash in dashboards:
        print("Removing STAGING dashboard from Dynatrace: " + dash["name"])
        result = make_request(DTENV + "api/config/v1/dashboards/" + dash["id"], DTAPIToken, True, "delete", None)
        print(result)
#Creates or updates staging dashboard on given tenant
def create_or_update_dashboard(DTAPIToken, DTENV, dashboards, files, businessline, config, department):
    """Create or update one STAGING dashboard per generated tiles file.

    For each file "dashboard_tiles_<i>.json" (1-based index), an existing
    dashboard whose name ends in "#<i>" is updated in place; otherwise a new
    dashboard is created. Matched dashboards are removed from *dashboards*,
    and whatever remains afterwards is deleted from the tenant (stale pages).

    DTAPIToken -- Dynatrace API token
    DTENV -- tenant base URL
    dashboards -- existing staging dashboards (MUTATED: matched entries removed)
    files -- tile JSON filenames under ./tiles/
    businessline -- business line name appended to new dashboard names
    config -- parsed script config (staging name, owner)
    department -- department filter; "ALL" selects the plain staging name
    """
    if(files):
        for index, filename in enumerate(files,start=1):
            with open('./tiles/'+filename) as file:
                tilesjson = json.load(file)
            # Page number suffix "#<index>" links a tiles file to its dashboard.
            if any(dashboard["name"].endswith("#"+str(index)) for dashboard in dashboards):
                existingdashboard = next((dashboard for dashboard in dashboards if dashboard["name"].endswith("#"+str(index))), None)
                if existingdashboard:
                    print("Found dashboard for file: "+filename + ", Name: "+ existingdashboard["name"])
                    DTAPIURL = DTENV + "api/config/v1/dashboards/" + existingdashboard["id"]
                    # Fetch the full definition, swap only the tiles, and PUT it back
                    # so metadata (owner, sharing, id) is preserved.
                    r = make_request(DTAPIURL,DTAPIToken,True,"get",None)
                    entityResponse = r.json()
                    entityResponse["tiles"] = tilesjson
                    print("Updating dashboard: "+entityResponse["dashboardMetadata"]["name"])
                    print(make_request(DTAPIURL,DTAPIToken,True,"put",json.dumps(entityResponse)))
                    # Removing it here protects it from the cleanup pass below.
                    dashboards.remove(existingdashboard)
            else:
                print("Dashboard for file: "+filename + " not found.")
                # NOTE(review): for department == "ALL" the name has no business line
                # or "#<index>" suffix — with several files this would reuse the same
                # name and the update path above could never match it. Confirm intended.
                if(department == "ALL"):
                    dashfullname = config["metadata"]["stagingname"]
                else:
                    dashfullname = config["metadata"]["stagingname"] + " - " + businessline + " #" + str(index)
                newdashboard = {
                    "dashboardMetadata":{
                        "name": dashfullname,
                        "owner": config["metadata"]["owner"]
                    },
                    "tiles":[]
                }
                DTAPIURL = DTENV + "api/config/v1/dashboards"
                newdashboard["tiles"] = tilesjson
                print("Creating dashboard: "+newdashboard["dashboardMetadata"]["name"])
                creationresult = make_request(DTAPIURL,DTAPIToken,True,"post",json.dumps(newdashboard))
                print(creationresult)
    # Anything left in *dashboards* had no corresponding tiles file: delete it.
    remove_dashboards(DTAPIToken, DTENV, dashboards)

View File

@ -1,19 +0,0 @@
#Library includes functions for interacting with git repos
from git import Repo
import os
#Clones repo if not exists
def clone_repo_if_notexist(repourl, reponame):
    """Open the local repo at *reponame*, cloning it from *repourl* first if absent."""
    if os.path.isdir(reponame):
        return Repo(reponame)
    return Repo.clone_from(repourl, reponame)
#Performs pull on existing repo
def pull_repo(repo):
    """Fetch and merge the latest changes from the repo's origin remote."""
    repo.remotes.origin.pull()
#Performs add, commit and push on existing repo
def push_repo(repo, message):
    """Stage all changes, commit them with *message*, and push to origin."""
    repo.git.add(all=True)
    repo.index.commit(message)
    repo.remotes.origin.push()

View File

@ -3,5 +3,4 @@ pyyaml
requests
datetime
argparse
GitPython
keyrequestparser @ git+https://atc.bmwgroup.net/bitbucket/scm/opapm/keyrequestparser.git
GitPython

View File

@ -1,62 +0,0 @@
#Library includes definition and function for parsing SLO yaml definition into object
from tileFactory import get_dataExplorerTileSloThreshold
class Sloconfig:
    """Mutable container for one hub-specific SLO tile configuration.

    All fields start empty/False and are filled in by getSloConfigurations.
    """
    # Fields that default to an empty string (identifiers, selectors, URLs).
    _TEXT_FIELDS = (
        "hub", "metric", "custom_traffic", "custom_latency", "selector",
        "sloid", "graph_threshold", "singlevalue_threshold", "displayName",
        "department", "remoteurl", "hubtype", "slourl", "docurl",
    )
    # Flag fields that default to False (QM relevance and which tiles to render).
    _FLAG_FIELDS = ("slorelevant", "tiles_actual", "tiles_graph", "tiles_ytd")

    def __init__(self):
        for attr in self._TEXT_FIELDS:
            setattr(self, attr, "")
        for attr in self._FLAG_FIELDS:
            setattr(self, attr, False)
#Loads values from yaml config files and parses them into object
#Loads values from yaml config files and parses them into object
def getSloConfigurations(sloconfig, scriptconfig):
    """Build one Sloconfig object per hub listed in the SLO yaml definition.

    sloconfig -- one SLO entry from slo_parameter.yaml
    scriptconfig -- the parsed script configuration (supplies hub URLs/ids)
    Returns a list of fully-populated Sloconfig objects.
    """
    parsed = []
    for hubname, hubdef in sloconfig["hubs"].items():
        cfg = Sloconfig()
        cfg.hub = hubname
        cfg.metric = sloconfig["metric"]
        # Optional overrides; absent keys leave the empty-string default.
        cfg.custom_latency = sloconfig.get("custom_latency_metric", "")
        cfg.custom_traffic = sloconfig.get("custom_traffic_metric", "")
        cfg.selector = sloconfig["selector_var"].replace("~", "")
        cfg.singlevalue_threshold = get_dataExplorerTileSloThreshold(sloconfig["thresholds"]["single_value"])
        cfg.graph_threshold = get_dataExplorerTileSloThreshold(sloconfig["thresholds"]["graph_value"])
        cfg.displayName = sloconfig["displayname"]
        cfg.department = sloconfig["department"]
        cfg.docurl = sloconfig["doc_url"]
        # Which tile kinds this hub renders.
        tile_kinds = hubdef["tiles"]
        cfg.tiles_actual = "actual" in tile_kinds
        cfg.tiles_graph = "graph" in tile_kinds
        cfg.tiles_ytd = "ytd" in tile_kinds
        # Resolve the hub's remote URL and SLO id from the script config.
        for env in scriptconfig["hubs"]:
            if env["name_env"] == hubname:
                cfg.sloid = sloconfig["ids"][env["name_short"]]
                if hubdef["type"] == "coco":
                    cfg.remoteurl = env["remote_url"]
                    cfg.hubtype = "coco"
                elif hubdef["type"] == "gcdm":
                    cfg.remoteurl = env["remote_url_gcdm"]
                    cfg.hubtype = "gcdm"
        cfg.slourl = cfg.remoteurl + "/ui/slo?id=" + cfg.sloid + "&sloexp=" + cfg.sloid + "&slovis=Table"
        # Only QM-report selectors are flagged as SLO-relevant.
        cfg.slorelevant = cfg.selector.startswith("CoCo-QM-Report_")
        parsed.append(cfg)
    return parsed

View File

@ -1,63 +0,0 @@
#Library includes functions for interacting with SLOs
import yaml
from remoteDashboard import make_request
from KRParser import krparser
from decouple import config
#Loads SLOs definitions from yaml file
def load_slo_parameter(path):
    """Read the SLO definitions yaml file at *path* and return the parsed document."""
    with open(path) as slo_file:
        return yaml.safe_load(slo_file)
#Downloads SLO definition from Dynatrace API
def getSLO(env, envurl, sloid, DTAPIToken):
    """Fetch one SLO definition from the Dynatrace API and tag it with its source env."""
    endpoint = envurl + "/api/v2/slo/" + sloid + "?timeFrame=CURRENT"
    slo = make_request(endpoint, DTAPIToken, True, "get", "").json()
    slo["env"] = env
    return slo
#Generates Request Count selector for given service names
#Generates Request Count selector for given service names
def getSloReqCountSelector(service_names):
    """Build a Dynatrace traffic (request count) metric selector.

    service_names -- dict with "selectortype" ("KR" key requests or "SRV"
    services) and "services" (comma-joined ~"-quoted entity names).
    Returns the selector string, or "" for an unknown selector type.
    """
    kind = service_names["selectortype"]
    if kind == "KR":
        names = service_names["services"]
        print("Building Keyrequest count selector for: " + names)
        return ("builtin:service.keyRequest.count.total:filter(and(or(in(\"dt.entity.service_method\","
                "entitySelector(\"type(service_method), fromRelationship.isServiceMethodOfService("
                " type(~\"SERVICE~\"),entityName.in(" + names + "))\"))))):splitBy()")
    if kind == "SRV":
        names = service_names["services"]
        print("Building Service requests count selector for: " + names)
        return ("builtin:service.requestCount.total:filter(and(or(in(\"dt.entity.service\","
                "entitySelector(\"type(service),entityName.in(" + names + ")\"))))):splitBy()")
    return ""
#Generates Response Time selector for given service names
#Generates Response Time selector for given service names
def getSloReqTimeSelector(service_names):
    """Build a Dynatrace response-time metric selector.

    service_names -- dict with "selectortype" ("KR" or "SRV") and "services".
    Returns the selector string, or "" for an unknown selector type.
    """
    kind = service_names["selectortype"]
    if kind == "KR":
        return ("builtin:service.keyRequest.response.server:filter(and(or(in(\"dt.entity.service_method\","
                "entitySelector(\"type(service_method), fromRelationship.isServiceMethodOfService("
                " type(~\"SERVICE~\"),entityName.in(" + service_names["services"] + "))\"))))):splitBy()")
    if kind == "SRV":
        return ("builtin:service.response.server:filter(and(or(in(\"dt.entity.service\","
                "entitySelector(\"type(service),entityName.in(" + service_names["services"] + ")\"))))):splitBy()")
    return ""
#Parses SLO definition with KRParses and returns service name array
#Parses SLO definition with KRParses and returns service name array
def getSloSrvNames(sloconfig, doc):
    """Resolve the service names behind an SLO via the KRParser library.

    sloconfig -- a populated Sloconfig for one hub
    doc -- parsed environment.yaml document (token lookup per hub)
    Returns {"selectortype": "KR"|"SRV"|"", "services": comma-joined
    ~"-quoted service names} for use by the selector builders.
    """
    # gcdm hubs authenticate with a dedicated token (list position 3 in the
    # environment yaml); all others use the regular env token (position 2).
    if(sloconfig.hubtype == "gcdm"):
        krp = krparser.KRParser(name=sloconfig.hub,options=krparser.KROption.RESOLVESERVICES, config={"threads":10, "serviceLookupParams":{"fields":"tags"}, "extendResultObjects":{"env":sloconfig.hub}}, DTAPIURL=sloconfig.remoteurl, DTAPIToken=config(doc[sloconfig.hub][3].get('gcdm-token-name')))
        sloobj = getSLO(sloconfig.hub,sloconfig.remoteurl,sloconfig.sloid,config(doc[sloconfig.hub][3].get('gcdm-token-name')))
    else:
        krp = krparser.KRParser(name=sloconfig.hub,options=krparser.KROption.RESOLVESERVICES, config={"threads":10, "serviceLookupParams":{"fields":"tags"}, "extendResultObjects":{"env":sloconfig.hub}}, DTAPIURL=sloconfig.remoteurl, DTAPIToken=config(doc[sloconfig.hub][2].get('env-token-name')))
        sloobj = getSLO(sloconfig.hub,sloconfig.remoteurl,sloconfig.sloid,config(doc[sloconfig.hub][2].get('env-token-name')))
    krs = krp.parse(sloobj)
    slosrvnames = []
    outputslos = []
    selectortype = ""
    for krslo in krs:
        # NOTE(review): the elif condition is the exact negation of the if — it is
        # effectively "else", and the last parsed expression wins for selectortype.
        if("builtin:service.keyRequest" in krslo.metadata["metricExpression"]):
            selectortype = "KR"
        elif("builtin:service.keyRequest" not in krslo.metadata["metricExpression"]):
            selectortype = "SRV"
        for group in krslo.matchedGroups._list:
            for srv in group["services"]:
                slosrvnames.append(srv)
    # De-duplicate while preserving first-seen order.
    slosrvnames = list(dict.fromkeys(slosrvnames))
    # NOTE(review): leftover debug print for one specific SLO — remove?
    if(sloobj["name"] == "TP_Vehicle_Teleservices"):
        print(sloobj["name"])
    # Wrap each name in ~"..."~ quoting as required by entityName.in(...).
    for srv in slosrvnames:
        outputslos.append("~\""+srv+"~\"")
    return {"selectortype":selectortype, "services":",".join(outputslos)}

View File

@ -1,138 +0,0 @@
#Library includes functions for tile generation
from dataExplorerTile import SingleValueTile
from dataExplorerTile import GraphTile
from sloHelper import getSloReqTimeSelector
from sloHelper import getSloReqCountSelector
from sloHelper import getSloSrvNames
#Load coloring thresholds from strings defined in yaml file
#Load coloring thresholds from strings defined in yaml file
def get_dataExplorerTileSloThreshold(sloThresholdValuesAndColor):
    """Parse a "value_COLOR|value_COLOR|..." threshold string into tile threshold rules.

    sloThresholdValuesAndColor -- e.g. "99_#00FF00|95_#FFFF00|0_#FF0000"
    Returns a list of {"value": int, "color": str} dicts in input order.

    Generalized from the previous fixed three-segment parser: any number of
    |-separated segments is accepted (the yaml files currently use three).
    """
    rules = []
    for segment in str(sloThresholdValuesAndColor).split("|"):
        parts = segment.split("_")
        rules.append({"value": int(parts[0]), "color": parts[1]})
    return rules
#Translate row numbers into grid values
#Translate row numbers into grid values
def get_bounds(grid_row, grid_column, tile_columnwidth, row_height, grid_brick=38):
    """Translate grid coordinates (in bricks) into pixel bounds for a dashboard tile.

    grid_row, grid_column -- tile position in bricks
    tile_columnwidth, row_height -- tile size in bricks
    grid_brick -- pixel size of one brick; previously hard-coded to 38, now a
                  parameter defaulting to 38 so it can follow config brick_size.
    Returns a Dynatrace tile bounds dict {"top","left","width","height"}.
    """
    # The "0 if == 0" guards keep the historical behavior of returning an
    # exact int 0 even when a coordinate arrives as float 0.0.
    return {
        "top": 0 if grid_row == 0 else grid_row * grid_brick,
        "left": 0 if grid_column == 0 else grid_column * grid_brick,
        "width": 0 if tile_columnwidth == 0 else tile_columnwidth * grid_brick,
        "height": 0 if row_height == 0 else row_height * grid_brick,
    }
#Generate image tile for dashboard
#Generate image tile for dashboard
def createImageTile(config):
    """Return the dashboard IMAGE tile holding the embedded logo, anchored at (0, 0)."""
    visual = config["visualconfig"]
    return {
        "name": "Image",
        "tileType": "IMAGE",
        "configured": "true",
        "bounds": {"top": 0, "left": 0, "width": visual["image_width"], "height": visual["image_height"]},
        "tileFilter": {},
        "image": visual["image_data"],
    }
#Generate markdown tile for dashboard
#Generate markdown tile for dashboard
def createMarkdownTile(row, name, markdown, column, width, height):
    """Return a MARKDOWN tile at (row, column) spanning width x height bricks."""
    return {
        "name": name,
        "tileType": "MARKDOWN",
        "configured": "true",
        "bounds": get_bounds(row, column, width, height),
        "markdown": markdown,
    }
#Generate header tile for dashboard
#Generate header tile for dashboard
def createHeaderTile(row, name, column, width, height):
    """Return a HEADER tile at (row, column) spanning width x height bricks."""
    return {
        "name": name,
        "tileType": "HEADER",
        "configured": "true",
        "bounds": get_bounds(row, column, width, height),
    }
#Generate markdown text for SLO description tile
#Generate markdown text for SLO description tile
def createSloDescriptionTile(index, name_short, department, config, docURL, slorelevant, slourls, wall):
    """Build the leftmost markdown description tile for one SLO row.

    The wall layout omits the documentation link and per-hub SLO links;
    the regular layout includes both. A "[QM-Report]" marker is appended
    for QM-relevant SLOs in either layout.
    """
    if wall:
        text = "___________\n## " + name_short + "\n\n" + department
    else:
        text = "___________\n## " + name_short + "\n\n" + department + " [Documentation](" + docURL + ")"
    text += " [QM-Report] \n" if slorelevant else " \n"
    if not wall:
        for hublabel in slourls:
            text += "[" + hublabel.upper() + "](" + slourls[hublabel] + ") "
    return createMarkdownTile(index * config["visualconfig"]["rowheight"], "Markdown", text, 0, config["visualconfig"]["description_tile_width"], config["visualconfig"]["rowheight"])
#Generate summed header tiles for all hubs
#Generate summed header tiles for all hubs
def createHeaderTiles(config, wall):
    """Generate the per-hub header rows (hub title plus three column headers).

    config -- parsed script configuration
    wall -- True for the wall layout (wider wall-specific tile widths)
    Returns the list of markdown/header tile dicts.

    The previous implementation duplicated the whole loop for wall/non-wall;
    the only difference was which width keys were read, so the branches are
    now unified.
    """
    visual = config["visualconfig"]
    single_width = visual["singlevalue_width_wall"] if wall else visual["singlevalue_width"]
    graph_width = visual["graph_width_wall"] if wall else visual["graph_width"]
    brick = visual["brick_size"]
    tiles = []
    # Header block starts to the right of the description column.
    column = visual["description_tile_width"]
    for hub in config["hubs"]:
        # Hub title spans the full width of its three columns.
        tiles.append(createMarkdownTile(0, "Header", "# " + hub["name"], column, 2 * single_width + graph_width, 2))
        tiles.append(createHeaderTile(2, config["metadata"]["single_value_header_1"], column, single_width, 1))
        last = tiles[-1]["bounds"]
        column = last["left"] / brick + last["width"] / brick
        tiles.append(createHeaderTile(2, config["metadata"]["graph_header"], column, graph_width, 1))
        last = tiles[-1]["bounds"]
        column = last["left"] / brick + last["width"] / brick
        tiles.append(createHeaderTile(2, config["metadata"]["single_value_header_2"], column, single_width, 1))
        last = tiles[-1]["bounds"]
        # NOTE(review): both layouts advance by "spacing" — "spacing_wall" exists
        # in the config but was never used here; preserved as-is.
        column = last["left"] / brick + last["width"] / brick + visual["spacing"]
    return tiles
#Create full SLO row
#Create full SLO row
def createSloTileRow(index, config, wall, sloconfigs,doc):
    """Build one dashboard row for an SLO: description + per-hub value/graph tiles.

    index -- 0-based row index (multiplied by rowheight for vertical placement)
    config -- parsed script configuration
    wall -- True for the wall layout (wall-specific widths, no links)
    sloconfigs -- one Sloconfig per hub for the same SLO
    doc -- parsed environment.yaml (passed through for token lookup)
    Returns the tile dicts for the row, description tile first.

    The running currentColumn is recomputed from the last appended tile's
    pixel bounds (left/width divided by brick_size), so tile order matters.
    """
    tiles = []
    sloUrls = {}
    # Start to the right of the description column.
    currentColumn = config["visualconfig"]["description_tile_width"]
    for sloconfig in sloconfigs:
        service_names = getSloSrvNames(sloconfig, doc)
        sloUrls[sloconfig.hub] = sloconfig.slourl
        # "Actual" single-value tile (short timeframe), wall vs regular width.
        if sloconfig.tiles_actual and wall:
            tile = SingleValueTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["singlevalue_width_wall"],config["visualconfig"]["rowheight"]),config["metadata"]["single_value_timeframe_1"],sloconfig.remoteurl,sloconfig.metric,sloconfig.singlevalue_threshold)
            tiles.append(vars(tile))
        elif sloconfig.tiles_actual:
            tile = SingleValueTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["singlevalue_width"],config["visualconfig"]["rowheight"]),config["metadata"]["single_value_timeframe_1"],sloconfig.remoteurl,sloconfig.metric,sloconfig.singlevalue_threshold)
            tiles.append(vars(tile))
        # NOTE(review): if tiles_actual is False nothing was appended and this
        # reads the previous tile's bounds (or raises on an empty list) — confirm
        # every hub always renders an "actual" tile.
        currentColumn = tiles[len(tiles)-1]["bounds"]["left"]/config["visualconfig"]["brick_size"] + tiles[len(tiles)-1]["bounds"]["width"]/config["visualconfig"]["brick_size"]
        if sloconfig.tiles_graph:
            # Custom selectors from the yaml override the generated ones.
            if(sloconfig.custom_traffic):
                traffic_selector = sloconfig.custom_traffic
            else:
                traffic_selector = getSloReqCountSelector(service_names)
            if(sloconfig.custom_latency):
                latency_selector = sloconfig.custom_latency
            else:
                latency_selector = getSloReqTimeSelector(service_names)
            # NOTE(review): graph_axis_min/graph_axis_max are read from visualconfig
            # but are not present in the config yaml shown — verify they exist.
            if wall:
                tile = GraphTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["graph_width_wall"],config["visualconfig"]["rowheight"]),sloconfig.remoteurl,sloconfig.metric,traffic_selector,latency_selector,sloconfig.graph_threshold,sloconfig.metric,config["visualconfig"]["graph_axis_min"],config["visualconfig"]["graph_axis_max"],config)
            else:
                tile = GraphTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["graph_width"],config["visualconfig"]["rowheight"]),sloconfig.remoteurl,sloconfig.metric,traffic_selector,latency_selector,sloconfig.graph_threshold,sloconfig.metric,config["visualconfig"]["graph_axis_min"],config["visualconfig"]["graph_axis_max"],config)
            tiles.append(vars(tile))
            currentColumn = tiles[len(tiles)-1]["bounds"]["left"]/config["visualconfig"]["brick_size"] + tiles[len(tiles)-1]["bounds"]["width"]/config["visualconfig"]["brick_size"]
        # "YTD" single-value tile (longer timeframe), wall vs regular width.
        if sloconfig.tiles_ytd and wall:
            tile = SingleValueTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["singlevalue_width_wall"],config["visualconfig"]["rowheight"]),config["metadata"]["single_value_timeframe_2"],sloconfig.remoteurl,sloconfig.metric,sloconfig.singlevalue_threshold)
            tiles.append(vars(tile))
        elif sloconfig.tiles_ytd:
            tile = SingleValueTile(sloconfig.displayName,get_bounds(index*config["visualconfig"]["rowheight"],currentColumn,config["visualconfig"]["singlevalue_width"],config["visualconfig"]["rowheight"]),config["metadata"]["single_value_timeframe_2"],sloconfig.remoteurl,sloconfig.metric,sloconfig.singlevalue_threshold)
            tiles.append(vars(tile))
        # Advance past this hub's tiles plus inter-hub spacing.
        currentColumn = tiles[len(tiles)-1]["bounds"]["left"]/config["visualconfig"]["brick_size"] + tiles[len(tiles)-1]["bounds"]["width"]/config["visualconfig"]["brick_size"]+config["visualconfig"]["spacing"]
    # Prepend the description tile, built from the first hub's metadata.
    tiles = [createSloDescriptionTile(index,sloconfigs[0].displayName,sloconfigs[0].department,config,sloconfigs[0].docurl,sloconfigs[0].slorelevant,sloUrls,wall)] +tiles
    return tiles