300 lines
9.4 KiB
Python
300 lines
9.4 KiB
Python
from tracemalloc import start
|
|
from decouple import config
|
|
import sys
|
|
import yaml
|
|
import datetime
|
|
import time
|
|
import pandas as pd
|
|
#import requests
|
|
#import openpyxl
|
|
import argparse
|
|
import warnings
|
|
import os
|
|
import re
|
|
#import glob
|
|
|
|
import dynatraceAPI
|
|
from pagination import Pagionation
|
|
|
|
import types
|
|
import SLO
|
|
from patterns.Pattern1 import Pattern1, Pattern2, Pattern3, Pattern4
|
|
import urllib.parse
|
|
|
|
|
|
# Silence library warnings (pandas/urllib3 deprecation noise) in console output.
warnings.filterwarnings("ignore")


# Ordered list of parser pattern objects; applyPatterns() tries them in
# sequence and keeps the result of the first pattern that yields any groups.
patterns=[Pattern1(), Pattern2(), Pattern3(), Pattern4()]
|
|
|
|
def get_request(url, headers):
    """Issue an HTTP GET and return the response.

    Errors are not raised to the caller; instead a human-readable error
    string is returned, so callers must check the return type before use.

    Parameters:
        url: fully qualified request URL.
        headers: dict of HTTP headers (e.g. the Authorization token).

    Returns:
        requests.Response on success, or an error-description str on failure.
    """
    # NOTE(review): the module-level `import requests` is commented out at the
    # top of the file, so the original body raised NameError on every call.
    # Imported locally here to keep the fix scoped to this function.
    import requests

    try:
        response = requests.get(url, headers=headers)
        # Turn 4xx/5xx responses into HTTPError so they hit the handler below.
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred:" + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred:" + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred:" + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred" + repr(err)

    return response
|
|
|
|
def getSLO(ENV, DTAPIToken, DTENV):
    """Fetch every SLO definition from one Dynatrace environment.

    Parameters:
        ENV: logical environment name, written into the 'env' column.
        DTAPIToken: Dynatrace API security token.
        DTENV: Dynatrace base URL.

    Returns:
        pandas.DataFrame with one row per SLO plus an added 'env' column.
    """
    client = dynatraceAPI.Dynatrace(DTENV, DTAPIToken)

    # Fetch all SLOs page by page (25 per page); any filtering happens later.
    paged = client.returnPageination("/api/v2/slo", {'pageSize': 25}, "slo")

    frame = pd.DataFrame(paged.elements)
    frame["env"] = ENV
    return frame
|
|
|
|
|
|
|
|
|
|
# def init_argparse():
|
|
# parser = argparse.ArgumentParser(
|
|
# usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
|
|
# description="gather SLO in daily slices for given Timeframe"
|
|
# )
|
|
# parser.add_argument(
|
|
# "-f","--fromDate",
|
|
# help = "YYYY-mm-dd e.g. 2022-01-01"
|
|
# )
|
|
# parser.add_argument(
|
|
# "-t","--toDate",
|
|
# help = "YYYY-mm-dd e.g. 2022-01-31"
|
|
# )
|
|
# parser.add_argument(
|
|
# "-p","--preSelect",
|
|
# help = "day | week | month - gathers the data for the last full day, week or month"
|
|
# )
|
|
# parser.add_argument(
|
|
# "-s","--slices",
|
|
#         help = "h | d | t | y - writes the slices hourly, daily, total or year to date into excel. given in any order"
|
|
# )
|
|
|
|
|
|
# return parser
|
|
|
|
# def check_inputs(args):
|
|
# '''
|
|
#     This function is the single point of truth for arguments. If new arguments are added they need to be added in here. Returns from and to date.
|
|
# '''
|
|
# if args.preSelect and (args.fromDate or args.toDate):
|
|
#         print("--preSelect must not be used in conjunction with --fromDate and/or --toDate")
|
|
# sys.exit()
|
|
|
|
# elif args.fromDate and not args.toDate:
|
|
# print("--fromDate only in conjunction with --toDate")
|
|
# sys.exit()
|
|
|
|
# elif args.toDate and not args.fromDate:
|
|
# print("--toDate only in conjunction with --fromDate")
|
|
# sys.exit()
|
|
|
|
# elif args.toDate and args.fromDate and not args.preSelect:
|
|
# try:
|
|
# #fromDate = datetime.date.fromisoformat(args.fromDate)
|
|
# fromDate = datetime.datetime.strptime(args.fromDate, "%Y-%m-%d")
|
|
|
|
# #toDate = datetime.date.fromisoformat(args.toDate)
|
|
# toDate = datetime.datetime.strptime(args.toDate, "%Y-%m-%d")
|
|
# except Exception as e:
|
|
# print("Progam closed: " + str(e))
|
|
# sys.exit()
|
|
|
|
# if toDate < fromDate:
|
|
# print("--toDate can't be older than --fromDate")
|
|
# sys.exit()
|
|
|
|
# if toDate > datetime.date.today() or fromDate > datetime.date.today():
|
|
# print("--toDate or --fromDate can't be in the future")
|
|
# sys.exit()
|
|
|
|
# elif args.preSelect and not args.fromDate and not args.toDate:
|
|
|
|
# date = datetime.date.today()
|
|
|
|
# if args.preSelect == "week":
|
|
# fromDate, toDate = previous_week_range(date)
|
|
# elif args.preSelect == "month":
|
|
# fromDate, toDate = previous_month_range(date)
|
|
# elif args.preSelect == "day":
|
|
# fromDate, toDate = previous_day_range(date)
|
|
# else:
|
|
# print("--preSelect must be week or month")
|
|
# sys.exit()
|
|
# else:
|
|
# print("Invalid arguments, please use --help")
|
|
# sys.exit()
|
|
# if args.slices == None:
|
|
# print("-s or --slices must not be null and needs at least one letter of h d t or y, lower- or uppercase.")
|
|
# sys.exit()
|
|
# elif sum([1 if one_inp in str.lower(args.slices) else 0 for one_inp in ['h','d','t','y'] ]) == 0:
|
|
# print("-s or --slices must has at least one letter of h d t or y, lower- or uppercase.")
|
|
# sys.exit()
|
|
# return fromDate, toDate
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def applyPatterns(subject):
    """Run the configured parser patterns against *subject* in order.

    Tries each pattern in the module-level ``patterns`` list and stops at
    the first one that extracts at least one group.

    Parameters:
        subject: normalized SLO filter or metric-expression string.

    Returns:
        list of group dicts from the first matching pattern; an empty list
        when nothing matches. (Previously initialized to ``None``, which
        would crash callers that iterate the result if ``patterns`` were
        ever empty.)
    """
    groups = []
    for p in patterns:
        groups = p.parseServicesAndMethods(subject)
        if len(groups) > 0:
            break

    return groups
|
|
|
|
|
|
def parseAndCreateSLOObject(row):
    """Build an SLO object from one DataFrame row and attach key-request groups.

    Normalizes the row's filter and metric expression, picks the one that
    identifies service methods, parses it with the configured patterns and
    keeps every group that actually names methods.

    Parameters:
        row: pandas row with 'name', 'env', 'filter' and 'metricExpression'.

    Returns:
        SLO.SLO instance with keyRequestGroup populated.
    """
    # Normalize both candidate selector strings.
    norm_filter = normalize(row['filter'])
    norm_expression = normalize(row['metricExpression'])
    tmp_slo = SLO.SLO(row["name"], row["env"], norm_expression, norm_filter, None)

    # A SERVICE_METHOD filter carries the key requests directly; otherwise
    # fall back to parsing the metric expression.
    if norm_filter.upper().startswith("TYPE(SERVICE_METHOD),"):
        subject = norm_filter
    else:
        subject = norm_expression

    groups = applyPatterns(subject)

    # Keep only groups that actually list methods. (A stray no-op `[]`
    # expression statement that sat here has been removed.)
    for g in groups:
        if g["methods"] is not None and len(g["methods"]) > 0:
            tmp_slo.keyRequestGroup.append(g)

    return tmp_slo
|
|
|
|
|
|
|
|
def normalize(x):
    """Collapse whitespace and quoting noise in a metric selector string.

    Strips newlines, tabs and double quotes (a quote preceded by a slash is
    dropped together with the slash), then removes whitespace around
    parentheses and commas so logically equal selectors compare equal.

    Parameters:
        x: raw selector / metric-expression string (may span lines).

    Returns:
        The normalized single-line string.
    """
    tmp = x.replace("\n", "")
    tmp = tmp.replace("/\"", "")
    tmp = tmp.replace("\"", "")
    tmp = tmp.replace("\t", "")

    # Raw strings throughout: "\(" etc. are invalid escape sequences in
    # ordinary string literals (SyntaxWarning on Python >= 3.12).
    tmp = re.sub(r"\s*\)", ")", tmp)      # no whitespace before ')'
    tmp = re.sub(r"\(\s*", "(", tmp)      # no whitespace after '('
    tmp = re.sub(r",\s*", ",", tmp)       # no whitespace after ','
    tmp = re.sub(r"\)\s*,", "),", tmp)    # no whitespace between ')' and ','
    tmp = re.sub(r"in\s*\(", "in(", tmp)  # 'in (' -> 'in('

    return tmp
|
|
|
|
def getParsedSLOs(ENV, DTTOKEN, DTURL):
    """Fetch all SLOs of one environment and parse each row into an SLO object.

    Parameters:
        ENV: logical environment name.
        DTTOKEN: Dynatrace API token.
        DTURL: Dynatrace base URL.

    Returns:
        list of parsed SLO.SLO objects, one per fetched SLO row.
    """
    frame = getSLO(ENV, DTTOKEN, DTURL)
    return [parseAndCreateSLOObject(row) for _, row in frame.iterrows()]
|
|
|
|
def write_to_excel(ignored, notExists):
    """Write the report DataFrames to a dated Excel workbook.

    Creates ./KeyRequest_Report_<today>.xlsx with two sheets:
    'notExists' and 'ignored'.

    Parameters:
        ignored: DataFrame of SLOs for which no key request could be parsed.
        notExists: DataFrame of parsed key requests that no longer exist.
    """
    fileName = "./KeyRequest_Report_" + str(datetime.date.today()) + ".xlsx"

    # Context manager replaces the writer.save()/writer.close() pair:
    # ExcelWriter.save() was deprecated in pandas 1.5 and removed in 2.0,
    # so the original body raises AttributeError on current pandas.
    with pd.ExcelWriter(fileName) as writer:
        notExists.to_excel(writer, sheet_name='notExists')
        ignored.to_excel(writer, sheet_name='ignored')
|
|
|
|
|
|
def getStats(slos):
    """Summarize parsing results into two report DataFrames.

    Parameters:
        slos: iterable of SLO objects (already checked against the API);
            each must expose keyRequests, sloName, env, filter,
            metricExpression, hasNotExistingKeyRequests() and
            getNotExistingKeyRequests().

    Returns:
        (ignored, notExists) tuple of DataFrames, both with columns
        ['slo', 'env', 'keyRequest', 'filter', 'metricExpression'].
        'ignored' lists SLOs without any parsed key request; 'notExists'
        lists parsed key requests that no longer exist in Dynatrace.
    """
    columns = ['slo', 'env', 'keyRequest', 'filter', 'metricExpression']

    ignored_rows = []
    for slo in [a for a in slos if len(a.keyRequests) == 0]:
        ignored_rows.append([slo.sloName, slo.env, "", slo.filter, slo.metricExpression])

    missing_rows = []
    for slo in [s for s in slos if s.hasNotExistingKeyRequests()]:
        for k in slo.getNotExistingKeyRequests():
            missing_rows.append([slo.sloName, slo.env, k["displayName"], slo.filter, slo.metricExpression])

    # dtype=float was dropped: every column holds strings, so forcing a
    # float dtype is wrong (and raises on non-empty data in modern pandas).
    ignored = pd.DataFrame(ignored_rows, columns=columns)
    notExists = pd.DataFrame(missing_rows, columns=columns)

    return ignored, notExists
|
|
|
|
def main(slo_path):
    """Collect SLOs from every configured environment and write the report.

    Reads ./environment.yaml for the environment list, pulls and parses all
    SLOs per environment, verifies their key requests against the API and
    writes the ignored / not-existing report to Excel.

    Parameters:
        slo_path: path to the SLO parameter file. NOTE(review): currently
            unused -- the environment file path is hard-coded below; the
            parameter is kept for interface compatibility.
    """
    resultSlos = []

    with open('./environment.yaml') as file:
        env_doc = yaml.safe_load(file)

    # Iterate through all configured environments.
    for env, doc in env_doc.items():
        token = dict(doc[2])
        url = dict(doc[1])

        # Resolve the token once instead of calling config() twice for the
        # same key (the original checked and then re-read the value).
        DTTOKEN = config(token.get('env-token-name'))
        if DTTOKEN != "":
            print("Gather data, hold on a minute")
            DTURL = url.get('env-url')

            slos = getParsedSLOs(env, DTTOKEN, DTURL)

            # Flag key requests that no longer exist in Dynatrace.
            for slo in slos:
                slo.checkKeyRequetsExists(DTURL, DTTOKEN)

            resultSlos.extend(slos)

    ignored, notExists = getStats(resultSlos)
    write_to_excel(ignored, notExists)
|
|
|
|
# Script entry point: the argument is the SLO parameter file path
# (currently unused by main()).
if __name__ == "__main__":
    main('./slo_parameter.yaml')