restructured

master
ermisw 2022-12-27 13:18:29 +01:00
parent 7e0581a907
commit d272ee6f08
6 changed files with 210 additions and 267 deletions

View File

@@ -1,4 +1,4 @@
from tracemalloc import start
#from tracemalloc import start
from decouple import config
import sys
import yaml
@@ -10,37 +10,38 @@ import pandas as pd
import argparse
import warnings
import os
import re
#import re
#import glob
import dynatraceAPI
from pagination import Pagionation
import types
import SLO
from patterns.Pattern1 import Pattern1, Pattern2, Pattern3, Pattern4
import urllib.parse
#import SLO
#from patterns.Pattern1 import Pattern1, Pattern2, Pattern3, Pattern4
#import urllib.parse
from key_request_parser import krparser
warnings.filterwarnings("ignore")
patterns=[Pattern1(), Pattern2(), Pattern3(), Pattern4()]
#patterns=[Pattern1(), Pattern2(), Pattern3(), Pattern4()]
def get_request(url, headers):
    """GET *url* with *headers* and return the requests Response.

    On any request failure this returns a human-readable error *string*
    instead of a Response -- NOTE(review): callers must check the return
    type before calling .json() etc.; confirm this contract is intended.
    """
    try:
        response = requests.get(url, headers=headers)
        # Turn 4xx/5xx status codes into HTTPError so they are reported too.
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred:" + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred:" + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred:" + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred" + repr(err)
    return response
def getSLO(ENV, DTAPIToken, DTENV):
# DTENV = base url
@@ -50,179 +51,10 @@ def getSLO(ENV, DTAPIToken, DTENV):
# gets all slos and filter later
api_url_report = "/api/v2/slo"
pages = dtclient.returnPageination(api_url_report, my_params_report, "slo")
#only_wanted = [x for x in pages.elements if str.lower(selector) in str.lower(x['description'])]
df = pd.DataFrame(pages.elements)
df["env"]=ENV
return df
# def init_argparse():
# parser = argparse.ArgumentParser(
# usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
# description="gather SLO in daily slices for given Timeframe"
# )
# parser.add_argument(
# "-f","--fromDate",
# help = "YYYY-mm-dd e.g. 2022-01-01"
# )
# parser.add_argument(
# "-t","--toDate",
# help = "YYYY-mm-dd e.g. 2022-01-31"
# )
# parser.add_argument(
# "-p","--preSelect",
# help = "day | week | month - gathers the data for the last full day, week or month"
# )
# parser.add_argument(
# "-s","--slices",
# help = "h | d | t | y - writes the slices hourly, daily, total or year to date into ecxel. given in any order"
# )
# return parser
# def check_inputs(args):
# '''
# This functions is the single point of true for arguments. If new arguments are added they need to be added in here. Returns from and to date.
# '''
# if args.preSelect and (args.fromDate or args.toDate):
# print("--preSelect must not be used in conjuntion with --fromDate and/or --toDate")
# sys.exit()
# elif args.fromDate and not args.toDate:
# print("--fromDate only in conjunction with --toDate")
# sys.exit()
# elif args.toDate and not args.fromDate:
# print("--toDate only in conjunction with --fromDate")
# sys.exit()
# elif args.toDate and args.fromDate and not args.preSelect:
# try:
# #fromDate = datetime.date.fromisoformat(args.fromDate)
# fromDate = datetime.datetime.strptime(args.fromDate, "%Y-%m-%d")
# #toDate = datetime.date.fromisoformat(args.toDate)
# toDate = datetime.datetime.strptime(args.toDate, "%Y-%m-%d")
# except Exception as e:
# print("Progam closed: " + str(e))
# sys.exit()
# if toDate < fromDate:
# print("--toDate can't be older than --fromDate")
# sys.exit()
# if toDate > datetime.date.today() or fromDate > datetime.date.today():
# print("--toDate or --fromDate can't be in the future")
# sys.exit()
# elif args.preSelect and not args.fromDate and not args.toDate:
# date = datetime.date.today()
# if args.preSelect == "week":
# fromDate, toDate = previous_week_range(date)
# elif args.preSelect == "month":
# fromDate, toDate = previous_month_range(date)
# elif args.preSelect == "day":
# fromDate, toDate = previous_day_range(date)
# else:
# print("--preSelect must be week or month")
# sys.exit()
# else:
# print("Invalid arguments, please use --help")
# sys.exit()
# if args.slices == None:
# print("-s or --slices must not be null and needs at least one letter of h d t or y, lower- or uppercase.")
# sys.exit()
# elif sum([1 if one_inp in str.lower(args.slices) else 0 for one_inp in ['h','d','t','y'] ]) == 0:
# print("-s or --slices must has at least one letter of h d t or y, lower- or uppercase.")
# sys.exit()
# return fromDate, toDate
def applyPatterns(subject):
    """Try each configured pattern against *subject* in order.

    Returns the first non-empty group list; if no pattern matches, the
    (empty) result of the last pattern tried is returned.
    """
    groups = None
    for matcher in patterns:
        candidate = matcher.parseServicesAndMethods(subject)
        groups = candidate
        if len(candidate) > 0:
            return candidate
    return groups
def parseAndCreateSLOObject(row):
    """Build an SLO object from one dataframe row.

    Normalizes the row's filter and metric expression, picks the one that
    carries the key requests, and records every pattern group that matched
    at least one method.
    """
    normFilter = normalize(row['filter'])
    normExpression = normalize(row['metricExpression'])
    tmp_SLO = SLO.SLO(row["name"], row["env"], normExpression, normFilter, None)
    # A TYPE(SERVICE_METHOD) filter names the key requests directly;
    # otherwise they are embedded in the metric expression.
    if normFilter.upper().startswith("TYPE(SERVICE_METHOD),"):
        subject = normFilter
    else:
        subject = normExpression
    # Fix: removed a stray no-op `[]` expression statement and dead
    # commented-out pattern loop that preceded this block.
    for g in applyPatterns(subject):
        if g["methods"] is not None and len(g["methods"]) > 0:
            tmp_SLO.keyRequestGroup.append(g)
    return tmp_SLO
def normalize(x):
    """Flatten an SLO filter / metric expression for pattern matching.

    Strips newlines, tabs and (escaped) double quotes, then squeezes the
    whitespace around '(', ')' and ',' so the regex patterns can match the
    expression in a canonical single-line form.
    """
    tmp = x.replace("\n", "")
    tmp = tmp.replace("/\"", "")  # drop slash-quote pairs before plain quotes
    tmp = tmp.replace("\"", "")
    tmp = tmp.replace("\t", "")
    # Fix: regex patterns are now raw strings -- '\,' and similar sequences
    # in plain strings are invalid escape sequences (SyntaxWarning on 3.12+).
    tmp = re.sub(r"([\s]*)\)", ")", tmp)        # '  )'  -> ')'
    tmp = re.sub(r"\([\s\n\r]*", "(", tmp)      # '(  '  -> '('
    tmp = re.sub(r"\,[\s\n\r]*", ",", tmp)      # ',  '  -> ','
    tmp = re.sub(r"\)[\s\n\r]*,", "),", tmp)    # ') ,'  -> '),'
    tmp = re.sub(r"in[\s\n\r]*\(", "in(", tmp)  # 'in (' -> 'in('
    return tmp
def getParsedSLOs(ENV, DTTOKEN, DTURL):
    """Fetch every SLO for *ENV* from Dynatrace and parse each row into an
    SLO object."""
    frame = getSLO(ENV, DTTOKEN, DTURL)
    return [parseAndCreateSLOObject(row) for _, row in frame.iterrows()]
def write_to_excel(ignored, notExists):
@@ -236,35 +68,31 @@ def write_to_excel(ignored, notExists):
writer.close()
def getStats(krs):
    """Split parsed key requests into two report DataFrames.

    Returns (ignored, notExists):
      ignored   -- one row per KR whose SLO yielded no key requests at all;
      notExists -- one row per individual key request that does not exist
                   in Dynatrace (exists == False).
    """
    report_columns = ['slo', 'env', 'displayName', 'entityId', 'filter', 'metricExpression']
    ignored_rows = []
    for kr in [a for a in krs if len(a.keyRequests) == 0]:
        ignored_rows.append([kr.metadata["sloName"], kr.metadata["env"], "", "",
                             kr.metadata["filter"], kr.metadata["metricExpression"]])
    missing_rows = []
    for kr in [s for s in krs if s.hasNotExistingKeyRequests()]:
        for k in kr.getNotExistingKeyRequests():
            missing_rows.append([kr.metadata["sloName"], kr.metadata["env"],
                                 k["displayName"], k["entityId"],
                                 kr.metadata["filter"], kr.metadata["metricExpression"]])
    # NOTE(review): dtype=float on all-string columns looks unintended --
    # kept for compatibility with the original; confirm before relying on it.
    ignored = pd.DataFrame(ignored_rows, columns=report_columns, dtype=float)
    notExists = pd.DataFrame(missing_rows, columns=report_columns, dtype=float)
    return ignored, notExists
def main(slo_path):
resultSlos=[]
with open('./environment.yaml') as file:
env_doc = yaml.safe_load(file)
#iterate through all environments
for env, doc in env_doc.items():
token = dict(doc[2])
@@ -275,23 +103,16 @@ def main(slo_path):
DTTOKEN = config(token.get('env-token-name'))
DTURL = url.get('env-url')
#getParsedSLOs(DTTOKEN, DTURL)
slos=getParsedSLOs(env,DTTOKEN, DTURL)
krs=[]
krp = krparser.KRParser(krparser.KROption.VALIDATE_EXISTS | krparser.KROption.VALIDATE_HASDATA ,DTURL, DTTOKEN)
for slo in slos:
slo.checkKeyRequetsExists(DTURL, DTTOKEN)
# count = sum(map(lambda x : len(x.keyRequests) > 0, slos))
# count_ignored=sum(map(lambda x : len(x.keyRequests) == 0, slos))
# ignored=[a for a in slos if len(a.keyRequests) == 0]
# for i in ignored:
# if i.metricExpression:
# print(i.metricExpression+" "+i.sloName+"\n\n")
# x=0
resultSlos.extend(slos)
slosF=getSLO(env, DTTOKEN, DTURL)
for index, row in slosF.iterrows():
#if row['id'] == "69c57b3f-725f-36fb-88ef-fb64b03eb0ce": #or row['id'] == "ab1bf34a-10fc-3446-9cc7-79d257498a52":
krs.append(krp.parseBySLO(row))
resultSlos.extend(krs)
ignoerd,notExists= getStats(resultSlos)
write_to_excel(ignoerd,notExists)

View File

@@ -78,10 +78,9 @@ class KeyRequestGroup(MutableSequence):
if len(set(group["services"]) - set(val["services"])) > 0 or len(set(group["methods"]) - set(val["methods"])) > 0:
self.insert(len(self._list), val)
from helper import get_request,contains
class SLO:
class KR:
def getNotExistingKeyRequests(self):
    """Return every tracked key request whose existence check failed.

    Only entries with exists == False are returned; entries still pending
    validation (exists is None) are excluded.
    """
    missing = []
    for request in self.keyRequests:
        if request['exists'] == False:
            missing.append(request)
    return missing
@@ -91,59 +90,21 @@ class SLO:
if k['exists']==False:
return True
return False
def checkKeyRequetsExists(self, DTAPIURL, DTAPIToken):
    """Query the Dynatrace entities API for every key-request group and
    record each method as an existing or missing key request.

    Appends one dict per method to self.keyRequests. Always returns False.
    NOTE(review): the unconditional `return False` looks unintentional --
    confirm callers ignore the return value. Method-name typo ("Requets")
    kept as-is for caller compatibility.
    """
    # Entities endpoint of the Dynatrace v2 API.
    DTAPIURL = DTAPIURL + "/api/v2/entities"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }
    for group in self.keyRequestGroup:
        # existsQuery is the entity selector pre-built for this group.
        params={"entitySelector": group["existsQuery"]}
        response = get_request(DTAPIURL, headers, params)
        entities = (response.json())['entities']
        for method in group["methods"]:
            # SERVICE_METHOD- prefixed entries are already entity ids and are
            # matched on entityId; anything else is matched on displayName.
            comparer=None
            if method.startswith('SERVICE_METHOD-'):
                comparer="entityId"
            else:
                comparer="displayName"
            found = [x for x in entities if x[comparer] == method]
            if len(found) > 0:
                # Key request exists: keep the API's entity record, flagged.
                tmp=found[0]
                tmp["exists"]=True
            else:
                # Key request not found in Dynatrace.
                tmp={"displayName":method,"type": "SERVICE_METHOD", "entityId":method, "exists":False}
            self.keyRequests.append(tmp)
    return False
def checkKeyRequestsHasData(self):
    """Placeholder: intended to verify that each key request actually
    reports data; not implemented yet."""
    pass
def __init__(self,
             metadata,
             matchedGroups: KeyRequestGroup = None):
    """Create a key-request record.

    metadata: dict carrying sloName, env, metricExpression and filter.
    matchedGroups: pre-built KeyRequestGroup, or None to start empty.
    """
    self.metadata = metadata
    if matchedGroups is None:
        self.matchedGroups = KeyRequestGroup()
    else:
        # BUG FIX: previously assigned the undefined name
        # `keyRequests_groups` (leftover from the old signature), which
        # raised NameError whenever a group list was actually passed in.
        self.matchedGroups = matchedGroups
    # Flat list of per-method key-request dicts, filled by the parser.
    self.keyRequests = []

View File

@@ -0,0 +1,161 @@
import re
from key_request_parser import patterns, keyrequests, helper
from enum import Flag, auto
class KROption(Flag):
    """Bit flags selecting which validations KRParser.process runs."""
    VALIDATE_EXISTS = auto()   # verify each key request exists in Dynatrace
    VALIDATE_HASDATA = auto()  # verify each key request reports data (not implemented yet)
class KRParser:
    """Parses Dynatrace SLO rows into key-request (KR) objects and optionally
    validates the extracted key requests against the Dynatrace entities API.
    """

    # Candidate matchers, tried in order; the first that yields groups wins.
    patterns = [patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3(), patterns.Pattern4()]

    def __init__(self, options: KROption, DTAPIURL, DTAPIToken):
        """options: validation flags for process(); DTAPIURL/DTAPIToken:
        Dynatrace base URL and API token used for entity lookups."""
        self.DTAPIURL = DTAPIURL
        self.DTAPIToken = DTAPIToken
        self.options = options

    def normalize(self, x):
        """Flatten an SLO filter / metric expression for pattern matching:
        strip newlines, tabs and (escaped) quotes and squeeze whitespace
        around '(', ')' and ','."""
        tmp = x.replace("\n", "")
        tmp = tmp.replace("/\"", "")  # drop slash-quote pairs before plain quotes
        tmp = tmp.replace("\"", "")
        tmp = tmp.replace("\t", "")
        # Raw strings: '\,' etc. in plain strings are invalid escape sequences.
        tmp = re.sub(r"([\s]*)\)", ")", tmp)        # '  )'  -> ')'
        tmp = re.sub(r"\([\s\n\r]*", "(", tmp)      # '(  '  -> '('
        tmp = re.sub(r"\,[\s\n\r]*", ",", tmp)      # ',  '  -> ','
        tmp = re.sub(r"\)[\s\n\r]*,", "),", tmp)    # ') ,'  -> '),'
        tmp = re.sub(r"in[\s\n\r]*\(", "in(", tmp)  # 'in (' -> 'in('
        return tmp

    def applyPatterns(self, subject):
        """Try each pattern in order; return the first non-empty group list
        (or the last pattern's empty result if none matched)."""
        groups = None
        for p in self.patterns:
            groups = p.parseServicesAndMethods(subject)
            if len(groups) > 0:
                break
        return groups

    def checkKeyRequetsExists(self, kr, DTAPIURL, DTAPIToken):
        """Query /api/v2/entities once per matched group and mark each of the
        group's key requests with exists=True/False, filling in displayName
        and entityId from the API record when found.

        NOTE(review): method-name typo ("Requets") kept for compatibility.
        Removed a dead debug statement (`y=0` under an empty-entities check);
        every key request in a group is now marked exists=False when the API
        returns no entities -- confirm against the original intent.
        """
        DTAPIURL = DTAPIURL + "/api/v2/entities"
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTAPIToken
        }
        for gid, group in enumerate(kr.matchedGroups):
            params = {"entitySelector": group["existsQuery"]}
            response = helper.get_request(DTAPIURL, headers, params)
            entities = (response.json())['entities']
            for method in kr.keyRequests:
                if method["groupId"] == gid:
                    # Compare on whichever field process() chose for this entry.
                    found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
                    if len(found) > 0:
                        method["exists"] = True
                        method["displayName"] = found[0]["displayName"]
                        method["entityId"] = found[0]["entityId"]
                    else:
                        method["exists"] = False

    def process(self, kr):
        """Expand kr.matchedGroups into flat keyRequests entries, run the
        validations selected via self.options, and return kr."""
        for gid, group in enumerate(kr.matchedGroups):
            for method in group["methods"]:
                if method.startswith('SERVICE_METHOD-'):
                    # Already an entity id; compare on entityId during validation.
                    tmp = {"displayName": None, "comparer": "entityId", "entityId": method, "exists": None, "groupId": gid}
                else:
                    # Plain request name; compare on displayName during validation.
                    tmp = {"displayName": method, "comparer": "displayName", "entityId": None, "exists": None, "groupId": gid}
                kr.keyRequests.append(tmp)
        if KROption.VALIDATE_EXISTS in self.options:
            self.checkKeyRequetsExists(kr, self.DTAPIURL, self.DTAPIToken)
        if KROption.VALIDATE_HASDATA in self.options:
            pass  # TODO: has-data validation not implemented (was a `x=0` placeholder)
        return kr

    def parseBySLO(self, row):
        """Build and process a KR from one SLO row (requires 'name', 'env',
        'filter' and 'metricExpression' keys)."""
        normFilter = self.normalize(row['filter'])
        normExpression = self.normalize(row['metricExpression'])
        tmp_KR = keyrequests.KR({"sloName": row["name"], "env": row["env"],
                                 "metricExpression": normExpression,
                                 "filter": normFilter, "matchedGroups": None})
        # A TYPE(SERVICE_METHOD) filter names the key requests directly;
        # otherwise they are embedded in the metric expression.
        if normFilter.upper().startswith("TYPE(SERVICE_METHOD),"):
            subject = normFilter
        else:
            subject = normExpression
        for g in self.applyPatterns(subject):
            if g["methods"] is not None and len(g["methods"]) > 0:
                tmp_KR.matchedGroups.append(g)
        return self.process(tmp_KR)