a lot of fixes
parent 24e73e3317
commit 9c19e55bac

23  SLO.py
@@ -44,6 +44,8 @@ class KeyRequestGroup(MutableSequence):
        query="type(service_method)"

        val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
        val['methods'] = list(map(lambda x: x.replace("~","") , val['methods']))
        #case Service Names exists
        if len(val["services"]) > 0:
            if val["services"][0].startswith("SERVICE-"):
@@ -80,10 +82,19 @@ class KeyRequestGroup(MutableSequence):
from helper import get_request,contains

class SLO:

    def getNotExistingKeyRequests(self):
        return [k for k in self.keyRequests if k['exists']==False]

    def hasNotExistingKeyRequests(self):
        for k in self.keyRequests:
            if k['exists']==False:
                return True

        return False

    def checkKeyRequetsExists(self, DTAPIURL, DTAPIToken):

        DTAPIURL = DTAPIURL + "/api/v2/entities"

        headers = {
@@ -120,14 +131,20 @@ class SLO:
    def __init__(self,
                 sloName,
                 env,
                 metricExpression,
                 filter,
                 keyRequests_groups: KeyRequestGroup = None):
        self.sloName=sloName
        self.env=env
        self.metricExpression=metricExpression
        self.filter=filter

        if keyRequests_groups == None:
            self.keyRequestGroup = KeyRequestGroup()
        else:
            self.keyRequestGroup = keyRequests_groups

        self.keyRequests=[]
        self.keyRequests=[]

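For orientation, a minimal self-contained sketch of the existence check the SLO class above performs; the data here is a hypothetical stand-in, whereas the real class fills self.keyRequests from the Dynatrace entities API.

# Sketch only: mirrors getNotExistingKeyRequests()/hasNotExistingKeyRequests() above.
key_requests = [
    {"displayName": "GET /v1/status", "exists": True},    # hypothetical sample entries
    {"displayName": "POST /v1/unknown", "exists": False},
]

missing = [k for k in key_requests if k["exists"] == False]
has_missing = any(k["exists"] == False for k in key_requests)
print(missing, has_missing)
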
@@ -18,13 +18,14 @@ from pagination import Pagionation
import types
import SLO
from patterns.Pattern1 import Pattern1, Pattern2, Pattern3
from patterns.Pattern1 import Pattern1, Pattern2, Pattern3, Pattern4
import urllib.parse

warnings.filterwarnings("ignore")

patterns=[Pattern1(), Pattern2(), Pattern3()]
patterns=[Pattern1(), Pattern2(), Pattern3(), Pattern4()]

def get_request(url, headers):
    try:
@@ -41,7 +42,7 @@ def get_request(url, headers):
    return response

def getSLO(DTAPIToken, DTENV):
def getSLO(ENV, DTAPIToken, DTENV):
    # DTENV = base url
    # DTAPIToken = sec token
    dtclient = dynatraceAPI.Dynatrace(DTENV, DTAPIToken)
@@ -51,6 +52,7 @@ def getSLO(DTAPIToken, DTENV):
    pages = dtclient.returnPageination(api_url_report, my_params_report, "slo")
    #only_wanted = [x for x in pages.elements if str.lower(selector) in str.lower(x['description'])]
    df = pd.DataFrame(pages.elements)
    df["env"]=ENV
    return df
@@ -143,50 +145,48 @@ def check_inputs(args):

def write_slo_to_excel(args, fromDate, hourlyall, dailyall, totalall, ytd):
    touchpoints = ['Vehicle' , 'Mobile']
    if args.preSelect == 'day':
        today = datetime.date.today()
        yesterday = today - datetime.timedelta(days = 1)
        fileName = "./QM_Report_"+ str(yesterday) +".xlsx"
    else:
        fileName = "./QM_Report_" + str(fromDate.isocalendar()[1]) + ".xlsx"

    writer = pd.ExcelWriter(fileName)

    if not totalall.empty and 't' in str.lower(args.slices):
        totalall = totalall[totalall['Touchpoint'].isin(touchpoints)]
        totalall.to_excel(writer, sheet_name='total')

    if not dailyall.empty and 'd' in str.lower(args.slices):
        dailyall = dailyall[dailyall['Touchpoint'].isin(touchpoints)]
        dailyall.to_excel(writer, sheet_name='daily')

def applyPatterns(subject):
    groups=None
    for p in patterns:
        groups=p.parseServicesAndMethods(subject)

        if len(groups) > 0:
            break

    return groups

    if not hourlyall.empty and 'h' in str.lower(args.slices):
        hourlyall = hourlyall[hourlyall['Touchpoint'].isin(touchpoints)]
        hourlyall.to_excel(writer, sheet_name='hourly')

    if not ytd.empty and 'y' in str.lower(args.slices):
        ytd = ytd[ytd['Touchpoint'].isin(touchpoints)]
        ytd.to_excel(writer, sheet_name='YTD')

    writer.save()
    writer.close()


def parseAndCreateSLOObject(row):

    #normalize
    normFilter=normalize(row['filter'])
    normExpresseion=normalize(row['metricExpression'])
    tmp_SLO=SLO.SLO(row["name"],row["env"],normExpresseion,normFilter, None)

    normalizedMetric=normalize(row['metricExpression'])
    tmp_SLO=SLO.SLO(row["name"], normalizedMetric, None)

    for p in patterns:
        services, methods=p.parseServicesAndMethods(normalizedMetric)

        if methods != None and len(methods) > 0:
            tmp_SLO.keyRequestGroup.append({"services":services,"methods":methods})
            break
    #SLO with Filter
    if normFilter.upper().startswith("TYPE(SERVICE_METHOD),"):
        subject=normFilter
    else:
        subject=normExpresseion

    groups=applyPatterns(subject)
    # for p in patterns:
    #     #services, methods=p.parseServicesAndMethods(subject)

    #     groups=p.parseServicesAndMethods(subject)
    []
    for g in groups:
        if g["methods"] != None and len(g["methods"]) > 0:
            #tmp_SLO.keyRequestGroup.append({"services":services,"methods":methods})
            tmp_SLO.keyRequestGroup.append(g)

        # if len(groups) > 0:
        #     break


    return tmp_SLO

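A self-contained sketch of the first-match-wins loop that the new applyPatterns() above implements; the pattern classes here are dummies rather than the real Pattern1..Pattern4, and the subject string is a placeholder.

# Each pattern returns a list of {"services": [...], "methods": [...]} groups;
# the first pattern that yields any groups wins, matching applyPatterns() above.
class DummyPatternA:
    def parseServicesAndMethods(self, subject):
        return []  # no match for this expression

class DummyPatternB:
    def parseServicesAndMethods(self, subject):
        return [{"services": ["demo-service"], "methods": ["GET /demo"]}]  # hypothetical match

def apply_patterns(subject, patterns):
    groups = []
    for p in patterns:
        groups = p.parseServicesAndMethods(subject)
        if len(groups) > 0:
            break
    return groups

print(apply_patterns("type(service_method),...", [DummyPatternA(), DummyPatternB()]))
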
@@ -194,25 +194,83 @@ def parseAndCreateSLOObject(row):

def normalize(x):
    tmp=x.replace("~","")
    tmp=tmp.replace("\n","")
    #tmp=x.replace("~","")
    tmp=x.replace("\n","")
    #tmp=tmp.replace("\"/","\"")
    tmp=tmp.replace("\"/","")
    #tmp=tmp.replace("\"/","") -_>was active
    #tmp=tmp.replace("/\"","\"")
    tmp=tmp.replace("/\"","")
    tmp=tmp.replace("\"","")
    tmp=tmp.replace("\t","")

    tmp=re.sub("([\s]*)\)", ")", tmp)

    tmp=re.sub("\([\s\n\r]*", "(", tmp)
    tmp=re.sub("\,[\s\n\r]*", ",", tmp)
    tmp=re.sub("\)[\s\n\r]*,", "),", tmp)
    tmp=re.sub("in[\s\n\r]*\(", "in(", tmp)

    #tmp=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: "xxx", tmp,flags=re.IGNORECASE|re.X|re.MULTILINE)

    return tmp

def getParsedSLOs(ENV, DTTOKEN, DTURL):
    slos=[]
    slosF=getSLO(ENV, DTTOKEN, DTURL)
    for index, row in slosF.iterrows():
        #if row['id'] == "06292149-0f7f-34f6-b226-dfd9f680486d": #or row['id'] == "ab1bf34a-10fc-3446-9cc7-79d257498a52":
        slos.append(parseAndCreateSLOObject(row))
        #print("filter:"+row["filter"])

    return slos

def write_to_excel(ignored, notExists):

    fileName = "./KeyRequest_Report_"+ str(datetime.date.today()) +".xlsx"

    writer = pd.ExcelWriter(fileName)
    notExists.to_excel(writer, sheet_name='notExists')
    ignored.to_excel(writer, sheet_name='ignored')

    writer.save()
    writer.close()


def getStats(slos):

    tmpIgnoredList=[]
    for slo in [a for a in slos if len(a.keyRequests) == 0]:
        tmpIgnoredList.append([slo.sloName, slo.env,"",slo.filter, slo.metricExpression])

    keyReuqestsNotExists=[]
    for slo in [s for s in slos if s.hasNotExistingKeyRequests()==True]:
        for k in slo.getNotExistingKeyRequests():
            keyReuqestsNotExists.append([slo.sloName, slo.env, k["displayName"], slo.filter, slo.metricExpression])
    #keyrequestsNotExists=pd.DataFrame(flatternList([a for a in slos if len(a.keyRequests) == 0]), columns =['slo', 'env', 'keyRequest', 'filter', 'metricExpression'], dtype = float)

    ignored=pd.DataFrame(tmpIgnoredList, columns =['slo', 'env', 'keyRequest', 'filter', 'metricExpression'], dtype = float)
    notExists=pd.DataFrame(keyReuqestsNotExists, columns =['slo', 'env', 'keyRequest', 'filter', 'metricExpression'], dtype = float)

    return ignored, notExists

def main(slo_path):

    resultSlos=[]

    with open('./environment.yaml') as file:
        env_doc = yaml.safe_load(file)

    slos=[]

    #iterate through all environments
    for item, doc in env_doc.items():
    for env, doc in env_doc.items():
        token = dict(doc[2])
        url = dict(doc[1])
@@ -221,17 +279,26 @@ def main(slo_path):
        DTTOKEN = config(token.get('env-token-name'))
        DTURL = url.get('env-url')

        slosF=getSLO(DTTOKEN, DTURL)
        for index, row in slosF.iterrows():
            #if row['id'] == "75165058-75c6-385e-a78e-b6ea3457f87d":
            slos.append(parseAndCreateSLOObject(row))
            print("huhu")
        #getParsedSLOs(DTTOKEN, DTURL)
        slos=getParsedSLOs(env,DTTOKEN, DTURL)

        for slo in slos:
            slo.checkKeyRequetsExists(DTURL, DTTOKEN)

        x=0

        # count = sum(map(lambda x : len(x.keyRequests) > 0, slos))
        # count_ignored=sum(map(lambda x : len(x.keyRequests) == 0, slos))
        # ignored=[a for a in slos if len(a.keyRequests) == 0]
        # for i in ignored:
        #     if i.metricExpression:
        #         print(i.metricExpression+" "+i.sloName+"\n\n")
        # x=0
        resultSlos.extend(slos)

    ignoerd,notExists= getStats(resultSlos)
    write_to_excel(ignoerd,notExists)

if __name__ == "__main__":
    main('./slo_parameter.yaml')

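For reference, a runnable sketch of the whitespace and quote stripping that normalize() in the hunk above applies before the patterns run; the same re.sub rules, applied here to a made-up expression.

import re

def normalize_sketch(x):
    # Mirrors normalize() above: drop quotes/newlines/tabs, then collapse
    # whitespace around parentheses and commas so the regex patterns can match.
    tmp = x.replace("\n", "")
    tmp = tmp.replace("\"/", "")
    tmp = tmp.replace("/\"", "")
    tmp = tmp.replace("\"", "")
    tmp = tmp.replace("\t", "")
    tmp = re.sub(r"([\s]*)\)", ")", tmp)
    tmp = re.sub(r"\([\s\n\r]*", "(", tmp)
    tmp = re.sub(r",[\s\n\r]*", ",", tmp)
    tmp = re.sub(r"\)[\s\n\r]*,", "),", tmp)
    tmp = re.sub(r"in[\s\n\r]*\(", "in(", tmp)
    return tmp

print(normalize_sketch('type( "service_method" ,\n  entityName.in ( ~GET /demo~ ) )'))
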
@@ -14,18 +14,18 @@ euprod:
  - env-url: "https://onb44935.live.dynatrace.com"
  - env-token-name: "NAPREPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
naprod:
  - name: "naprod"
  - env-url: "https://wgv50241.live.dynatrace.com"
  - env-token-name: "NAPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
cnprod:
  - name: "cnprod"
  - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
  - env-token-name: "CNPROD_TOKEN_VAR"
  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
#cnpreprod:
  - name: "cnpreprod"
  - env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
  - env-token-name: "CNPREPROD_TOKEN_VAR"
  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
# naprod:
# - name: "naprod"
# - env-url: "https://wgv50241.live.dynatrace.com"
# - env-token-name: "NAPROD_TOKEN_VAR"
# - jenkins: "https://jaws.bmwgroup.net/opapm/"
# cnprod:
# - name: "cnprod"
# - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
# - env-token-name: "CNPROD_TOKEN_VAR"
# - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
# #cnpreprod:
# - name: "cnpreprod"
# - env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
# - env-token-name: "CNPREPROD_TOKEN_VAR"
# - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"

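Each environment entry above is a YAML list of single-key mappings, which is why main() indexes the list positionally and wraps the items in dict(); a minimal sketch of that access pattern, using inline YAML with placeholder values rather than the real environment.yaml.

import yaml

# Sketch only: same list-of-single-key-mappings shape as environment.yaml above.
doc = yaml.safe_load("""
demo:
  - name: "demo"
  - env-url: "https://example.live.dynatrace.com"
  - env-token-name: "DEMO_TOKEN_VAR"
""")

for env, entries in doc.items():
    url = dict(entries[1])    # second list item holds env-url
    token = dict(entries[2])  # third list item holds env-token-name
    print(env, url.get("env-url"), token.get("env-token-name"))
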
@@ -1,30 +1,46 @@
import re
import urllib

class Pattern1:

    def parseServicesAndMethods(self, metricExpression):
        result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*\.[\s\n\r]*in[\s\n\r]*\([\s\n\r]*([^\)]*)\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[\s\n\r]*in\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        services=[]
        methods=[]

        #result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*[\)]*", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        #result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*]*[\"]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*\)]*)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)

        #result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*\")[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*)\)\"", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)

        #Endoce
        metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\(\"?service\"?\),entityName[\.]*[in]*\(([^\)]*)\)\)\,entityName[\.]*[in]*\(([^\)]*)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        #result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([^\)]*)\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        #services=[]
        #methods=[]
        groups=[]
        if result:
            for r in result:
                services=[s.strip() for s in r[0].split(",")]
                methods=[s.strip() for s in r[1].split(",")]

        return services, methods
                services=[s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
                methods=[s.strip() for s in urllib.parse.unquote_plus(r[1]).split(",")]
                groups.append({"services":services, "methods":methods})
        #return services, methods
        return groups

class Pattern2:

    def parseServicesAndMethods(self, metricExpression):
        result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*\.[\s\n\r]*in[\s\n\r]*\([\s\n\r]*([^\)]*)\),[\s\n\r]*tag\([^\)]*[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[\s\n\r]*in\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        services=[]
        methods=[]
        metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\([~]*service[~]*\),entityName[\.]*[in]*\(([^\)]*)\),tag\(([^\)]*)\)\),entityName[\.]*[in]*\(([^\)]*)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        # services=[]
        # methods=[]
        groups=[]
        if result:
            for r in result:
                services=[s.strip() for s in r[0].split(",")]
                methods=[s.strip() for s in r[1].split(",")]

        return services, methods
                services=[s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
                methods=[s.strip() for s in urllib.parse.unquote_plus(r[2]).split(",")]
                groups.append({"services":services, "methods":methods})

        return groups

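Pattern1 and Pattern2 above first URL-encode everything between ~…~ so that commas and parentheses inside entity names cannot break the main regex, then decode the captured groups; a self-contained sketch of that trick on a made-up expression (simplified ~…~ regex, not the character class used above).

import re
import urllib.parse

expr = "entityName.in(~Perseus (/service)- prod~),entityName.in(~GET /demo~,~POST /demo~)"

# Encode the content of every ~...~ block so '(' ')' ',' inside names are hidden.
encoded = re.sub(r"~([^~]*)~",
                 lambda m: urllib.parse.quote_plus(m.group(1)),
                 expr)

# Now a simple capture up to the next ')' is safe; decode the pieces afterwards.
captures = re.findall(r"entityName\.in\(([^\)]*)\)", encoded)
decoded = [[urllib.parse.unquote_plus(part) for part in group.split(",")]
           for group in captures]
print(decoded)
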
@@ -32,10 +48,41 @@ class Pattern3:

    def parseServicesAndMethods(self, metricExpression):
        result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*entityId[\s\n\r]*[\s\n\r]*\([\s\n\r]*[\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
        services=[]
        # services=[]
        # methods=[]
        groups=[]
        if result:
            for r in result:
                methods=[s.strip() for s in r.split(",")]
                groups.append({"services":[], "methods":methods})

        return groups


class Pattern4:

    def parseServicesAndMethods(self, metricExpression):
        result = re.findall(r"service_method,([^\)]*)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)

        groups=[]
        methods=[]
        if result:
            for r in result:
                methods=[s.strip() for s in r.split(",")]
                methods.append(r)

                groups.append({"services":[], "methods":methods})

        return services, methods
        return groups

# class FilterMethodPattern:

# def parseServicesAndMethods(self, metricExpression):
# result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*entityId[\s\n\r]*[\s\n\r]*\([\s\n\r]*[\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
# services=[]
# methods=[]
# if result:
# for r in result:
# methods=[s.strip() for s in r.split(",")]

# return services, methods

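Pattern4 above covers filter-style expressions where the key requests appear as SERVICE_METHOD-… entity IDs rather than names; a runnable sketch of that extraction with a simplified regex, using a shortened sample from the test below.

import re

expr = ("...:filter(or(eq(dt.entity.service_method,SERVICE_METHOD-02E88144CFC57AEA),"
        "eq(dt.entity.service_method,SERVICE_METHOD-2C51346F87339CE8))):splitBy():avg")

# Grab every SERVICE_METHOD-<id> token; the real Pattern4 uses a broader
# "service_method,([^\)]*)" capture and then splits on commas.
methods = re.findall(r"SERVICE_METHOD-[0-9A-F]+", expr)
groups = [{"services": [], "methods": methods}]
print(groups)
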
@@ -0,0 +1,44 @@
import unittest
#from patterns.Pattern1 import Pattern1, Pattern2, Pattern3
from createKeyRequestReport import applyPatterns


class PatternTestCases(unittest.TestCase):

    # def setUp(self):
    # self.patterns=[Pattern1(), Pattern2(), Pattern3()]

    def test_pattern1(self):
        # p=Pattern1()
        # for p in patterns:
        # groups=p.parseServicesAndMethods('100-(builtin:service.keyRequest.errors.server.rate:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(SERVICE),entityName(api.bmwgroup.com - PROD)),entityName(remoteservices)))))):splitBy():sum:auto:sort(value(sum,descending)))')

        result=applyPatterns('(100)*((builtin:service.keyRequest.errors.server.successCount:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~RemoteSoftwareUpgrade.WebApp - PROD~)),entityName.in(~POST /v2/vehicle/rsustatus/finishInstallation~,~POST /v2/vehicle/rsustatus/preparationCompleted~,~POST /v2/vehicle/rsustatus/updateAvailable~)))))):splitBy())/(builtin:service.keyRequest.count.server:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~RemoteSoftwareUpgrade.WebApp - PROD~)),entityName.in(~POST /v2/vehicle/rsustatus/finishInstallation~,~POST /v2/vehicle/rsustatus/preparationCompleted~,~POST /v2/vehicle/rsustatus/updateAvailable~)))))):splitBy()))')

        expected=[{'services': ['RemoteSoftwareUpgrade.WebApp - PROD'], 'methods': ['POST /v2/vehicle/rsustatus/finishInstallation', 'POST /v2/vehicle/rsustatus/preparationCompleted', 'POST /v2/vehicle/rsustatus/updateAvailable']}, {'services': ['RemoteSoftwareUpgrade.WebApp - PROD'], 'methods': ['POST /v2/vehicle/rsustatus/finishInstallation', 'POST /v2/vehicle/rsustatus/preparationCompleted', 'POST /v2/vehicle/rsustatus/updateAvailable']}]

        self.assertEqual(result.__str__(), expected.__str__())

    def test_pattern2(self):

        result=applyPatterns('(100)*((builtin:service.keyRequest.errors.server.successCount:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~Perseus (/service)- cub-personalization - prod~)),entityName.in(~GET /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds~,~PUT /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds~,~createAssociationForGcidAndVin~)))))):splitBy())/(builtin:service.keyRequest.count.server:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~Perseus (/service)- cub-personalization - prod~)),entityName.in(~GET /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds~,~PUT /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds~,~createAssociationForGcidAndVin~)))))):splitBy()))')
        expected=[{'services': ['Perseus (/service)- cub-personalization - prod'], 'methods': ['GET /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds', 'PUT /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds', 'createAssociationForGcidAndVin']}, {'services': ['Perseus (/service)- cub-personalization - prod'], 'methods': ['GET /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds', 'PUT /service/api/vehicle/v1/gcid/[UUID]/vin/<VIN>/pds', 'createAssociationForGcidAndVin']}]

        self.assertEqual(result.__str__(), expected.__str__())

    def test_pattern3(self):
        result=applyPatterns('(100)* ((1)-(builtin:service.keyRequest.errors.fivexx.count:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~DownloadApi - ccis - prod~),tag(~Environment:PROD~)),entityName.in(~download~)))))):splitBy())/(builtin:service.keyRequest.count.server:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method),fromRelationship.isServiceMethodOfService(type(~SERVICE~),entityName.in(~DownloadApi - ccis - prod~),tag(~Environment:PROD~)),entityName.in(~download~)))))):splitBy()))')
        expected=[{'services': ['DownloadApi - ccis - prod'], 'methods': ['download']}, {'services': ['DownloadApi - ccis - prod'], 'methods': ['download']}]

        self.assertEqual(result.__str__(), expected.__str__())

    def test_pattern4(self):
        result=applyPatterns('((builtin:service.keyRequest.errors.server.successCount)/(builtin:service.keyRequest.count.server)*(100)):filter(or(eq(dt.entity.service_method,SERVICE_METHOD-02E88144CFC57AEA),eq(dt.entity.service_method,SERVICE_METHOD-2C51346F87339CE8),eq(dt.entity.service_method,SERVICE_METHOD-FE4E553F84167C2E),eq(dt.entity.service_method,SERVICE_METHOD-8A384C81D9AF38CA),eq(dt.entity.service_method,SERVICE_METHOD-3FAF84E444877083),eq(dt.entity.service_method,SERVICE_METHOD-220B20CF01912501),eq(dt.entity.service_method,SERVICE_METHOD-76B0223CDED11B41),eq(dt.entity.service_method,SERVICE_METHOD-ED80E83F71F9E8B7),eq(dt.entity.service_method,SERVICE_METHOD-C03CAD42FED5019E),eq(dt.entity.service_method,SERVICE_METHOD-82354FEF673A217F))):splitBy():avg')
        expected=[{'services': [], 'methods': ['SERVICE_METHOD-02E88144CFC57AEA', 'SERVICE_METHOD-2C51346F87339CE8', 'SERVICE_METHOD-FE4E553F84167C2E', 'SERVICE_METHOD-8A384C81D9AF38CA', 'SERVICE_METHOD-3FAF84E444877083', 'SERVICE_METHOD-220B20CF01912501', 'SERVICE_METHOD-76B0223CDED11B41', 'SERVICE_METHOD-ED80E83F71F9E8B7', 'SERVICE_METHOD-C03CAD42FED5019E', 'SERVICE_METHOD-82354FEF673A217F']}]

        self.assertEqual(result.__str__(), expected.__str__())


#'(100)*((1)-(builtin:service.keyRequest.errors.fivexx.count:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method), fromRelationship.isServiceMethodOfService( type(SERVICE),entityName.in( server (/cdp/release/vehicle/appstore) - vcis-information - prod ), tag(Environment:PROD) ) ,entityName.in(cdp/release/vehicle/appstore/servlet/manage, cdp/release/vehicle/appstore/servlet/start, cdp/release/vehicle/appstore/servlet/linkstoreimage, cdp/release/vehicle/appstore/servlet/ajaxReload )))))):splitBy())/(builtin:service.keyRequest.count.server:filter(and(or(in(dt.entity.service_method,entitySelector(type(service_method), fromRelationship.isServiceMethodOfService( type(SERVICE),entityName.in(server (/cdp/release/vehicle/appstore) - vcis-information - prod ), tag(Environment:PROD) ) ,entityName.in(cdp/release/vehicle/appstore/servlet/manage, cdp/release/vehicle/appstore/servlet/start, cdp/release/vehicle/appstore/servlet/linkstoreimage, cdp/release/vehicle/appstore/servlet/ajaxReload )))))):splitBy()))'

@@ -0,0 +1,51 @@
import unittest
import yaml
from decouple import config
from patterns.Pattern1 import Pattern1, Pattern2, Pattern3
from createKeyRequestReport import getParsedSLOs

class EvaluatingSLOTestCases(unittest.TestCase):

    @classmethod
    def setUpClass(self):
        with open('./environment.yaml') as file:
            self.env_doc = yaml.safe_load(file)

        token = self.env_doc['euprod'][2]
        url = self.env_doc['euprod'][1]

        if(config(token.get('env-token-name')) != ""):
            print("Gather data, hold on a minute")
            DTTOKEN = config(token.get('env-token-name'))
            DTURL = url.get('env-url')

            self.parsedSLOs=getParsedSLOs("Test",DTTOKEN, DTURL)


    def test_shouldNotBeIgnored(self):

        #getParsedSLOs(DTTOKEN, DTURL)
        #slos=getParsedSLOs(DTTOKEN, DTURL)

        ignored=[a for a in self.parsedSLOs if len(a.keyRequestGroup) == 0]

        #check if service_method" is in spressenion
        val1=[i for i in ignored if "service_method" in i.metricExpression.lower() or "service_method" in i.filter.lower() ]

        #print("asd")

        self.assertEqual(0, len(val1))

    # def test_checkParsedMethods(self):

    # #getParsedSLOs(DTTOKEN, DTURL)
    # #slos=getParsedSLOs(DTTOKEN, DTURL)

    # not_ignored=[a for a in self.parsedSLOs if len(a.keyRequestGroup) > 0]

    # #check if service_method" is in spressenion
    # val1=[i for i in not_ignored if "service_method" in i.metricExpression.lower() ]

    # print("asd")

    # self.assertEqual(0, len(val1))