Move parser to its own module

master
ermisw 2023-05-08 19:05:22 +02:00
parent 310ea57d7c
commit 9d54904779
7 changed files with 61 additions and 626 deletions


@@ -1,16 +1,17 @@
from decouple import config
import sys
# import sys
import yaml
import datetime
import time
# import time
import pandas as pd
import argparse
# import argparse
import warnings
import os
# import os
import dynatraceAPI
from pagination import Pagionation
import types
from key_request_parser import krparser
# from pagination import Pagionation
# import types
from KRParser import krparser
#from key_request_parser import krparser
warnings.filterwarnings("ignore")
@@ -28,16 +29,26 @@ def getSLO(ENV, DTAPIToken, DTENV):
return df
def write_to_excel(ignored, noData1M, noData1W):
def write_to_excel(ignored, noData1M, noData1W, all_TP_slos):
fileName = "./KeyRequest_Report_"+ str(datetime.date.today()) +".xlsx"
writer = pd.ExcelWriter(fileName)
noData1M.to_excel(writer, sheet_name='noData1M')
noData1W.to_excel(writer, sheet_name='noData1W')
ignored.to_excel(writer, sheet_name='ignored')
if noData1M["slo"].count() > 0:
noData1M.to_excel(writer, sheet_name='noData1M')
if noData1W["slo"].count() > 0:
noData1W.to_excel(writer, sheet_name='noData1W')
if ignored["slo"].count() > 0:
ignored.to_excel(writer, sheet_name='ignored')
if all_TP_slos["slo"].count() > 0:
all_TP_slos.to_excel(writer, sheet_name='TP_')
writer.save()
writer.close()  # close() writes the workbook and releases the handle; no separate save call needed
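For reference, recent pandas releases also allow using ExcelWriter as a context manager, which saves and closes the workbook when the block exits; a minimal sketch with a hypothetical frames dict (sheet name -> DataFrame):

import datetime
import pandas as pd

def write_frames(frames):
    # frames: {sheet_name: DataFrame}; skip empty frames, like the checks above
    file_name = "./KeyRequest_Report_" + str(datetime.date.today()) + ".xlsx"
    with pd.ExcelWriter(file_name) as writer:
        for sheet, df in frames.items():
            if not df.empty:
                df.to_excel(writer, sheet_name=sheet)
    # the workbook is saved and the handle closed on leaving the with-block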
@@ -46,31 +57,49 @@ def getStats(krs):
tmpIgnoredList=[]
for kr in [a for a in krs if len(a.keyRequests) == 0]:
tmpIgnoredList.append([kr.metadata["sloName"], kr.metadata["env"],"","","",kr.metadata["filter"], kr.metadata["metricExpression"]])
tmpIgnoredList.append([kr.metadata["sloName"], kr.metadata["sloId"], kr.metadata["env"],"","","","",kr.metadata["filter"], kr.metadata["metricExpression"]])
noData1M=[]
for kr in [s for s in krs if s.hasNoData("1M")==True]:
for k in kr.getKeyRequestByHasData("1M"):
noData1M.append([kr.metadata["sloName"], kr.metadata["env"], k["displayName"], k["entityId"],k["services"][0]["displayName"] if len(k["services"]) > 0 else "", kr.metadata["filter"], kr.metadata["metricExpression"]])
noData1M.append([kr.metadata["sloName"], kr.metadata["sloId"], kr.metadata["env"], k["displayName"], k["entityId"],k["services"][0]["displayName"] if len(k["services"]) > 0 else "",
[x for x in k["services"][0]["tags"] if x['key']=="compass-id"][0]["value"] if len(k["services"]) > 0 and len(k["services"][0]["tags"]) > 0 and len([x for x in k["services"][0]["tags"] if x['key']=="compass-id"]) > 0 else "",
kr.metadata["filter"],
kr.metadata["metricExpression"]])
noData1W=[]
for kr in [s for s in krs if s.hasNoData("1W")==True]:
for k in kr.getKeyRequestByHasData("1W"):
noData1W.append([kr.metadata["sloName"], kr.metadata["env"], k["displayName"], k["entityId"], k["services"][0]["displayName"] if len(k["services"]) > 0 else "", kr.metadata["filter"], kr.metadata["metricExpression"]])
noData1W.append([kr.metadata["sloName"], kr.metadata["sloId"], kr.metadata["env"], k["displayName"], k["entityId"], k["services"][0]["displayName"] if len(k["services"]) > 0 else "",
[x for x in k["services"][0]["tags"] if x['key']=="compass-id"][0]["value"] if len(k["services"]) > 0 and len(k["services"][0]["tags"]) > 0 and len([x for x in k["services"][0]["tags"] if x['key']=="compass-id"]) > 0 else "",
kr.metadata["filter"],
kr.metadata["metricExpression"]])
# noData1W=[]
# for kr in [s for s in krs if s.hasNoData1WKeyRequests()==True]:
# for k in kr.getNoData1WKeyRequests():
# noData1W.append([kr.metadata["sloName"], kr.metadata["env"], k["displayName"], k["entityId"], kr.metadata["filter"], kr.metadata["metricExpression"]])
columns =['slo', 'env', 'displayName','entityId','service','filter', 'metricExpression']
ignored=pd.DataFrame(tmpIgnoredList, columns=columns, dtype = float)
noData_1M=pd.DataFrame(noData1M, columns = columns, dtype = float)
noData_1W=pd.DataFrame(noData1W, columns = columns, dtype = float)
columns =['slo', 'sloID', 'env', 'displayName','entityId','service','compass-id','filter', 'metricExpression']
ignored=pd.DataFrame(tmpIgnoredList, columns=columns)
noData_1M=pd.DataFrame(noData1M, columns = columns)
noData_1W=pd.DataFrame(noData1W, columns = columns)
allTPSlos=[]
for kr in [a for a in krs if len(a.keyRequests) != 0 and a.metadata["sloName"].startswith("TP_")]:
for k in kr.keyRequests:
allTPSlos.append([kr.metadata["sloName"], kr.metadata["sloId"], kr.metadata["env"], k["displayName"], k["entityId"],
k["count"]["1W"], k["count"]["1M"],
k["services"][0]["displayName"] if len(k["services"]) > 0 else "",
[x for x in k["services"][0]["tags"] if x['key']=="compass-id"][0]["value"] if len(k["services"]) > 0 and len(k["services"][0]["tags"]) > 0 and len([x for x in k["services"][0]["tags"] if x['key']=="compass-id"]) > 0 else "",
kr.metadata["filter"], kr.metadata["metricExpression"]])
columns_ext =['slo', 'sloID', 'env', 'displayName','entityId','Count_1W', 'Count_1M','service','compass-id','filter', 'metricExpression']
all_TP_slos=pd.DataFrame(allTPSlos, columns = columns_ext)
return ignored, noData_1M, noData_1W
return ignored, noData_1M, noData_1W, all_TP_slos
def main():
@@ -91,17 +120,18 @@ def main():
krs=[]
#krp = krparser.KRParser(krparser.KROption.VALIDATE_EXISTS | krparser.KROption.VALIDATE_HASDATA | krparser.KROption.RESOLVESERVICES ,DTURL, DTTOKEN)
krp = krparser.KRParser(options=krparser.KROption.RESOLVEKEYREQUETS | krparser.KROption.VALIDATE_HASDATA | krparser.KROption.RESOLVESERVICES, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)
krp = krparser.KRParser(name=env,options=krparser.KROption.VALIDATE_HASDATA | krparser.KROption.RESOLVESERVICES, config={"threads":10, "serviceLookupParams":{"fields":"tags"}, "extendResultObjects":{"env":env}}, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)
slosF=getSLO(env, DTTOKEN, DTURL)
#slosF=slosF.loc[slosF['id'] == "a96a4031-e201-3e98-b739-27be7dc85a09"]
krs=krp.parseBySLO_Threaded(slosF)
#slosF=slosF.loc[slosF['id'] == "0f23cb75-4c94-3204-944d-cdc751ea625d"]
krs=krp.parse(slosF.head(100))
# for index, row in slosF.iterrows():
# #if row['id'] == "1ec65bfa-8d66-3215-a094-c289da440f32": #"1de2685e-0f06-370c-8b25-2326426e89c3": #or row['id'] == "ab1bf34a-10fc-3446-9cc7-79d257498a52":
# #if row['id'] == "a01cc45e-8369-3623-a156-1b261e6f4c98":
# #if str.startswith(row["name"],"TP_"):
# krs.append(krp.parseBySLO(row))
resultSlos.extend(krs)
@@ -109,8 +139,8 @@
# for slo in resultSlos:
# if slo.key
ignoerd,noData1M, noData1W= getStats(resultSlos)
write_to_excel(ignoerd, noData1M, noData1W)
ignored, noData1M, noData1W, all_TP_slos = getStats(resultSlos)
write_to_excel(ignored, noData1M, noData1W, all_TP_slos)
if __name__ == "__main__":
main()
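The compass-id lookup that getStats repeats inline for each report row could be factored into a small helper; a minimal sketch, assuming the same services/tags structure returned by the Dynatrace entities API (the helper name is hypothetical):

def get_compass_id(key_request):
    # key_request: one entry of kr.keyRequests; its "services" list may carry
    # a "tags" list of {"key": ..., "value": ...} dicts on the first service
    services = key_request.get("services") or []
    if not services:
        return ""
    tags = services[0].get("tags") or []
    matches = [t for t in tags if t.get("key") == "compass-id"]
    return matches[0].get("value", "") if matches else ""

Each row built in getStats could then call get_compass_id(k) instead of the nested list comprehension.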


@@ -1,46 +0,0 @@
import requests
from requests.adapters import HTTPAdapter, Retry
def get_requestOld(url, headers, params):
#try:
response = requests.get(url, headers=headers, params=params, verify=False)
response.raise_for_status()
# except requests.exceptions.HTTPError as errh:
# return "An Http Error occurred:" + repr(errh)
# except requests.exceptions.ConnectionError as errc:
# return "An Error Connecting to the API occurred:" + repr(errc)
# except requests.exceptions.Timeout as errt:
# return "A Timeout Error occurred:" + repr(errt)
# except requests.exceptions.RequestException as err:
# return "An Unknown Error occurred" + repr(err)
return response
def get_request(url, headers, params):
#try:
session = requests.Session()
retry = Retry(connect=3, backoff_factor=10)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
#response = requests.get(url, headers=headers, params=params, verify=False)
response = session.get(url,headers=headers, params=params, verify=False)
response.raise_for_status()
# except requests.exceptions.HTTPError as errh:
# return "An Http Error occurred:" + repr(errh)
# except requests.exceptions.ConnectionError as errc:
# return "An Error Connecting to the API occurred:" + repr(errc)
# except requests.exceptions.Timeout as errt:
# return "A Timeout Error occurred:" + repr(errt)
# except requests.exceptions.RequestException as err:
# return "An Unknown Error occurred" + repr(err)
return response
def contains(list, filter):
for x in list:
if filter(x):
return True
return False
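For comparison, the Retry above only retries connection failures; a sketch of a session that also backs off on throttling and server errors, assuming urllib3 >= 1.26 for the allowed_methods parameter:

import requests
from requests.adapters import HTTPAdapter, Retry

def build_session():
    retry = Retry(
        total=3,
        connect=3,
        backoff_factor=10,
        status_forcelist=[429, 500, 502, 503, 504],  # also retry rate limiting and 5xx responses
        allowed_methods=["GET"],                      # only retry idempotent requests
    )
    adapter = HTTPAdapter(max_retries=retry)
    session = requests.Session()
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session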


@@ -1,157 +0,0 @@
try:
# Python 3
from collections.abc import MutableSequence
except ImportError:
# Python 2.7
from collections import MutableSequence
class KeyRequestGroup(MutableSequence):
"""A container for manipulating lists of hosts"""
def __init__(self, data=None):
"""Initialize the class"""
super(KeyRequestGroup, self).__init__()
if (data is not None):
self._list = list(data)
else:
self._list = list()
def __repr__(self):
return "<{0} {1}>".format(self.__class__.__name__, self._list)
def __len__(self):
"""List length"""
return len(self._list)
def __getitem__(self, ii):
"""Get a list item"""
if isinstance(ii, slice):
return self.__class__(self._list[ii])
else:
return self._list[ii]
def __delitem__(self, ii):
"""Delete an item"""
del self._list[ii]
def __setitem__(self, ii, val):
# optional: self._acl_check(val)
self._list[ii] = val
def __str__(self):
return str(self._list)
def createExistsQuery(self, val):
query="type(service_method)"
val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
val['methods'] = list(map(lambda x: x.replace("~","") , val['methods']))
#case Service Names exists
if len(val["services"]) > 0:
if val["services"][0].startswith("SERVICE-"):
query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(val["services"])+"\"))"
else:
query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(val["services"])+"\"))"
if len(val["methods"]) > 0:
if val["methods"][0].startswith("SERVICE_METHOD-"):
query+=",entityId(\""+'","'.join(val["methods"])+"\")"
else:
query+=",entityName.in(\""+'","'.join(val["methods"])+"\")"
val["existsQuery"]= query
# def createServiceResolveQuery(self, val):
# query="type(SERVICE)"
# val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
# if len(val["services"]) > 0:
# if val["services"][0].startswith("SERVICE-"):
# query+=",entityId(\""+'","'.join(val["services"])+"\")"
# else:
# query+=",entityName.in(\""+'","'.join(val["services"])+"\")"
# val["resolveServiceQuery"]= query
def insert(self, ii, val):
self.createExistsQuery(val)
self._list.insert(ii, val)
def append(self, val):
for g in val:
if len(self._list) == 0:
#self._list.insert(ii, val)
self.insert(len(self._list), g)
return
for group in self._list:
if len(set(group["services"]) - set(g["services"])) > 0 or len(set(group["methods"]) - set(g["methods"])) > 0:
self.insert(len(self._list), g)
class KR:
# def getNotExistingKeyRequests(self):
# return [k for k in self.keyRequests if k['exists']==False]
# def hasNotExistingKeyRequests(self):
# for k in self.keyRequests:
# if k['exists']==False:
# return True
# return False
# def getNoData1WKeyRequests(self):
# return [k for k in self.keyRequests if k['hasData_1W']==False and k['exists']==True]
# def hasNoData1WKeyRequests(self):
# for k in self.keyRequests:
# if k['hasData_1W']==False and k['exists'] == True:
# return True
# return False
def getKeyRequestByHasData(self,label):
return [k for k in self.keyRequests if k['hasData'][label]==False]
def hasNoData(self,label):
for k in self.keyRequests:
if k['hasData'][label]==False:
return True
return False
def checkKeyRequestsHasData(self):
pass
def mergeServices(self, listServices):
listOfServiceIds=[o["entityId"] for o in self.services]
for s in listServices:
if s["entityId"] not in listOfServiceIds:
self.services.append(s)
def __init__(self,
metadata,
matchedGroups: KeyRequestGroup = None):
self.metadata=metadata
if matchedGroups == None:
self.matchedGroups = KeyRequestGroup()
else:
self.matchedGroups = keyRequests_groups
self.keyRequests=[]
self.services=[]
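To illustrate what createExistsQuery builds, a short example using the module layout before this commit (service and method names are made up):

from key_request_parser import keyrequests

group = keyrequests.KeyRequestGroup()
group.append([{"services": ["My Service"], "methods": ["/api/orders"]}])
print(group[0]["existsQuery"])
# type(service_method),fromRelationship.isServiceMethodOfService(type("SERVICE"),entityName.in("My Service")),entityName.in("/api/orders")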


@@ -1,299 +0,0 @@
import re
from key_request_parser import patterns, keyrequests, helper
from enum import Flag, auto
import logging
import threading
import concurrent.futures
import time
class KROption(Flag):
VALIDATE_EXISTS = auto()
VALIDATE_HASDATA = auto()
RESOLVEKEYREQUETS = auto()
RESOLVESERVICES = auto()
class KRParser:
#threadLimiter = threading.BoundedSemaphore(3)
patterns=[patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3(), patterns.Pattern5(), patterns.Pattern4() ]
lock = threading.Lock()
def normalize(self,x):
#tmp=x.replace("~","")
tmp=x.replace("\n","")
#tmp=tmp.replace("\"/","\"")
#tmp=tmp.replace("\"/","") -_>was active
#tmp=tmp.replace("/\"","\"")
tmp=tmp.replace("/\"","")
tmp=tmp.replace("\"","")
tmp=tmp.replace("\t","")
tmp=re.sub("([\s]*)\)", ")", tmp)
tmp=re.sub("\([\s\n\r]*", "(", tmp)
tmp=re.sub("\,[\s\n\r]*", ",", tmp)
tmp=re.sub("\)[\s\n\r]*,", "),", tmp)
tmp=re.sub("in[\s\n\r]*\(", "in(", tmp)
return tmp
def applyPatterns(self,subject):
groups=None
for p in self.patterns:
groups=p.parseServicesAndMethods(subject)
if len(groups) > 0:
break
return groups
def checkKeyRequetsHasData(self,kr, tfrom, DTAPIURL, DTAPIToken):
DTAPIURL = DTAPIURL + "/api/v2/entities"
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
for gid, group in enumerate(kr.matchedGroups):
params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"}
response = helper.get_request(DTAPIURL, headers, params)
entities = (response.json())['entities']
if len(entities) > 0:
y=0
for method in kr.keyRequests:
if method["groupId"] == gid:
found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
if len(found) > 0:
method["hasData"][tfrom["label"]]=True
#method["displayName"]=found[0]["displayName"]
#method["entityId"]=found[0]["entityId"]
#method["services"]=found[0]["fromRelationships"]["isServiceMethodOfService"]
# for idx,o in enumerate(method["services"]):
# tmpS=[p for p in kr.services if p["entityId"]==o["id"]]
# if len(tmpS)>0:
# method["services"][idx]=tmpS[0]
else:
method["hasData"][tfrom["label"]]=False
def resolveServices(self,services, DTAPIURL, DTAPIToken):
#DTAPIURL = DTAPIURL + "/api/v2/entities"
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
for gid, service in enumerate(services):
query="type(SERVICE),entityId("+service["id"]+")"
params={"entitySelector": query,"from":"now-2y"}
response = helper.get_request(DTAPIURL, headers, params)
entities = (response.json())['entities']
if len(entities)>0:
services[gid]=entities[0]
def resolveKeyRequests(self,kr, DTAPIURL, DTAPIToken, options):
DTAPIURL = DTAPIURL + "/api/v2/entities"
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + DTAPIToken
}
for gid, k in enumerate(kr.keyRequests):
try:
query="type(service_method)"
group=kr.matchedGroups[k["groupId"]]
if len(group["services"])> 0:
if group["services"][0].startswith("SERVICE-"):
query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(group["services"])+"\"))"
else:
query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(group["services"])+"\"))"
if k["comparer"]=="entityId":
query+=",entityId("+k["entityId"]+")"
else:
query+=",entityName.in(\""+k["displayName"]+"\")"
params={"entitySelector": query, "from":"now-1y","fields": "fromRelationships"}
response = helper.get_request(DTAPIURL, headers, params)
entities = (response.json())['entities']
# if len(entities) > 1:
# kr.keyRequests[gid]['foundCount']=len(entities)
# print("Multiple keyrequest found: ")
if len(entities)> 0:
kr.keyRequests[gid]["found"]=True
kr.keyRequests[gid]['foundCount']=len(entities)
kr.keyRequests[gid]["displayName"]=entities[0]["displayName"]
kr.keyRequests[gid]["entityId"]=entities[0]["entityId"]
if "isServiceMethodOfService" in entities[0]["fromRelationships"]:
kr.keyRequests[gid]["services"]=entities[0]["fromRelationships"]["isServiceMethodOfService"]
if KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0:
self.resolveServices(kr.keyRequests[gid]["services"], DTAPIURL, DTAPIToken)
except Exception as err:
kr.keyRequests[gid]["exception"]="resolveKeyRequests failed: "+repr(err)
#kr.mergeServices(entities)
def getKeyRequestsByServices(self, services):
#type(SERVICE_METHOD),fromRelationship.isServiceMethodOfService(type("SERVICE"),entityName.in("btc-user-composite-service - PROD"))
DTAPIURL = self.DTAPIURL + "/api/v2/entities"
headers = {
'Content-Type': 'application/json',
'Authorization': 'Api-Token ' + self.DTAPIToken
}
if len(services) > 0:
if services[0].startswith("SERVICE-"):
query="type(service_method),fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(services)+"\"))"
else:
query="type(service_method),fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(services)+"\"))"
params={"entitySelector": query}
response = helper.get_request(DTAPIURL, headers, params)
entities = (response.json())['entities']
return entities
def process(self, kr):
for gid, group in enumerate(kr.matchedGroups):
if len(group["services"]) > 0 and len(group["methods"])==0:
tmp_methods=self.getKeyRequestsByServices(group["services"])
for m in tmp_methods:
tmp={"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
kr.keyRequests.append(tmp)
for method in group["methods"]:
if method.startswith('SERVICE_METHOD-'):
tmp={"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
else:
tmp={"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
kr.keyRequests.append(tmp)
# for service in group["services"]:
# if service.startswith('SERVICE-'):
# tmp={"displayName": None,"comparer": "entityId", "entityId":service, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
# else:
# tmp={"displayName":service,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
# kr.services.append(tmp)
if self.options:
if KROption.RESOLVEKEYREQUETS in self.options:
self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options)
if KROption.VALIDATE_HASDATA in self.options:
self.checkKeyRequetsHasData(kr,{"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
self.checkKeyRequetsHasData(kr,{"label":"1M", "tfrom":"now-1M"},self.DTAPIURL, self.DTAPIToken)
# elif KROption.RESOLVEKEYREQUETS in self.options:
# self.checkKeyRequetsHasData(kr, {"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
# if KROption.RESOLVESERVICES in self.options:
# self.resolveServices(kr,self.DTAPIURL, self.DTAPIToken)
return kr
def parseBySLO(self,index,row):
#normalize
print(index)
try:
normFilter=self.normalize(row['filter'])
normExpresseion=self.normalize(row['metricExpression'])
tmp_KR = keyrequests.KR({"sloName":row["name"], "env":row["env"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None})
#SLO with Filter
if normFilter.upper().startswith("TYPE(SERVICE_METHOD),") or normFilter.upper().startswith("TYPE(SERVICE),"):
subject=normFilter
else:
subject=normExpresseion
groups=self.applyPatterns(subject)
tmp_KR.matchedGroups.append(groups)
# for g in groups:
# #if g["methods"] != None and len(g["methods"]) > 0:
# tmp_KR.matchedGroups.append(g)
#self.process(tmp_KR)
kr=self.process(tmp_KR)
with self.lock:
self.krs.append(kr)
except Exception as err:
print(repr(err))
#return self.process(tmp_KR)
def parseBySLO_Threaded(self, slosF):
self.krs=[]
#i=1
# threads = list()
# for index, row in slosF.iterrows():
# logging.info("Main : create and start thread %d.", index)
# x = threading.Thread(target=self.parseBySLO, args=(row,))
# threads.append(x)
# x.start()
# #krs.append(krp.parseBySLO(row))
# for index, thread in enumerate(threads):
# logging.info("Main : before joining thread %d.", index)
# thread.join()
# logging.info("Main : thread %d done", index)
# #resultSlos.extend(krs)
with concurrent.futures.ThreadPoolExecutor(10) as executor:
for index, row in slosF.iterrows():
# if i % 25 == 0:
# time.sleep(0)
#args={index:index, }
executor.submit(self.parseBySLO, index,row)
# print(str(i)+"\n")
# i=i+1
# x = threading.Thread(target=self.parseBySLO, args=(row,))
# threads.append(x)
# x.start()
return self.krs
def __init__(self, options: KROption=None , DTAPIURL=None, DTAPIToken=None ):
self.DTAPIURL= DTAPIURL
self.DTAPIToken=DTAPIToken
self.options=options
self.krs=[]
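A minimal usage sketch of this class as it exists before the move. The SLO row values are invented; since the expression yields both a service and a method group and no options are set, process() makes no Dynatrace calls and the sketch runs fully offline:

import pandas as pd
from key_request_parser import krparser

slos = pd.DataFrame([{
    "name": "TP_example_slo",
    "env": "prod",
    "filter": "",
    "metricExpression": "type(SERVICE_METHOD),fromRelationship.isServiceMethodOfService(type(SERVICE),entityName.in(My Service)),entityName.in(/api/orders)",
}])

krp = krparser.KRParser(options=None, DTAPIURL=None, DTAPIToken=None)
krs = krp.parseBySLO_Threaded(slos)
print(krs[0].keyRequests[0]["displayName"])   # "/api/orders"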


@@ -1,96 +0,0 @@
import re
import urllib
class Pattern1:
def parseServicesAndMethods(self, metricExpression):
#result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*[\)]*", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*]*[\"]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*\)]*)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*\")[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*)\)\"", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#Encode ~...~ delimited values so embedded commas and parentheses don't break the matching below
metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\(\"?service\"?\),entityName[\.]*[in]*\(([^\)]*)\)\)\,entityName[\.]*[in]*\(([^\)]*)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([^\)]*)\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#services=[]
#methods=[]
groups=[]
if result:
for r in result:
services=[s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
methods=[s.strip() for s in urllib.parse.unquote_plus(r[1]).split(",")]
groups.append({"services":services, "methods":methods})
#return services, methods
return groups
class Pattern2:
def parseServicesAndMethods(self, metricExpression):
metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\([~]*service[~]*\),entityName[\.]*[in]*\(([^\)]*)\),tag\(([^\)]*)\)\),entityName[\.]*[in]*\(([^\)]*)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
# services=[]
# methods=[]
groups=[]
if result:
for r in result:
services=[s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
methods=[s.strip() for s in urllib.parse.unquote_plus(r[2]).split(",")]
groups.append({"services":services, "methods":methods})
return groups
class Pattern3:
def parseServicesAndMethods(self, metricExpression):
metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*entityId[\s\n\r]*[\s\n\r]*\([\s\n\r]*[\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
# services=[]
# methods=[]
groups=[]
if result:
for r in result:
methods=[s.strip() for s in urllib.parse.unquote_plus(r).split(",")]
groups.append({"services":[], "methods":methods})
return groups
class Pattern4:
def parseServicesAndMethods(self, metricExpression):
metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
result = re.findall(r"type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*[entityName|entityId].*[equals|in]*\(([^\)]*)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
groups=[]
if result:
for r in result:
if r:
#methods=[s.strip() for s in r.split(",")]
services=[s.strip() for s in urllib.parse.unquote_plus(r).split(",")]
groups.append({"services":services, "methods":[]})
return groups
class Pattern5:
def parseServicesAndMethods(self, metricExpression):
#Encode ~...~ delimited values before matching
metricExpression=re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\(\"?service\"?\),entityName[\.]*[in]*\(([^\)]*)\)\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([^\)]*)\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression,flags=re.IGNORECASE|re.X|re.MULTILINE)
#services=[]
#methods=[]
groups=[]
if result:
for r in result:
if r:
services=[s.strip() for s in urllib.parse.unquote_plus(r).split(",")]
#methods=[s.strip() for s in urllib.parse.unquote_plus(r[1]).split(",")]
groups.append({"services":services, "methods":[]})
#return services, methods
return groups
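As a quick illustration of the entityId form that Pattern3 targets (module layout before this commit, made-up entity IDs):

from key_request_parser import patterns

expr = "type(SERVICE_METHOD),entityId(SERVICE_METHOD-0000000000000001,SERVICE_METHOD-0000000000000002)"
print(patterns.Pattern3().parseServicesAndMethods(expr))
# [{'services': [], 'methods': ['SERVICE_METHOD-0000000000000001', 'SERVICE_METHOD-0000000000000002']}]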


@@ -1,7 +1,10 @@
python-decouple
pyyaml
pandas
requests
#requests
datetime
argparse
openpyxl
#argparse
openpyxl
#jsonmerge
#KeyRequestParser
git+https://atc.bmwgroup.net/bitbucket/scm/opapm/keyrequestparser.git