added request count & some fixes

master
ermisw 2023-05-08 19:06:50 +02:00
parent dec4e7f136
commit 031fc0aa26
13 changed files with 415 additions and 224 deletions

.gitignore (vendored, new file, +138)

@@ -0,0 +1,138 @@
.vscode
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
### Terraform stuff
**/.terraform/*
crash.log
*.tfvars
#excel reports
*.xlsx


@@ -58,20 +58,24 @@ class KeyRequestGroup(MutableSequence):
                 query+=",entityId(\""+'","'.join(val["methods"])+"\")"
             else:
                 query+=",entityName.in(\""+'","'.join(val["methods"])+"\")"
+        # query="builtin:service.keyRequest.count.total:filter(in(\"dt.entity.service_method\",entitySelector(\"type(service_method)"
+        # val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
+        # val['methods'] = list(map(lambda x: x.replace("~","") , val['methods']))
+        # if len(val["services"]) > 0:
+        #     if val["services"][0].startswith("SERVICE-"):
+        #         query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(val["services"])+"\"))"
+        #     else:
+        #         query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(val["services"])+"\"))"
+        # if len(val["methods"]) > 0:
+        #     if val["methods"][0].startswith("SERVICE_METHOD-"):
+        #         query+=",entityId(\""+'","'.join(val["methods"])+"\")"
+        #     else:
+        #         query+=",entityName.in(\""+'","'.join(val["methods"])+"\")"
+        # query+="\")))"
         val["existsQuery"]= query
-    # def createServiceResolveQuery(self, val):
-    #     query="type(SERVICE)"
-    #     val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
-    #     if len(val["services"]) > 0:
-    #         if val["services"][0].startswith("SERVICE-"):
-    #             query+=",entityId(\""+'","'.join(val["services"])+"\")"
-    #         else:
-    #             query+=",entityName.in(\""+'","'.join(val["services"])+"\")"
-    #     val["resolveServiceQuery"]= query
     def insert(self, ii, val):
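For orientation, the existsQuery assembled above is a Dynatrace entity selector string. A minimal sketch of what one group could end up holding, using invented entity IDs and assuming the selector is prefixed with type(SERVICE_METHOD) earlier in the method (that part is outside the visible hunk):

# Sketch only; the IDs below are placeholders, not values from this commit.
group = {
    "services": ["SERVICE-0000000000000001"],
    "methods": ["SERVICE_METHOD-0000000000000002"],
}
group["existsQuery"] = (
    'type(SERVICE_METHOD)'
    ',fromRelationship.isServiceMethodOfService(type("SERVICE"),entityId("SERVICE-0000000000000001"))'
    ',entityId("SERVICE_METHOD-0000000000000002")'
)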


@@ -1,14 +1,12 @@
 import re
 from KRParser import patterns, keyrequests, helper
 from enum import Flag, auto
-import logging
 import threading
 import concurrent.futures
-import time
 from jsonmerge import merge
+import pandas as pd
+from tqdm import *
@@ -19,24 +17,15 @@ class KROption(Flag):
     RESOLVESERVICES = auto()
 class KRParser:
-    #threadLimiter = threading.BoundedSemaphore(3)
     patterns=[patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3(), patterns.Pattern5(), patterns.Pattern4() ]
     lock = threading.Lock()
     def normalize(self,x):
-        #tmp=x.replace("~","")
         tmp=x.replace("\n","")
-        #tmp=tmp.replace("\"/","\"")
-        #tmp=tmp.replace("\"/","") -_>was active
-        #tmp=tmp.replace("/\"","\"")
         tmp=tmp.replace("/\"","")
         tmp=tmp.replace("\"","")
         tmp=tmp.replace("\t","")
         tmp=re.sub("([\s]*)\)", ")", tmp)
         tmp=re.sub("\([\s\n\r]*", "(", tmp)
         tmp=re.sub("\,[\s\n\r]*", ",", tmp)
         tmp=re.sub("\)[\s\n\r]*,", "),", tmp)
@@ -58,7 +47,30 @@ class KRParser:
     def checkKeyRequetsHasData(self,kr, tfrom, DTAPIURL, DTAPIToken):
-        DTAPIURL = DTAPIURL + "/api/v2/entities"
+        # DTAPIURL = DTAPIURL + "/api/v2/entities"
+        # headers = {
+        #     'Content-Type': 'application/json',
+        #     'Authorization': 'Api-Token ' + DTAPIToken
+        # }
+        # for gid, group in enumerate(kr.matchedGroups):
+        #     params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"}
+        #     response = helper.get_request(DTAPIURL, headers, params)
+        #     entities = (response.json())['entities']
+        #     if len(entities) > 0:
+        #         y=0
+        #         for method in kr.keyRequests:
+        #             if method["groupId"] == gid:
+        #                 found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
+        #                 if len(found) > 0:
+        #                     method["hasData"][tfrom["label"]]=True
+        #                 else:
+        #                     method["hasData"][tfrom["label"]]=False
+        DTAPIURL = DTAPIURL + "/api/v2/metrics/query"
         headers = {
             'Content-Type': 'application/json',
@@ -66,32 +78,23 @@ class KRParser:
         }
         for gid, group in enumerate(kr.matchedGroups):
-            params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"}
+            params={"entitySelector": group["existsQuery"], "resolution":"1d", "metricSelector": "builtin:service.keyRequest.count.total", "from":tfrom["tfrom"]}
             response = helper.get_request(DTAPIURL, headers, params)
-            entities = (response.json())['entities']
-            if len(entities) > 0:
-                y=0
+            entities = (response.json())["result"][0]["data"]
             for method in kr.keyRequests:
                 if method["groupId"] == gid:
-                    found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
+                    found = [x for x in entities if x["dimensions"][0] == method["entityId"]]
                     if len(found) > 0:
                         method["hasData"][tfrom["label"]]=True
-                        #method["displayName"]=found[0]["displayName"]
-                        #method["entityId"]=found[0]["entityId"]
-                        #method["services"]=found[0]["fromRelationships"]["isServiceMethodOfService"]
-                        # for idx,o in enumerate(method["services"]):
-                        #     tmpS=[p for p in kr.services if p["entityId"]==o["id"]]
-                        #     if len(tmpS)>0:
-                        #         method["services"][idx]=tmpS[0]
+                        method["count"][tfrom["label"]]=sum([x for x in found[0]['values'] if x != None])
                     else:
                         method["hasData"][tfrom["label"]]=False
+                        method["count"][tfrom["label"]]=0
     def resolveServices(self,services, DTAPIURL, DTAPIToken):
-        #DTAPIURL = DTAPIURL + "/api/v2/entities"
         headers = {
             'Content-Type': 'application/json',
@@ -101,8 +104,8 @@ class KRParser:
         for gid, service in enumerate(services):
             query="type(SERVICE),entityId("+service["id"]+")"
-            params=merge(self.serviceLookupParams,{"entitySelector": query})
-            #params={"entitySelector": query,"from":"now-2y", "fields":"tags"}
+            params=merge(self.config["serviceLookupParams"],{"entitySelector": query})
             response = helper.get_request(DTAPIURL, headers, params)
             entities = (response.json())['entities']
@@ -138,9 +141,6 @@ class KRParser:
            response = helper.get_request(DTAPIURL, headers, params)
            entities = (response.json())['entities']
-           # if len(entities) > 1:
-           #     kr.keyRequests[gid]['foundCount']=len(entities)
-           #     print("Multiple keyrequest found: ")
            if len(entities)> 0:
                kr.keyRequests[gid]["found"]=True
@@ -151,7 +151,7 @@ class KRParser:
                if "isServiceMethodOfService" in entities[0]["fromRelationships"]:
                    kr.keyRequests[gid]["services"]=entities[0]["fromRelationships"]["isServiceMethodOfService"]
-                   if KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0:
+                   if options and KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0:
                        self.resolveServices(kr.keyRequests[gid]["services"], DTAPIURL, DTAPIToken)
         except Exception as err:
@@ -191,53 +191,37 @@ class KRParser:
                tmp_methods=self.getKeyRequestsByServices(group["services"])
                for m in tmp_methods:
-                   tmp={"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                   tmp=merge({"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""},self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    kr.keyRequests.append(tmp)
                for method in group["methods"]:
                    if method.startswith('SERVICE_METHOD-'):
-                       tmp={"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                       tmp=merge({"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{},"count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    else:
-                       tmp={"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                       tmp=merge({"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    kr.keyRequests.append(tmp)
-            # for service in group["services"]:
-            #     if service.startswith('SERVICE-'):
-            #         tmp={"displayName": None,"comparer": "entityId", "entityId":service, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
-            #     else:
-            #         tmp={"displayName":service,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
-            #     kr.services.append(tmp)
+        self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options)
         if self.options:
-            if KROption.RESOLVEKEYREQUETS in self.options:
-                self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options)
             if KROption.VALIDATE_HASDATA in self.options:
                 self.checkKeyRequetsHasData(kr,{"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
                 self.checkKeyRequetsHasData(kr,{"label":"1M", "tfrom":"now-1M"},self.DTAPIURL, self.DTAPIToken)
-            # elif KROption.RESOLVEKEYREQUETS in self.options:
-            #     self.checkKeyRequetsHasData(kr, {"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
-            # if KROption.RESOLVESERVICES in self.options:
-            #     self.resolveServices(kr,self.DTAPIURL, self.DTAPIToken)
         return kr
-    def parseBySLO(self,index,row):
+    def parseBySLO(self,row):
-        #normalize
-        print(index)
         try:
             normFilter=self.normalize(row['filter'])
             normExpresseion=self.normalize(row['metricExpression'])
-            tmp_KR = keyrequests.KR({"sloName":row["name"], "env":row["env"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None})
+            tmp_KR = keyrequests.KR(merge({"sloName":row["name"], "sloId":row["id"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None}, self.config["extendResultObjects"]))
             #SLO with Filter
             if normFilter.upper().startswith("TYPE(SERVICE_METHOD),") or normFilter.upper().startswith("TYPE(SERVICE),"):
@@ -248,57 +232,46 @@ class KRParser:
                     groups=self.applyPatterns(subject)
                     tmp_KR.matchedGroups.append(groups)
-                    # for g in groups:
-                    #     #if g["methods"] != None and len(g["methods"]) > 0:
-                    #     tmp_KR.matchedGroups.append(g)
-            #self.process(tmp_KR)
             kr=self.process(tmp_KR)
             with self.lock:
                 self.krs.append(kr)
+                self.pbar.update()
         except Exception as err:
             print(repr(err))
-        #return self.process(tmp_KR)
-    def parseBySLO_Threaded(self, slosF):
-        self.krs=[]
-        #i=1
-        # threads = list()
-        # for index, row in slosF.iterrows():
-        #     logging.info("Main : create and start thread %d.", index)
-        #     x = threading.Thread(target=self.parseBySLO, args=(row,))
-        #     threads.append(x)
-        #     x.start()
-        # #krs.append(krp.parseBySLO(row))
-        # for index, thread in enumerate(threads):
-        #     logging.info("Main : before joining thread %d.", index)
-        #     thread.join()
-        #     logging.info("Main : thread %d done", index)
-        # #resultSlos.extend(krs)
-        with concurrent.futures.ThreadPoolExecutor(10) as executor:
-            for index, row in slosF.iterrows():
-                # if i % 25 == 0:
-                #     time.sleep(0)
-                #args={index:index, }
-                executor.submit(self.parseBySLO, index,row)
-                # print(str(i)+"\n")
-                # i=i+1
-                # x = threading.Thread(target=self.parseBySLO, args=(row,))
-                # threads.append(x)
-                # x.start()
-        return self.krs
+    def parse(self, input):
+        with concurrent.futures.ThreadPoolExecutor(self.config["threads"]) as executor:
+            if type(input) == pd.DataFrame:
+                self.pbar = tqdm(total=input["id"].count(),desc=self.name)
+                for index, row in input.iterrows():
+                    executor.submit(self.parseBySLO, row)
+            elif type(input)== list:
+                self.pbar = tqdm(total=len(input), desc=self.name)
+                for slo in input:
+                    executor.submit(self.parseBySLO, slo)
+            elif type(input) == dict:
+                self.pbar = tqdm(total=1, desc=self.name)
+                executor.submit(self.parseBySLO, row)
+        return self.krs
-    def __init__(self, options: KROption=None ,serviceLookupParams={}, DTAPIURL=None, DTAPIToken=None ):
+    def __init__(self, name="Default Parser", options: KROption=None ,config={}, DTAPIURL=None, DTAPIToken=None ):
+        self.name=name
         self.DTAPIURL= DTAPIURL
         self.DTAPIToken=DTAPIToken
         self.options=options
-        self.serviceLookupParams=merge({"from":"now-2y"},serviceLookupParams)
+        self.config=merge({"threads": 3,
+                           "serviceLookupParams":{"from":"now-2y"},
+                           "extendResultObjects":{}}, config)
         self.krs=[]
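The request-count logic above expects the metrics API to return the usual result, data, dimensions and values layout for builtin:service.keyRequest.count.total. A small sketch of how hasData and count fall out of such a payload (the numbers are invented):

# Sketch: illustrative /api/v2/metrics/query payload; values are made up.
response_json = {
    "result": [{
        "metricId": "builtin:service.keyRequest.count.total",
        "data": [{
            "dimensions": ["SERVICE_METHOD-0000000000000002"],
            "values": [120.0, None, 85.0],   # one bucket per day at resolution "1d"
        }],
    }]
}

entities = response_json["result"][0]["data"]
method = {"entityId": "SERVICE_METHOD-0000000000000002", "hasData": {}, "count": {}}
label = "1W"

found = [x for x in entities if x["dimensions"][0] == method["entityId"]]
if len(found) > 0:
    method["hasData"][label] = True
    method["count"][label] = sum(x for x in found[0]["values"] if x is not None)  # 205.0
else:
    method["hasData"][label] = False
    method["count"][label] = 0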


@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: KeyRequestParser
-Version: 0.4
+Version: 0.5
 License: MIT
 Parses Keyrequests


@@ -7,4 +7,5 @@ KRParser/patterns.py
 KeyRequestParser.egg-info/PKG-INFO
 KeyRequestParser.egg-info/SOURCES.txt
 KeyRequestParser.egg-info/dependency_links.txt
+KeyRequestParser.egg-info/requires.txt
 KeyRequestParser.egg-info/top_level.txt


@@ -58,20 +58,24 @@ class KeyRequestGroup(MutableSequence):
                 query+=",entityId(\""+'","'.join(val["methods"])+"\")"
             else:
                 query+=",entityName.in(\""+'","'.join(val["methods"])+"\")"
+        # query="builtin:service.keyRequest.count.total:filter(in(\"dt.entity.service_method\",entitySelector(\"type(service_method)"
+        # val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
+        # val['methods'] = list(map(lambda x: x.replace("~","") , val['methods']))
+        # if len(val["services"]) > 0:
+        #     if val["services"][0].startswith("SERVICE-"):
+        #         query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(val["services"])+"\"))"
+        #     else:
+        #         query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(val["services"])+"\"))"
+        # if len(val["methods"]) > 0:
+        #     if val["methods"][0].startswith("SERVICE_METHOD-"):
+        #         query+=",entityId(\""+'","'.join(val["methods"])+"\")"
+        #     else:
+        #         query+=",entityName.in(\""+'","'.join(val["methods"])+"\")"
+        # query+="\")))"
         val["existsQuery"]= query
-    # def createServiceResolveQuery(self, val):
-    #     query="type(SERVICE)"
-    #     val['services'] = list(map(lambda x: x.replace("~","") , val['services']))
-    #     if len(val["services"]) > 0:
-    #         if val["services"][0].startswith("SERVICE-"):
-    #             query+=",entityId(\""+'","'.join(val["services"])+"\")"
-    #         else:
-    #             query+=",entityName.in(\""+'","'.join(val["services"])+"\")"
-    #     val["resolveServiceQuery"]= query
     def insert(self, ii, val):


@@ -1,14 +1,12 @@
 import re
 from KRParser import patterns, keyrequests, helper
 from enum import Flag, auto
-import logging
 import threading
 import concurrent.futures
-import time
 from jsonmerge import merge
+import pandas as pd
+from tqdm import *
@@ -19,24 +17,15 @@ class KROption(Flag):
     RESOLVESERVICES = auto()
 class KRParser:
-    #threadLimiter = threading.BoundedSemaphore(3)
     patterns=[patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3(), patterns.Pattern5(), patterns.Pattern4() ]
     lock = threading.Lock()
     def normalize(self,x):
-        #tmp=x.replace("~","")
         tmp=x.replace("\n","")
-        #tmp=tmp.replace("\"/","\"")
-        #tmp=tmp.replace("\"/","") -_>was active
-        #tmp=tmp.replace("/\"","\"")
         tmp=tmp.replace("/\"","")
         tmp=tmp.replace("\"","")
         tmp=tmp.replace("\t","")
         tmp=re.sub("([\s]*)\)", ")", tmp)
         tmp=re.sub("\([\s\n\r]*", "(", tmp)
         tmp=re.sub("\,[\s\n\r]*", ",", tmp)
         tmp=re.sub("\)[\s\n\r]*,", "),", tmp)
@@ -58,7 +47,30 @@ class KRParser:
     def checkKeyRequetsHasData(self,kr, tfrom, DTAPIURL, DTAPIToken):
-        DTAPIURL = DTAPIURL + "/api/v2/entities"
+        # DTAPIURL = DTAPIURL + "/api/v2/entities"
+        # headers = {
+        #     'Content-Type': 'application/json',
+        #     'Authorization': 'Api-Token ' + DTAPIToken
+        # }
+        # for gid, group in enumerate(kr.matchedGroups):
+        #     params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"}
+        #     response = helper.get_request(DTAPIURL, headers, params)
+        #     entities = (response.json())['entities']
+        #     if len(entities) > 0:
+        #         y=0
+        #         for method in kr.keyRequests:
+        #             if method["groupId"] == gid:
+        #                 found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
+        #                 if len(found) > 0:
+        #                     method["hasData"][tfrom["label"]]=True
+        #                 else:
+        #                     method["hasData"][tfrom["label"]]=False
+        DTAPIURL = DTAPIURL + "/api/v2/metrics/query"
         headers = {
             'Content-Type': 'application/json',
@@ -66,32 +78,23 @@ class KRParser:
         }
         for gid, group in enumerate(kr.matchedGroups):
-            params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"}
+            params={"entitySelector": group["existsQuery"], "resolution":"1d", "metricSelector": "builtin:service.keyRequest.count.total", "from":tfrom["tfrom"]}
             response = helper.get_request(DTAPIURL, headers, params)
-            entities = (response.json())['entities']
-            if len(entities) > 0:
-                y=0
+            entities = (response.json())["result"][0]["data"]
             for method in kr.keyRequests:
                 if method["groupId"] == gid:
-                    found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]
+                    found = [x for x in entities if x["dimensions"][0] == method["entityId"]]
                    if len(found) > 0:
                        method["hasData"][tfrom["label"]]=True
-                        #method["displayName"]=found[0]["displayName"]
-                        #method["entityId"]=found[0]["entityId"]
-                        #method["services"]=found[0]["fromRelationships"]["isServiceMethodOfService"]
-                        # for idx,o in enumerate(method["services"]):
-                        #     tmpS=[p for p in kr.services if p["entityId"]==o["id"]]
-                        #     if len(tmpS)>0:
-                        #         method["services"][idx]=tmpS[0]
+                        method["count"][tfrom["label"]]=sum([x for x in found[0]['values'] if x != None])
                    else:
                        method["hasData"][tfrom["label"]]=False
+                        method["count"][tfrom["label"]]=0
     def resolveServices(self,services, DTAPIURL, DTAPIToken):
-        #DTAPIURL = DTAPIURL + "/api/v2/entities"
         headers = {
             'Content-Type': 'application/json',
@@ -101,8 +104,8 @@ class KRParser:
         for gid, service in enumerate(services):
             query="type(SERVICE),entityId("+service["id"]+")"
-            params=merge(self.serviceLookupParams,{"entitySelector": query})
-            #params={"entitySelector": query,"from":"now-2y", "fields":"tags"}
+            params=merge(self.config["serviceLookupParams"],{"entitySelector": query})
             response = helper.get_request(DTAPIURL, headers, params)
             entities = (response.json())['entities']
@@ -138,9 +141,6 @@ class KRParser:
            response = helper.get_request(DTAPIURL, headers, params)
            entities = (response.json())['entities']
-           # if len(entities) > 1:
-           #     kr.keyRequests[gid]['foundCount']=len(entities)
-           #     print("Multiple keyrequest found: ")
            if len(entities)> 0:
                kr.keyRequests[gid]["found"]=True
@@ -151,7 +151,7 @@ class KRParser:
                if "isServiceMethodOfService" in entities[0]["fromRelationships"]:
                    kr.keyRequests[gid]["services"]=entities[0]["fromRelationships"]["isServiceMethodOfService"]
-                   if KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0:
+                   if options and KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0:
                        self.resolveServices(kr.keyRequests[gid]["services"], DTAPIURL, DTAPIToken)
         except Exception as err:
@@ -191,53 +191,37 @@ class KRParser:
                tmp_methods=self.getKeyRequestsByServices(group["services"])
                for m in tmp_methods:
-                   tmp={"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                   tmp=merge({"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""},self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    kr.keyRequests.append(tmp)
                for method in group["methods"]:
                    if method.startswith('SERVICE_METHOD-'):
-                       tmp={"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                       tmp=merge({"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{},"count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    else:
-                       tmp={"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
+                       tmp=merge({"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None,
                    kr.keyRequests.append(tmp)
-            # for service in group["services"]:
-            #     if service.startswith('SERVICE-'):
-            #         tmp={"displayName": None,"comparer": "entityId", "entityId":service, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
-            #     else:
-            #         tmp={"displayName":service,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None,
-            #     kr.services.append(tmp)
+        self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options)
         if self.options:
-            if KROption.RESOLVEKEYREQUETS in self.options:
-                self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options)
             if KROption.VALIDATE_HASDATA in self.options:
                 self.checkKeyRequetsHasData(kr,{"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
                 self.checkKeyRequetsHasData(kr,{"label":"1M", "tfrom":"now-1M"},self.DTAPIURL, self.DTAPIToken)
-            # elif KROption.RESOLVEKEYREQUETS in self.options:
-            #     self.checkKeyRequetsHasData(kr, {"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken)
-            # if KROption.RESOLVESERVICES in self.options:
-            #     self.resolveServices(kr,self.DTAPIURL, self.DTAPIToken)
         return kr
-    def parseBySLO(self,index,row):
+    def parseBySLO(self,row):
-        #normalize
-        print(index)
         try:
             normFilter=self.normalize(row['filter'])
             normExpresseion=self.normalize(row['metricExpression'])
-            tmp_KR = keyrequests.KR({"sloName":row["name"], "env":row["env"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None})
+            tmp_KR = keyrequests.KR(merge({"sloName":row["name"], "sloId":row["id"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None}, self.config["extendResultObjects"]))
             #SLO with Filter
             if normFilter.upper().startswith("TYPE(SERVICE_METHOD),") or normFilter.upper().startswith("TYPE(SERVICE),"):
@@ -248,57 +232,46 @@ class KRParser:
                     groups=self.applyPatterns(subject)
                     tmp_KR.matchedGroups.append(groups)
-                    # for g in groups:
-                    #     #if g["methods"] != None and len(g["methods"]) > 0:
-                    #     tmp_KR.matchedGroups.append(g)
-            #self.process(tmp_KR)
             kr=self.process(tmp_KR)
             with self.lock:
                 self.krs.append(kr)
+                self.pbar.update()
         except Exception as err:
             print(repr(err))
-        #return self.process(tmp_KR)
-    def parseBySLO_Threaded(self, slosF):
-        self.krs=[]
-        #i=1
-        # threads = list()
-        # for index, row in slosF.iterrows():
-        #     logging.info("Main : create and start thread %d.", index)
-        #     x = threading.Thread(target=self.parseBySLO, args=(row,))
-        #     threads.append(x)
-        #     x.start()
-        # #krs.append(krp.parseBySLO(row))
-        # for index, thread in enumerate(threads):
-        #     logging.info("Main : before joining thread %d.", index)
-        #     thread.join()
-        #     logging.info("Main : thread %d done", index)
-        # #resultSlos.extend(krs)
-        with concurrent.futures.ThreadPoolExecutor(10) as executor:
-            for index, row in slosF.iterrows():
-                # if i % 25 == 0:
-                #     time.sleep(0)
-                #args={index:index, }
-                executor.submit(self.parseBySLO, index,row)
-                # print(str(i)+"\n")
-                # i=i+1
-                # x = threading.Thread(target=self.parseBySLO, args=(row,))
-                # threads.append(x)
-                # x.start()
-        return self.krs
+    def parse(self, input):
+        with concurrent.futures.ThreadPoolExecutor(self.config["threads"]) as executor:
+            if type(input) == pd.DataFrame:
+                self.pbar = tqdm(total=input["id"].count(),desc=self.name)
+                for index, row in input.iterrows():
+                    executor.submit(self.parseBySLO, row)
+            elif type(input)== list:
+                self.pbar = tqdm(total=len(input), desc=self.name)
+                for slo in input:
+                    executor.submit(self.parseBySLO, slo)
+            elif type(input) == dict:
+                self.pbar = tqdm(total=1, desc=self.name)
+                executor.submit(self.parseBySLO, row)
+        return self.krs
-    def __init__(self, options: KROption=None ,serviceLookupParams={}, DTAPIURL=None, DTAPIToken=None ):
+    def __init__(self, name="Default Parser", options: KROption=None ,config={}, DTAPIURL=None, DTAPIToken=None ):
+        self.name=name
         self.DTAPIURL= DTAPIURL
         self.DTAPIToken=DTAPIToken
         self.options=options
-        self.serviceLookupParams=merge({"from":"now-2y"},serviceLookupParams)
+        self.config=merge({"threads": 3,
+                           "serviceLookupParams":{"from":"now-2y"},
+                           "extendResultObjects":{}}, config)
         self.krs=[]

Binary file not shown.


@@ -1,8 +1,13 @@
 from setuptools import setup, find_packages
 setup(
     name='KeyRequestParser',
-    version='0.4',
+    version='0.5',
     packages=find_packages(include=["KRParser"]),
     license='MIT',
     long_description="Parses Keyrequests",
+    install_requires=[
+        'requests',
+        'jsonmerge',
+        'tqdm'
+    ],
 )


@@ -0,0 +1,66 @@
import sys
import os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/..")
#sys.path.append('..')
import unittest
import yaml
#from helper import get_request
from KRParser.helper import get_request
from KRParser.krparser import KRParser, KROption
import pandas as pd
#from KRParser import helper, krparser
from decouple import config

class TestStringMethods(unittest.TestCase):

    def setUp(self):
        with open('./tests/environment.yaml') as file:
            self.env_doc = yaml.safe_load(file)

    def test_upper(self):
        self.assertEqual('foo'.upper(), 'FOO')

    def test_isupper(self):
        self.assertTrue('FOO'.isupper())
        self.assertFalse('Foo'.isupper())

    def test_simplifySLOs(self):
        DTURL=self.env_doc['euprod'][1]["env-url"]
        DTTOKEN = config(self.env_doc['euprod'][2].get('env-token-name'))
        api_url = DTURL+"/api/v2/slo/15c29ec3-71a7-3298-9e99-aad2e5bf347c"
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTTOKEN
        }
        result=get_request(api_url, headers, {})
        krp = KRParser(options=KROption.RESOLVEKEYREQUETS | KROption.VALIDATE_HASDATA | KROption.RESOLVESERVICES, config={"threads":10,"serviceLookupParams":{"fields":"tags"},"extendResultObjects":{"env":"emea"}}, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)
        #df = pd.DataFrame.from_dict(result.json().items())
        #for index, row in pd.DataFrame(result.json().items()).iterrows():
        slo=result.json()
        slo["env"]="emea"
        listSlo=[slo, slo]
        krp.parse(listSlo)
        #krp.parseBySLO(0,slo)
        #krp.parseBySLO_Threaded(0, )
        #self.assertEqual(s.split(), ['hello', 'world'])
        # check that s.split fails when the separator is not a strin

if __name__ == '__main__':
    unittest.main()
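The test only exercises the list path of parse(); the method also accepts a pandas DataFrame and a single dict. A hedged sketch of the DataFrame path, assuming the frame carries the columns parseBySLO reads (id, name, filter, metricExpression); the row values here are placeholders, not data from this commit:

import pandas as pd

slos_df = pd.DataFrame([{
    "id": "placeholder-slo-id",
    "name": "placeholder SLO name",
    "filter": 'type(SERVICE_METHOD),entityName.in("placeholder-method")',
    "metricExpression": "placeholder-metric-expression",
}])
krs = krp.parse(slos_df)   # krp constructed as in test_simplifySLOs above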

tests/__init__.py (new, empty file)

tests/environment.yaml (new file, +21)

@@ -0,0 +1,21 @@
---
euprod:
- name: "EUprod"
- env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
naprod:
- name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
cnprod:
- name: "cnprod"
- env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN_VAR"
- jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
# #cnpreprod:
# - name: "cnpreprod"
# - env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
# - env-token-name: "CNPREPROD_TOKEN_VAR"
# - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
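Note that each environment entry is a YAML list of single-key mappings, so the tests address fields by position rather than by name. A short sketch of how the test above resolves the URL and token (python-decouple reads the named variable from the process environment or a .env file):

import yaml
from decouple import config

with open("./tests/environment.yaml") as f:
    env_doc = yaml.safe_load(f)

dt_url = env_doc["euprod"][1]["env-url"]                       # "https://xxu26128.live.dynatrace.com"
dt_token = config(env_doc["euprod"][2].get("env-token-name"))  # value of EUPROD_TOKEN_VAR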

tests/requirements.txt (new file, +6)

@@ -0,0 +1,6 @@
pyyaml
python-decouple
requests
jsonmerge
pandas
tqdm