diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0738441 --- /dev/null +++ b/.gitignore @@ -0,0 +1,138 @@ +.vscode +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +### Terraform stuff +**/.terraform/* +crash.log +*.tfvars + +#excel reports +*.xlsx \ No newline at end of file diff --git a/KRParser/keyrequests.py b/KRParser/keyrequests.py index a877da3..432ba16 100644 --- a/KRParser/keyrequests.py +++ b/KRParser/keyrequests.py @@ -58,20 +58,24 @@ class KeyRequestGroup(MutableSequence): query+=",entityId(\""+'","'.join(val["methods"])+"\")" else: query+=",entityName.in(\""+'","'.join(val["methods"])+"\")" - + # query="builtin:service.keyRequest.count.total:filter(in(\"dt.entity.service_method\",entitySelector(\"type(service_method)" + # val['services'] = list(map(lambda x: x.replace("~","") , val['services'])) + # val['methods'] = list(map(lambda x: x.replace("~","") , val['methods'])) + # if len(val["services"]) > 0: + # if val["services"][0].startswith("SERVICE-"): + # query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\""+'","'.join(val["services"])+"\"))" + # else: + # query+=",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\""+'","'.join(val["services"])+"\"))" + + # if len(val["methods"]) > 0: + # if val["methods"][0].startswith("SERVICE_METHOD-"): + # query+=",entityId(\""+'","'.join(val["methods"])+"\")" + # else: + # query+=",entityName.in(\""+'","'.join(val["methods"])+"\")" + + # query+="\")))" val["existsQuery"]= query - - # def createServiceResolveQuery(self, val): - # query="type(SERVICE)" - # val['services'] = list(map(lambda x: x.replace("~","") , val['services'])) 
- - # if len(val["services"]) > 0: - # if val["services"][0].startswith("SERVICE-"): - # query+=",entityId(\""+'","'.join(val["services"])+"\")" - # else: - # query+=",entityName.in(\""+'","'.join(val["services"])+"\")" - - # val["resolveServiceQuery"]= query + def insert(self, ii, val): diff --git a/KRParser/krparser.py b/KRParser/krparser.py index 7305ba0..b634a6d 100644 --- a/KRParser/krparser.py +++ b/KRParser/krparser.py @@ -1,14 +1,12 @@ import re from KRParser import patterns, keyrequests, helper - - from enum import Flag, auto -import logging import threading import concurrent.futures -import time from jsonmerge import merge +import pandas as pd +from tqdm import * @@ -19,24 +17,15 @@ class KROption(Flag): RESOLVESERVICES = auto() class KRParser: - #threadLimiter = threading.BoundedSemaphore(3) patterns=[patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3(), patterns.Pattern5(), patterns.Pattern4() ] lock = threading.Lock() def normalize(self,x): - #tmp=x.replace("~","") tmp=x.replace("\n","") - #tmp=tmp.replace("\"/","\"") - #tmp=tmp.replace("\"/","") -_>was active - #tmp=tmp.replace("/\"","\"") tmp=tmp.replace("/\"","") tmp=tmp.replace("\"","") tmp=tmp.replace("\t","") - tmp=re.sub("([\s]*)\)", ")", tmp) - - - tmp=re.sub("\([\s\n\r]*", "(", tmp) tmp=re.sub("\,[\s\n\r]*", ",", tmp) tmp=re.sub("\)[\s\n\r]*,", "),", tmp) @@ -58,40 +47,54 @@ class KRParser: def checkKeyRequetsHasData(self,kr, tfrom, DTAPIURL, DTAPIToken): - DTAPIURL = DTAPIURL + "/api/v2/entities" + # DTAPIURL = DTAPIURL + "/api/v2/entities" + # headers = { + # 'Content-Type': 'application/json', + # 'Authorization': 'Api-Token ' + DTAPIToken + # } + + # for gid, group in enumerate(kr.matchedGroups): + # params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"} + # response = helper.get_request(DTAPIURL, headers, params) + # entities = (response.json())['entities'] + + # if len(entities) > 0: + # y=0 + # for method in kr.keyRequests: + # if method["groupId"] == gid: + # found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]] + + # if len(found) > 0: + # method["hasData"][tfrom["label"]]=True + # else: + # method["hasData"][tfrom["label"]]=False + + DTAPIURL = DTAPIURL + "/api/v2/metrics/query" + headers = { 'Content-Type': 'application/json', 'Authorization': 'Api-Token ' + DTAPIToken } for gid, group in enumerate(kr.matchedGroups): - params={"entitySelector": group["existsQuery"], "from":tfrom["tfrom"], "fields": "fromRelationships"} + params={"entitySelector": group["existsQuery"], "resolution":"1d", "metricSelector": "builtin:service.keyRequest.count.total", "from":tfrom["tfrom"]} response = helper.get_request(DTAPIURL, headers, params) - entities = (response.json())['entities'] + entities = (response.json())["result"][0]["data"] - if len(entities) > 0: - y=0 for method in kr.keyRequests: if method["groupId"] == gid: - found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]] + + found = [x for x in entities if x["dimensions"][0] == method["entityId"]] if len(found) > 0: method["hasData"][tfrom["label"]]=True - #method["displayName"]=found[0]["displayName"] - #method["entityId"]=found[0]["entityId"] - #method["services"]=found[0]["fromRelationships"]["isServiceMethodOfService"] - - # for idx,o in enumerate(method["services"]): - # tmpS=[p for p in kr.services if p["entityId"]==o["id"]] - # if len(tmpS)>0: - # method["services"][idx]=tmpS[0] - + method["count"][tfrom["label"]]=sum([x for x in 
found[0]['values'] if x != None]) else: - method["hasData"][tfrom["label"]]=False + method["hasData"][tfrom["label"]]=False + method["count"][tfrom["label"]]=0 def resolveServices(self,services, DTAPIURL, DTAPIToken): - #DTAPIURL = DTAPIURL + "/api/v2/entities" headers = { 'Content-Type': 'application/json', @@ -101,8 +104,8 @@ class KRParser: for gid, service in enumerate(services): query="type(SERVICE),entityId("+service["id"]+")" - params=merge(self.serviceLookupParams,{"entitySelector": query}) - #params={"entitySelector": query,"from":"now-2y", "fields":"tags"} + params=merge(self.config["serviceLookupParams"],{"entitySelector": query}) + response = helper.get_request(DTAPIURL, headers, params) entities = (response.json())['entities'] @@ -138,9 +141,6 @@ class KRParser: response = helper.get_request(DTAPIURL, headers, params) entities = (response.json())['entities'] - # if len(entities) > 1: - # kr.keyRequests[gid]['foundCount']=len(entities) - # print("Multiple keyrequest found: ") if len(entities)> 0: kr.keyRequests[gid]["found"]=True @@ -151,7 +151,7 @@ class KRParser: if "isServiceMethodOfService" in entities[0]["fromRelationships"]: kr.keyRequests[gid]["services"]=entities[0]["fromRelationships"]["isServiceMethodOfService"] - if KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0: + if options and KROption.RESOLVESERVICES in options and len( kr.keyRequests[gid]["services"])>0: self.resolveServices(kr.keyRequests[gid]["services"], DTAPIURL, DTAPIToken) except Exception as err: @@ -191,53 +191,37 @@ class KRParser: tmp_methods=self.getKeyRequestsByServices(group["services"]) for m in tmp_methods: - tmp={"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None, + tmp=merge({"displayName": None,"comparer": "entityId", "entityId":m["entityId"], "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""},self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None, kr.keyRequests.append(tmp) for method in group["methods"]: if method.startswith('SERVICE_METHOD-'): - tmp={"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None, + tmp=merge({"displayName": None,"comparer": "entityId", "entityId":method, "groupId":gid, "hasData":{},"count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None, else: - tmp={"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "services":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None, + tmp=merge({"displayName":method,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "count":{},"services":[], "found":False, "foundCount":0, "exception":""}, self.config["extendResultObjects"]) #"exists":None, 'hasData_1W':None, kr.keyRequests.append(tmp) - - - # for service in group["services"]: - # if service.startswith('SERVICE-'): - # tmp={"displayName": None,"comparer": "entityId", "entityId":service, "groupId":gid, "hasData":{}, "keyReuqests":[], "found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None, - # else: - # tmp={"displayName":service,"comparer": "displayName", "entityId":None, "groupId":gid, "hasData":{}, "keyReuqests":[], 
"found":False, "foundCount":0, "exception":""} #"exists":None, 'hasData_1W':None, - - # kr.services.append(tmp) + + self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options) if self.options: - if KROption.RESOLVEKEYREQUETS in self.options: - self.resolveKeyRequests(kr,self.DTAPIURL, self.DTAPIToken, self.options) if KROption.VALIDATE_HASDATA in self.options: self.checkKeyRequetsHasData(kr,{"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken) self.checkKeyRequetsHasData(kr,{"label":"1M", "tfrom":"now-1M"},self.DTAPIURL, self.DTAPIToken) - # elif KROption.RESOLVEKEYREQUETS in self.options: - # self.checkKeyRequetsHasData(kr, {"label":"1W", "tfrom":"now-1w"},self.DTAPIURL, self.DTAPIToken) - # if KROption.RESOLVESERVICES in self.options: - # self.resolveServices(kr,self.DTAPIURL, self.DTAPIToken) return kr - def parseBySLO(self,index,row): - #normalize - print(index) + def parseBySLO(self,row): try: normFilter=self.normalize(row['filter']) normExpresseion=self.normalize(row['metricExpression']) - tmp_KR = keyrequests.KR({"sloName":row["name"], "env":row["env"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None}) - + tmp_KR = keyrequests.KR(merge({"sloName":row["name"], "sloId":row["id"], "metricExpression": normExpresseion, "filter": normFilter, "matchedGroups": None}, self.config["extendResultObjects"])) #SLO with Filter if normFilter.upper().startswith("TYPE(SERVICE_METHOD),") or normFilter.upper().startswith("TYPE(SERVICE),"): @@ -248,57 +232,46 @@ class KRParser: groups=self.applyPatterns(subject) tmp_KR.matchedGroups.append(groups) - # for g in groups: - # #if g["methods"] != None and len(g["methods"]) > 0: - # tmp_KR.matchedGroups.append(g) - - #self.process(tmp_KR) + kr=self.process(tmp_KR) + with self.lock: self.krs.append(kr) + self.pbar.update() except Exception as err: print(repr(err)) - #return self.process(tmp_KR) - def parseBySLO_Threaded(self, slosF): - self.krs=[] - #i=1 - # threads = list() - # for index, row in slosF.iterrows(): - # logging.info("Main : create and start thread %d.", index) - # x = threading.Thread(target=self.parseBySLO, args=(row,)) - # threads.append(x) - # x.start() - - # #krs.append(krp.parseBySLO(row)) + def parse(self, input): - # for index, thread in enumerate(threads): - # logging.info("Main : before joining thread %d.", index) - # thread.join() - # logging.info("Main : thread %d done", index) - - # #resultSlos.extend(krs) - - - with concurrent.futures.ThreadPoolExecutor(10) as executor: - for index, row in slosF.iterrows(): - # if i % 25 == 0: - # time.sleep(0) - #args={index:index, } - executor.submit(self.parseBySLO, index,row) - # print(str(i)+"\n") - # i=i+1 - # x = threading.Thread(target=self.parseBySLO, args=(row,)) - # threads.append(x) - # x.start() + with concurrent.futures.ThreadPoolExecutor(self.config["threads"]) as executor: + + if type(input) == pd.DataFrame: + self.pbar = tqdm(total=input["id"].count(),desc=self.name) + + for index, row in input.iterrows(): + executor.submit(self.parseBySLO, row) + + elif type(input)== list: + self.pbar = tqdm(total=len(input), desc=self.name) + for slo in input: + executor.submit(self.parseBySLO, slo) + + elif type(input) == dict: + self.pbar = tqdm(total=1, desc=self.name) + executor.submit(self.parseBySLO, row) + + + return self.krs - return self.krs - def __init__(self, options: KROption=None ,serviceLookupParams={}, DTAPIURL=None, DTAPIToken=None ): + def __init__(self, name="Default Parser", options: KROption=None ,config={}, 
+        self.name=name
         self.DTAPIURL= DTAPIURL
         self.DTAPIToken=DTAPIToken
         self.options=options
-        self.serviceLookupParams=merge({"from":"now-2y"},serviceLookupParams)
+        self.config=merge({"threads": 3,
+                           "serviceLookupParams":{"from":"now-2y"},
+                           "extendResultObjects":{}}, config)
         self.krs=[]
\ No newline at end of file
diff --git a/KeyRequestParser.egg-info/PKG-INFO b/KeyRequestParser.egg-info/PKG-INFO
index eb80030..7886544 100644
--- a/KeyRequestParser.egg-info/PKG-INFO
+++ b/KeyRequestParser.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: KeyRequestParser
-Version: 0.4
+Version: 0.5
 License: MIT
 Parses Keyrequests
diff --git a/KeyRequestParser.egg-info/SOURCES.txt b/KeyRequestParser.egg-info/SOURCES.txt
index edb4624..5e4aeb8 100644
--- a/KeyRequestParser.egg-info/SOURCES.txt
+++ b/KeyRequestParser.egg-info/SOURCES.txt
@@ -7,4 +7,5 @@ KRParser/patterns.py
 KeyRequestParser.egg-info/PKG-INFO
 KeyRequestParser.egg-info/SOURCES.txt
 KeyRequestParser.egg-info/dependency_links.txt
+KeyRequestParser.egg-info/requires.txt
 KeyRequestParser.egg-info/top_level.txt
\ No newline at end of file
diff --git a/dist/KeyRequestParser-0.4-py3.10.egg b/dist/KeyRequestParser-0.4-py3.10.egg
index 6157478..2c585a4 100644
Binary files a/dist/KeyRequestParser-0.4-py3.10.egg and b/dist/KeyRequestParser-0.4-py3.10.egg differ
diff --git a/setup.py b/setup.py
index 1732ce4..367c67c 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,13 @@
 from setuptools import setup, find_packages
 setup(
     name='KeyRequestParser',
-    version='0.4',
+    version='0.5',
     packages=find_packages(include=["KRParser"]),
     license='MIT',
     long_description="Parses Keyrequests",
+    install_requires=[
+        'requests',
+        'jsonmerge',
+        'tqdm'
+    ],
 )
diff --git a/tests/TestStringMethods.py b/tests/TestStringMethods.py
new file mode 100644
index 0000000..6fbfeac
--- /dev/null
+++ b/tests/TestStringMethods.py
@@ -0,0 +1,66 @@
+import sys
+import os
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/..")
+#sys.path.append('..')
+
+import unittest
+import yaml
+#from helper import get_request
+from KRParser.helper import get_request
+from KRParser.krparser import KRParser, KROption
+
+import pandas as pd
+
+
+#from KRParser import helper, krparser
+from decouple import config
+
+
+
+class TestStringMethods(unittest.TestCase):
+
+
+    def setUp(self):
+        with open('./tests/environment.yaml') as file:
+            self.env_doc = yaml.safe_load(file)
+
+
+    def test_upper(self):
+        self.assertEqual('foo'.upper(), 'FOO')
+
+    def test_isupper(self):
+        self.assertTrue('FOO'.isupper())
+        self.assertFalse('Foo'.isupper())
+
+    def test_simplifySLOs(self):
+        DTURL=self.env_doc['euprod'][1]["env-url"]
+        DTTOKEN = config(self.env_doc['euprod'][2].get('env-token-name'))
+
+        api_url = DTURL+"/api/v2/slo/15c29ec3-71a7-3298-9e99-aad2e5bf347c"
+
+        headers = {
+            'Content-Type': 'application/json',
+            'Authorization': 'Api-Token ' + DTTOKEN
+        }
+
+
+        result=get_request(api_url, headers, {})
+
+        krp = KRParser(options=KROption.RESOLVEKEYREQUETS | KROption.VALIDATE_HASDATA | KROption.RESOLVESERVICES, config={"threads":10,"serviceLookupParams":{"fields":"tags"},"extendResultObjects":{"env":"emea"}}, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)
+
+        #df = pd.DataFrame.from_dict(result.json().items())
+        #for index, row in pd.DataFrame(result.json().items()).iterrows():
+        slo=result.json()
+        slo["env"]="emea"
+
+        listSlo=[slo, slo]
+        krp.parse(listSlo)
+
+        #krp.parseBySLO(0,slo)
+        #krp.parseBySLO_Threaded(0, )
+
+        #self.assertEqual(s.split(), ['hello', 'world'])
+        # check that s.split fails when the separator is not a string
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/environment.yaml b/tests/environment.yaml
new file mode 100644
index 0000000..7e9829a
--- /dev/null
+++ b/tests/environment.yaml
@@ -0,0 +1,21 @@
+---
+euprod:
+  - name: "EUprod"
+  - env-url: "https://xxu26128.live.dynatrace.com"
+  - env-token-name: "EUPROD_TOKEN_VAR"
+  - jenkins: "https://jaws.bmwgroup.net/opapm/"
+naprod:
+  - name: "naprod"
+  - env-url: "https://wgv50241.live.dynatrace.com"
+  - env-token-name: "NAPROD_TOKEN_VAR"
+  - jenkins: "https://jaws.bmwgroup.net/opapm/"
+cnprod:
+  - name: "cnprod"
+  - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
+  - env-token-name: "CNPROD_TOKEN_VAR"
+  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
+# #cnpreprod:
+#   - name: "cnpreprod"
+#   - env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
+#   - env-token-name: "CNPREPROD_TOKEN_VAR"
+#   - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
diff --git a/tests/requirements.txt b/tests/requirements.txt
new file mode 100644
index 0000000..4cdb679
--- /dev/null
+++ b/tests/requirements.txt
@@ -0,0 +1,6 @@
+pyyaml
+python-decouple
+requests
+jsonmerge
+pandas
+tqdm
\ No newline at end of file
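
Usage note: a minimal sketch of the reworked KRParser API introduced by this change, assembled from the new constructor defaults and tests/TestStringMethods.py. The environment URL, token, and SLO field values below are placeholders, not values from this repository.

    from KRParser.krparser import KRParser, KROption

    # Placeholder credentials -- supply a real Dynatrace environment URL and API token.
    DT_URL = "https://example.live.dynatrace.com"
    DT_TOKEN = "dt0c01.EXAMPLE"

    krp = KRParser(
        name="Example Parser",
        options=KROption.RESOLVEKEYREQUETS | KROption.VALIDATE_HASDATA | KROption.RESOLVESERVICES,
        config={
            "threads": 10,                              # worker threads used by parse()
            "serviceLookupParams": {"fields": "tags"},  # extra query params for service lookups
            "extendResultObjects": {"env": "emea"},     # merged into every result object
        },
        DTAPIURL=DT_URL,
        DTAPIToken=DT_TOKEN,
    )

    # parse() accepts a pandas DataFrame, a list of SLO dicts, or a single SLO dict;
    # each SLO needs at least id, name, filter and metricExpression (placeholder values here).
    slos = [{"id": "slo-id", "name": "example SLO",
             "filter": "type(SERVICE_METHOD),entityName.in(\"method-1\")",
             "metricExpression": "builtin:service.keyRequest.count.total"}]
    results = krp.parse(slos)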