Initial Commit
commit 2c7d991258

+++ b/.gitignore
@@ -0,0 +1,138 @@
.vscode
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

### Terraform stuff
**/.terraform/*
crash.log
*.tfvars

# Excel reports
*.xlsx
@@ -0,0 +1,153 @@
import yaml
from decouple import config
import dynatraceAPI
import pandas as pd
from pagination import Pagination
from key_request_parser import krparser

from datetime import datetime, timedelta
import re


def getSLO(ENV, DTAPIToken, DTENV):
    # DTENV = base url
    # DTAPIToken = security token
    dtclient = dynatraceAPI.Dynatrace(DTENV, DTAPIToken)
    my_params_report = {'pageSize': 25}
    # gets all SLOs; filtering happens later
    api_url_report = "/api/v2/slo"
    pages = dtclient.returnPagination(api_url_report, my_params_report, "slo")
    df = pd.DataFrame(pages.elements)
    df["env"] = ENV
    return df


def previous_week_range(date: datetime):
    start_date = date + timedelta(-date.weekday(), weeks=-1)
    end_date = date + timedelta(-date.weekday() - 1)
    return start_date, end_date
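
# Example (not in the original): previous_week_range(datetime(2023, 3, 15))
# -> (datetime(2023, 3, 6, 0, 0), datetime(2023, 3, 12, 0, 0)),
# i.e. Monday through Sunday of the previous calendar week.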


def main():
    # Get all SLOs
    reportItem = {}
    with open('./environment.yaml') as file:
        env_doc = yaml.safe_load(file)

    for env, doc in env_doc.items():
        token = dict(doc[2])
        url = dict(doc[1])

        if config(token.get('env-token-name')) != "":
            print("Gathering data, hold on a minute")
            DTTOKEN = config(token.get('env-token-name'))
            DTURL = url.get('env-url')

            # krp = krparser.KRParser(krparser.KROption.VALIDATE_EXISTS | krparser.KROption.VALIDATE_HASDATA, DTURL, DTTOKEN)

            slosF = getSLO(env, DTTOKEN, DTURL)
            slosF = slosF[slosF['name'].str.startswith('TP_')]

            # parse the metric expression to get services and requests
            krs = []
            krp = krparser.KRParser(krparser.KROption.VALIDATE_EXISTS | krparser.KROption.VALIDATE_HASDATA | krparser.KROption.RESOLVESERVICES, DTURL, DTTOKEN)

            for index, row in slosF.iterrows():
                krs.append(krp.parseBySLO(row))

            # Intended report structure: SLO name | service | process group | tags
            # {
            #     "$sloname$": {
            #         "sloname": "$sloname$",
            #         "services": [{"serviceName": "$servicename$"}]
            #     },
            #     ...
            # }
            #
            # Concrete example:
            # {
            #     "SLO 1": {
            #         "sloname": "SLO 1",
            #         "services": [
            #             {"serviceName": "Service 1", "processGroup": "Process Group 1", "tags": ["tag1", "tag2"]},
            #             {"serviceName": "Service 2", "processGroup": "Process Group 2", "tags": ["tag3", "tag4"]}
            #         ]
            #     },
            #     "SLO 2": {
            #         "sloname": "SLO 2",
            #         "services": [
            #             {"serviceName": "Service 3", "processGroup": "Process Group 3", "tags": ["tag5", "tag6"]}
            #         ]
            #     }
            # }
            for kr in krs:
                # The original assignment was left unfinished; storing the parsed
                # KR under its SLO name matches the sketch above.
                reportItem[kr.metadata["sloName"]] = kr

            # DEBUG
            # print(krs)
            for x in krs:
                print(x.metadata)

            # regex ~ XYZ ~

            # filtering the SLO dataframe based on those dates and the service name

            # import datetime

            # import re

            # extract all the service names that start with "TP_"
            metric_expression = 'sum(duration), filter(entity.service.name, beginsWith("TP_"))'
            # Define regex pattern to match service names
            regex_pattern = r'beginsWith\("TP_(.*?)"\)'
            # Find all matches of pattern in metric expression
            matches = re.findall(regex_pattern, metric_expression)

            print(matches)
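            # With the sample expression above this prints [''] — the capture
            # group is empty because "TP_" is immediately followed by the closing
            # quote; on a real expression it prints the suffixes after "TP_".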

            # get detailed info for service names
            import requests
            import json
            environment_id = "ENVIRONMENT_ID"
            api_token = "REDACTED"  # never commit real API tokens; load them from the environment instead
            headers = {"Authorization": "Api-Token {}".format(api_token)}

            # url = "https://xxu26128.live.dynatrace.com/api/v1/entity/services".format(environment_id)
            params = {"filter": "displayName LIKE 'TP_%'"}

            # response = requests.get(url, headers=headers, params=params)
            # response_json = json.loads(response.text)

            # for service in response_json["entities"]:
            #     display_name = service["displayName"]
            #     print("Service Name: {0}".format(display_name))


if __name__ == "__main__":
    main()
+++ b/dynatraceAPI.py
@@ -0,0 +1,40 @@
import logging
from typing import Dict
import os
import sys

file_dir = os.path.dirname(__file__)
sys.path.append(file_dir)

from more_utils.httpClient import HttpClient
import pagination


class Dynatrace:
    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
    ):
        self.__http_client = HttpClient(
            base_url, token, log, proxies, too_many_requests_strategy, retries, retry_delay_ms
        )

    def returnPagination(self, path, params, list_item):
        page = pagination.Pagination(self.__http_client, path, params, list_item=list_item)
        return page

    def returnSingle(self, path):
        response = self.__http_client.make_request(path)
        json_response = response.json()
        return json_response
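
# Example usage (hypothetical URL/token; the entry script drives this the same way):
#   dt = Dynatrace("https://abc12345.live.dynatrace.com", "dt0c01.SAMPLE")
#   slos = dt.returnPagination("/api/v2/slo", {"pageSize": 25}, "slo")
#   for slo in slos.elements:
#       print(slo["name"])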

+++ b/environment.yaml
@@ -0,0 +1,16 @@
---
euprod:
  - name: "emeaprod"
  - env-url: "https://xxu26128.live.dynatrace.com"
  - env-token-name: "EUPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
naprod:
  - name: "naprod"
  - env-url: "https://wgv50241.live.dynatrace.com"
  - env-token-name: "NAPROD_TOKEN_VAR"
  - jenkins: "https://jaws.bmwgroup.net/opapm/"
cnprod:
  - name: "cnprod"
  - env-url: "https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
  - env-token-name: "CNPROD_TOKEN_VAR"
  - jenkins: "https://jaws-china.bmwgroup.net/opmaas/"

+++ b/key_request_parser/helper.py
@@ -0,0 +1,22 @@
import requests


def get_request(url, headers, params):
    # try:
    response = requests.get(url, headers=headers, params=params, verify=False)
    response.raise_for_status()
    # except requests.exceptions.HTTPError as errh:
    #     return "An Http Error occurred:" + repr(errh)
    # except requests.exceptions.ConnectionError as errc:
    #     return "An Error Connecting to the API occurred:" + repr(errc)
    # except requests.exceptions.Timeout as errt:
    #     return "A Timeout Error occurred:" + repr(errt)
    # except requests.exceptions.RequestException as err:
    #     return "An Unknown Error occurred" + repr(err)

    return response


def contains(items, predicate):
    # True if any element of items satisfies the predicate
    for x in items:
        if predicate(x):
            return True
    return False
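
# Example: contains([1, 2, 3], lambda x: x > 2) -> True
#          contains([], lambda x: True) -> False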

+++ b/key_request_parser/keyrequests.py
@@ -0,0 +1,154 @@
try:
    # Python 3
    from collections.abc import MutableSequence
except ImportError:
    # Python 2.7
    from collections import MutableSequence


class KeyRequestGroup(MutableSequence):
    """A container for manipulating lists of service/method groups"""

    def __init__(self, data=None):
        """Initialize the class"""
        super(KeyRequestGroup, self).__init__()
        if data is not None:
            self._list = list(data)
        else:
            self._list = list()

    def __repr__(self):
        return "<{0} {1}>".format(self.__class__.__name__, self._list)

    def __len__(self):
        """List length"""
        return len(self._list)

    def __getitem__(self, ii):
        """Get a list item"""
        if isinstance(ii, slice):
            return self.__class__(self._list[ii])
        else:
            return self._list[ii]

    def __delitem__(self, ii):
        """Delete an item"""
        del self._list[ii]

    def __setitem__(self, ii, val):
        # optional: self._acl_check(val)
        self._list[ii] = val

    def __str__(self):
        return str(self._list)

    def createExistsQuery(self, val):
        query = "type(service_method)"

        val['services'] = list(map(lambda x: x.replace("~", ""), val['services']))
        val['methods'] = list(map(lambda x: x.replace("~", ""), val['methods']))

        # case: service names exist
        if len(val["services"]) > 0:
            if val["services"][0].startswith("SERVICE-"):
                query += ",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\"" + '","'.join(val["services"]) + "\"))"
            else:
                query += ",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\"" + '","'.join(val["services"]) + "\"))"

        if val["methods"][0].startswith("SERVICE_METHOD-"):
            query += ",entityId(\"" + '","'.join(val["methods"]) + "\")"
        else:
            query += ",entityName.in(\"" + '","'.join(val["methods"]) + "\")"

        val["existsQuery"] = query

    # def createServiceResolveQuery(self, val):
    #     query = "type(SERVICE)"
    #     val['services'] = list(map(lambda x: x.replace("~", ""), val['services']))

    #     if len(val["services"]) > 0:
    #         if val["services"][0].startswith("SERVICE-"):
    #             query += ",entityId(\"" + '","'.join(val["services"]) + "\")"
    #         else:
    #             query += ",entityName.in(\"" + '","'.join(val["services"]) + "\")"

    #     val["resolveServiceQuery"] = query

    def insert(self, ii, val):
        self.createExistsQuery(val)
        # self.createServiceResolveQuery(val)
        self._list.insert(ii, val)

    def append(self, val):
        # only append a group whose services/methods are not already present
        for group in self._list:
            if set(group["services"]) == set(val["services"]) and set(group["methods"]) == set(val["methods"]):
                return
        self.insert(len(self._list), val)


class KR:

    # def getNotExistingKeyRequests(self):
    #     return [k for k in self.keyRequests if k['exists'] == False]

    # def hasNotExistingKeyRequests(self):
    #     for k in self.keyRequests:
    #         if k['exists'] == False:
    #             return True

    #     return False

    # def getNoData1WKeyRequests(self):
    #     return [k for k in self.keyRequests if k['hasData_1W'] == False and k['exists'] == True]

    # def hasNoData1WKeyRequests(self):
    #     for k in self.keyRequests:
    #         if k['hasData_1W'] == False and k['exists'] == True:
    #             return True

    #     return False

    def getKeyRequestByHasData(self, label):
        return [k for k in self.keyRequests if k['hasData'][label] == False]

    def hasNoData(self, label):
        for k in self.keyRequests:
            if k['hasData'][label] == False:
                return True

        return False

    def checkKeyRequestsHasData(self):
        pass

    def mergeServices(self, listServices):
        listOfServiceIds = [o["entityId"] for o in self.services]

        for s in listServices:
            if s["entityId"] not in listOfServiceIds:
                self.services.append(s)

    def __init__(self,
                 metadata,
                 matchedGroups: KeyRequestGroup = None):
        self.metadata = metadata

        if matchedGroups is None:
            self.matchedGroups = KeyRequestGroup()
        else:
            self.matchedGroups = matchedGroups

        self.keyRequests = []
        self.services = []

+++ b/key_request_parser/krparser.py
@@ -0,0 +1,211 @@
import re

from key_request_parser import patterns, keyrequests, helper

from enum import Flag, auto


class KROption(Flag):
    VALIDATE_EXISTS = auto()
    VALIDATE_HASDATA = auto()
    RESOLVEKEYREQUESTS = auto()
    RESOLVESERVICES = auto()


class KRParser:
    patterns = [patterns.Pattern1(), patterns.Pattern2(), patterns.Pattern3()]

    def normalize(self, x):
        # strip newlines, tabs and quoting, and collapse whitespace around
        # parentheses and commas so the patterns can match in one pass
        # tmp = x.replace("~", "")
        tmp = x.replace("\n", "")
        # tmp = tmp.replace("\"/", "\"")
        # tmp = tmp.replace("\"/", "")  # -> was active
        # tmp = tmp.replace("/\"", "\"")
        tmp = tmp.replace("/\"", "")
        tmp = tmp.replace("\"", "")
        tmp = tmp.replace("\t", "")

        tmp = re.sub(r"([\s]*)\)", ")", tmp)

        tmp = re.sub(r"\([\s\n\r]*", "(", tmp)
        tmp = re.sub(r"\,[\s\n\r]*", ",", tmp)
        tmp = re.sub(r"\)[\s\n\r]*,", "),", tmp)
        tmp = re.sub(r"in[\s\n\r]*\(", "in(", tmp)

        return tmp
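
    # Example: normalize('entityName.in (\n\t"Svc A",\n\t"Svc B"\n)')
    # -> 'entityName.in(Svc A,Svc B)'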

    def applyPatterns(self, subject):
        groups = None
        for p in self.patterns:
            groups = p.parseServicesAndMethods(subject)

            if len(groups) > 0:
                break

        return groups

    def checkKeyRequestsHasData(self, kr, tfrom, DTAPIURL, DTAPIToken):

        DTAPIURL = DTAPIURL + "/api/v2/entities"

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTAPIToken
        }

        for gid, group in enumerate(kr.matchedGroups):
            params = {"entitySelector": group["existsQuery"], "from": tfrom["tfrom"], "fields": "fromRelationships"}
            response = helper.get_request(DTAPIURL, headers, params)
            entities = (response.json())['entities']

            if len(entities) > 0:
                for method in kr.keyRequests:
                    if method["groupId"] == gid:
                        found = [x for x in entities if x[method["comparer"]] == method[method["comparer"]]]

                        if len(found) > 0:
                            method["hasData"][tfrom["label"]] = True
                            # method["displayName"] = found[0]["displayName"]
                            # method["entityId"] = found[0]["entityId"]
                            # method["services"] = found[0]["fromRelationships"]["isServiceMethodOfService"]

                            # for idx, o in enumerate(method["services"]):
                            #     tmpS = [p for p in kr.services if p["entityId"] == o["id"]]
                            #     if len(tmpS) > 0:
                            #         method["services"][idx] = tmpS[0]

                        else:
                            method["hasData"][tfrom["label"]] = False

    def resolveServices(self, services, DTAPIURL, DTAPIToken):
        # DTAPIURL = DTAPIURL + "/api/v2/entities"

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTAPIToken
        }

        for gid, service in enumerate(services):
            query = "type(SERVICE),entityId(" + service["id"] + ")"
            params = {"entitySelector": query, "from": "now-2y"}
            response = helper.get_request(DTAPIURL, headers, params)
            entities = (response.json())['entities']

            if len(entities) > 0:
                services[gid] = entities[0]

    def resolveKeyRequests(self, kr, DTAPIURL, DTAPIToken, options):
        DTAPIURL = DTAPIURL + "/api/v2/entities"

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Api-Token ' + DTAPIToken
        }

        for gid, k in enumerate(kr.keyRequests):
            try:
                query = "type(service_method)"
                group = kr.matchedGroups[k["groupId"]]

                if len(group["services"]) > 0:
                    if group["services"][0].startswith("SERVICE-"):
                        query += ",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityId(\"" + '","'.join(group["services"]) + "\"))"
                    else:
                        query += ",fromRelationship.isServiceMethodOfService(type(\"SERVICE\"),entityName.in(\"" + '","'.join(group["services"]) + "\"))"

                if k["comparer"] == "entityId":
                    query += ",entityId(" + k["entityId"] + ")"
                else:
                    query += ",entityName.in(\"" + k["displayName"] + "\")"

                params = {"entitySelector": query, "from": "now-2y", "fields": "fromRelationships"}
                response = helper.get_request(DTAPIURL, headers, params)
                entities = (response.json())['entities']

                # if len(entities) > 1:
                #     kr.keyRequests[gid]['foundCount'] = len(entities)
                #     print("Multiple key requests found: ")

                if len(entities) > 0:
                    kr.keyRequests[gid]["found"] = True
                    kr.keyRequests[gid]['foundCount'] = len(entities)
                    kr.keyRequests[gid]["displayName"] = entities[0]["displayName"]
                    kr.keyRequests[gid]["entityId"] = entities[0]["entityId"]

                    if "isServiceMethodOfService" in entities[0]["fromRelationships"]:
                        kr.keyRequests[gid]["services"] = entities[0]["fromRelationships"]["isServiceMethodOfService"]

                        if KROption.RESOLVESERVICES in options and len(kr.keyRequests[gid]["services"]) > 0:
                            self.resolveServices(kr.keyRequests[gid]["services"], DTAPIURL, DTAPIToken)

            except Exception as err:
                kr.keyRequests[gid]["exception"] = "resolveKeyRequests failed: " + repr(err)

        # kr.mergeServices(entities)

    def process(self, kr):

        for gid, group in enumerate(kr.matchedGroups):
            for method in group["methods"]:
                if method.startswith('SERVICE_METHOD-'):
                    tmp = {"displayName": None, "comparer": "entityId", "entityId": method, "groupId": gid, "hasData": {}, "services": [], "found": False, "foundCount": 0, "exception": ""}  # "exists": None, 'hasData_1W': None,
                else:
                    tmp = {"displayName": method, "comparer": "displayName", "entityId": None, "groupId": gid, "hasData": {}, "services": [], "found": False, "foundCount": 0, "exception": ""}  # "exists": None, 'hasData_1W': None,

                kr.keyRequests.append(tmp)

        # if self.options and KROption.VALIDATE_EXISTS in self.options:
        #     self.checkKeyRequestsExists(kr, self.DTAPIURL, self.DTAPIToken)
        self.resolveKeyRequests(kr, self.DTAPIURL, self.DTAPIToken, self.options)

        if KROption.VALIDATE_HASDATA in self.options:
            self.checkKeyRequestsHasData(kr, {"label": "1W", "tfrom": "now-1w"}, self.DTAPIURL, self.DTAPIToken)
            self.checkKeyRequestsHasData(kr, {"label": "1M", "tfrom": "now-1M"}, self.DTAPIURL, self.DTAPIToken)
        # elif KROption.RESOLVEKEYREQUESTS in self.options:
        #     self.checkKeyRequestsHasData(kr, {"label": "1W", "tfrom": "now-1w"}, self.DTAPIURL, self.DTAPIToken)
        #     if KROption.RESOLVESERVICES in self.options:
        #         self.resolveServices(kr, self.DTAPIURL, self.DTAPIToken)

        return kr

    def parseBySLO(self, row):
        # normalize
        normFilter = self.normalize(row['filter'])
        normExpression = self.normalize(row['metricExpression'])

        tmp_KR = keyrequests.KR({"sloName": row["name"], "env": row["env"], "metricExpression": normExpression, "filter": normFilter, "matchedGroups": None})

        # SLO with filter
        if normFilter.upper().startswith("TYPE(SERVICE_METHOD),"):
            subject = normFilter
        else:
            subject = normExpression

        groups = self.applyPatterns(subject)

        for g in groups:
            if g["methods"] is not None and len(g["methods"]) > 0:
                tmp_KR.matchedGroups.append(g)

        return self.process(tmp_KR)

    def __init__(self, options: KROption, DTAPIURL, DTAPIToken):
        self.DTAPIURL = DTAPIURL
        self.DTAPIToken = DTAPIToken
        self.options = options

+++ b/key_request_parser/patterns.py
@@ -0,0 +1,88 @@
import re
import urllib.parse


class Pattern1:

    def parseServicesAndMethods(self, metricExpression):

        # earlier, pre-normalization variants of this regex, kept for reference:
        # result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*[\)]*", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        # result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*]*[\"]*)[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*([\"]*[^.*\)]*)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        # result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*\")[\s\n\r]*\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*\.[in]*[\s\n\r]*\([\s\n\r]*(\"[^.*]*)\)\"", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)

        # Encode ~...~-quoted segments so the characters inside them cannot break the match
        metricExpression = re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\(\"?service\"?\),entityName[\.]*[in]*\(([^\)]*)\)\)\,entityName[\.]*[in]*\(([^\)]*)\)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        # result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*fromRelationship[\s\n\r]*\.[\s\n\r]*isServiceMethodOfService[\s\n\r]*\([\s\n\r]*type\(\"?service\"?\)[\s\n\r]*,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*[\s\n\r]*\([\s\n\r]*([^\)]*)\)[\s\n\r]*\)[\s\n\r]*\,[\s\n\r]*entityName[\s\n\r]*[\.]*[\s\n\r]*[in]*\([\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)

        groups = []
        if result:
            for r in result:
                services = [s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
                methods = [s.strip() for s in urllib.parse.unquote_plus(r[1]).split(",")]
                groups.append({"services": services, "methods": methods})
        return groups
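
# Example: Pattern1 on a normalized expression like
#   'type(service_method),fromRelationship.isServiceMethodOfService(type(service),entityName.in(Svc A,Svc B)),entityName.in(M1,M2)'
# yields [{"services": ["Svc A", "Svc B"], "methods": ["M1", "M2"]}].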


class Pattern2:

    def parseServicesAndMethods(self, metricExpression):
        metricExpression = re.sub(r'~([A-Z0-9\:\<\>\_\$\.\s\-\,\(\),\[\]\\\\/*]*)~', lambda m: str(urllib.parse.quote_plus(m.group(1))), metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
        result = re.findall(r"type\(\"?service_method\"?\),fromRelationship\.isServiceMethodOfService\(type\([~]*service[~]*\),entityName[\.]*[in]*\(([^\)]*)\),tag\(([^\)]*)\)\),entityName[\.]*[in]*\(([^\)]*)\)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)

        groups = []
        if result:
            for r in result:
                services = [s.strip() for s in urllib.parse.unquote_plus(r[0]).split(",")]
                methods = [s.strip() for s in urllib.parse.unquote_plus(r[2]).split(",")]
                groups.append({"services": services, "methods": methods})

        return groups


class Pattern3:

    def parseServicesAndMethods(self, metricExpression):
        result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*entityId[\s\n\r]*[\s\n\r]*\([\s\n\r]*[\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)

        groups = []
        if result:
            for r in result:
                methods = [s.strip() for s in r.split(",")]
                groups.append({"services": [], "methods": methods})

        return groups


# class Pattern4:

#     def parseServicesAndMethods(self, metricExpression):
#         result = re.findall(r"service_method,([^\)]*)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)

#         groups = []
#         methods = []
#         if result:
#             for r in result:
#                 methods.append(r)

#             groups.append({"services": [], "methods": methods})

#         return groups


# class FilterMethodPattern:

#     def parseServicesAndMethods(self, metricExpression):
#         result = re.findall(r"type\(\"?service_method\"?\)[\s\n\r]*,[\s\n\r]*entityId[\s\n\r]*[\s\n\r]*\([\s\n\r]*[\s\n\r]*([^\)]*)[\s\n\r]*\)", metricExpression, flags=re.IGNORECASE|re.X|re.MULTILINE)
#         services = []
#         methods = []
#         if result:
#             for r in result:
#                 methods = [s.strip() for s in r.split(",")]

#         return services, methods

+++ b/more_utils/httpClient.py
@@ -0,0 +1,116 @@
import logging
from typing import Dict, Optional, Any
import time

import requests
import urllib3
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


TOO_MANY_REQUESTS_WAIT = "wait"


# Not sure where/why this is here (defined but never used below)
class DynatraceRetry(Retry):
    def get_backoff_time(self):
        return self.backoff_factor


class HttpClient:
    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
        # mc_jsession_id: Optional[str] = None,
        # mc_b925d32c: Optional[str] = None,
        # mc_sso_csrf_cookie: Optional[str] = None,
    ):
        while base_url.endswith("/"):
            base_url = base_url[:-1]
        self.base_url = base_url

        if proxies is None:
            proxies = {}
        self.proxies = proxies

        self.auth_header = {"Authorization": f"Api-Token {token}"}
        self.log = log
        if self.log is None:
            self.log = logging.getLogger(__name__)
            self.log.setLevel(logging.WARNING)
            st = logging.StreamHandler()
            fmt = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - %(thread)d - %(filename)s:%(lineno)d - %(message)s")
            st.setFormatter(fmt)
            self.log.addHandler(st)

        self.too_many_requests_strategy = too_many_requests_strategy
        retry_delay_s = retry_delay_ms / 1000

        try:
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                allowed_methods=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )
        except TypeError:  # older versions of urllib3 use method_whitelist instead of allowed_methods
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                method_whitelist=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )

        # This is for internal dynatrace usage
        # self.mc_jsession_id = mc_jsession_id
        # self.mc_b925d32c = mc_b925d32c
        # self.mc_sso_csrf_cookie = mc_sso_csrf_cookie

    def make_request(
        self, path: str, params: Optional[Any] = None, headers: Optional[Dict] = None, method="GET", data=None, files=None, query_params=None
    ) -> requests.Response:
        url = f"{self.base_url}{path}"

        body = None
        if method in ["POST", "PUT"]:
            body = params
            params = query_params

        if headers is None:
            headers = {}
        if files is None and "content-type" not in [key.lower() for key in headers.keys()]:
            headers.update({"content-type": "application/json"})
        headers.update(self.auth_header)

        cookies = None
        # if self.mc_b925d32c and self.mc_sso_csrf_cookie and self.mc_jsession_id:
        #     headers.update({"Cookie": f"JSESSIONID={self.mc_jsession_id}; ssoCSRFCookie={self.mc_sso_csrf_cookie}; b925d32c={self.mc_b925d32c}"})
        #     cookies = {"JSESSIONID": self.mc_jsession_id, "ssoCSRFCookie": self.mc_sso_csrf_cookie, "b925d32c": self.mc_b925d32c}

        s = requests.Session()
        s.mount("https://", HTTPAdapter(max_retries=self.retries))

        self.log.debug(f"Making {method} request to '{url}' with params {params} and body: {body}")
        r = s.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies, data=data, cookies=cookies, files=files)
        self.log.debug(f"Received response '{r}'")

        while r.status_code == 429 and self.too_many_requests_strategy == TOO_MANY_REQUESTS_WAIT:
            sleep_amount = int(r.headers.get("retry-after", 5))
            self.log.warning(f"Sleeping for {sleep_amount}s because we have received an HTTP 429")
            time.sleep(sleep_amount)
            r = requests.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies)

        if r.status_code >= 400:
            raise Exception(f"Error making request to {url}: {r}. Response: {r.text}")

        return r
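
# Example (hypothetical values): retry twice with a one-second delay and wait on HTTP 429
#   client = HttpClient("https://abc12345.live.dynatrace.com", "dt0c01.SAMPLE",
#                       too_many_requests_strategy=TOO_MANY_REQUESTS_WAIT,
#                       retries=2, retry_delay_ms=1000)
#   r = client.make_request("/api/v2/slo", params={"pageSize": 25})
#   print(r.json()["totalCount"])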

+++ b/pagination.py
@@ -0,0 +1,68 @@
from more_utils.httpClient import HttpClient


class Pagination():
    def __init__(self, http_client, target_url, target_params=None, headers=None, list_item="result"):
        # self.__target_class = target_class
        self.__http_client: HttpClient = http_client
        self.__target_url = target_url
        self.__target_params = target_params
        self.__headers = headers
        self.__list_item = list_item
        self._has_next_page = True
        self.__total_count = None
        self.__page_size = None
        self.elements = self._get_next_page()

    def __iter__(self):  # -> Iterator[T]:
        for element in self.elements:
            yield element

        while self._has_next_page:
            new_elements = self._get_next_page()
            for element in new_elements:
                yield element

    def __len__(self):
        return self.__total_count or len(self.elements)

    def _get_next_page(self):
        # fetch the first page, then keep following nextPageKey until exhausted
        elements = self._get_response()

        while self._has_next_page:
            self.__target_url = self.__target_url.split("?")[0]
            elements += self._get_response()

        return elements

    def _get_response(self):
        response = self.__http_client.make_request(self.__target_url, params=self.__target_params, headers=self.__headers)
        json_response = response.json()

        if json_response.get("nextPageKey", None):
            self._has_next_page = True
            self.__target_params = {"nextPageKey": json_response["nextPageKey"]}
        else:
            self._has_next_page = False

        elements = []
        if self.__list_item in json_response:
            elements = json_response[self.__list_item]
            self.__total_count = json_response.get("totalCount") or len(elements)

        return elements
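
# Example (hypothetical URL/token): eagerly collects every page of /api/v2/slo
#   client = HttpClient("https://abc12345.live.dynatrace.com", "dt0c01.SAMPLE")
#   pages = Pagination(client, "/api/v2/slo", {"pageSize": 25}, list_item="slo")
#   print(len(pages), "SLOs:", [s["name"] for s in pages.elements][:3])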

+++ b/requirements.txt
@@ -0,0 +1,5 @@
# Package mgmt
python-decouple
requests
pyyaml
pandas