master
Daniel Mikula 2023-04-25 15:50:51 +02:00
parent 2c7d991258
commit d7f0b62a57
1 changed file with 81 additions and 112 deletions


@@ -9,145 +9,114 @@ from datetime import datetime, timedelta
import datetime  # note: this makes `datetime` refer to the module, not the class from the `from datetime import ...` line above
import re

def getSLO(ENV, DTAPIToken, DTENV):
    """Fetch the SLOs of one Dynatrace environment and return them as a dataframe."""
    # DTENV = base URL of the Dynatrace environment
    # DTAPIToken = API security token
    dtclient = dynatraceAPI.Dynatrace(DTENV, DTAPIToken)
    my_params_report = {"pageSize": 25}
    # fetch all SLOs here; they are filtered by name later
    api_url_report = "/api/v2/slo"
    pages = dtclient.returnPageination(api_url_report, my_params_report, "slo")
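    # assumption: returnPageination is a project helper that walks the nextPageKey-based paging
    # of the Dynatrace /api/v2/slo endpoint and collects each page's "slo" entries into
    # pages.elements; only pages.elements is relied on below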
    df = pd.DataFrame(pages.elements)
    df["env"] = ENV
    return df

def previous_week_range(date: datetime):
    # Monday and Sunday of the calendar week before the given date
    start_date = date + timedelta(-date.weekday(), weeks=-1)
    end_date = date + timedelta(-date.weekday() - 1)
    return start_date, end_date
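
# worked example (illustrative): for date = datetime(2023, 4, 25), a Tuesday, weekday() is 1,
# so previous_week_range returns 2023-04-17 (Monday of the previous week) as start_date and
# 2023-04-23 (Sunday of the previous week) as end_date
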
def main():
    # Get All SLOs
    reportItem = {}
    with open("./environment.yaml") as file:
        env_doc = yaml.safe_load(file)

    for env, doc in env_doc.items():
        token = dict(doc[2])
        url = dict(doc[1])
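
        # assumed layout of environment.yaml (illustrative sketch only; the key names come from
        # the lookups in this loop, everything else about the file is an assumption):
        #
        # PROD:
        #   - ...                     # index 0 is not used in this script
        #   - env-url: https://abc12345.live.dynatrace.com
        #   - env-token-name: PROD_DT_TOKEN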

        if config(token.get("env-token-name")) != "":
            print("Gather data, hold on a minute")
            DTTOKEN = config(token.get("env-token-name"))
            DTURL = url.get("env-url")
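            # assumption: config() above is python-decouple's config helper, so the token value
            # itself is read from an environment variable / .env entry named by "env-token-name"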

            slosF = getSLO(env, DTTOKEN, DTURL)
            slosF = slosF[slosF["name"].str.startswith("TP_")]

            # parse each SLO's metric expression to get its services and requests
            krs = []
            krp = krparser.KRParser(
                krparser.KROption.VALIDATE_EXISTS
                | krparser.KROption.VALIDATE_HASDATA
                | krparser.KROption.RESOLVESERVICES,
                DTURL,
                DTTOKEN,
            )
            for index, row in slosF.iterrows():
                krs.append(krp.parseBySLO(row))
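
            # the only fields of the parsed results used below are kr.metadata["sloName"] and
            # kr.metadata["metricExpression"]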

            # target structure for reportItem: SLO name | service | process group | tags, e.g.
            # {
            #     "SLO 1": {
            #         "sloname": "SLO 1",
            #         "services": [
            #             {
            #                 "serviceName": "Service 1",
            #                 "processGroup": "Process Group 1",
            #                 "tags": ["tag1", "tag2"]
            #             },
            #             {
            #                 "serviceName": "Service 2",
            #                 "processGroup": "Process Group 2",
            #                 "tags": ["tag3", "tag4"]
            #             }
            #         ]
            #     },
            #     "SLO 2": {
            #         "sloname": "SLO 2",
            #         "services": [
            #             {
            #                 "serviceName": "Service 3",
            #                 "processGroup": "Process Group 3",
            #                 "tags": ["tag5", "tag6"]
            #             }
            #         ]
            #     }
            # }
            for kr in krs:
                reportItem[kr.metadata["sloName"]] = {}
                reportItem[kr.metadata["sloName"]]["sloname"] = kr.metadata["sloName"]
                reportItem[kr.metadata["sloName"]]["services"] = []
                # fill in the service names found in the SLO's metric expression
                # (process groups and tags are not resolved yet)
                services, request_names = extract_services_and_requests(
                    kr.metadata["metricExpression"]
                )  # request_names is not used in the report yet
                for service_name in services:
                    reportItem[kr.metadata["sloName"]]["services"].append(
                        {"serviceName": service_name}
                    )
            print(reportItem)
            # TODO: filter the SLO dataframe based on the reporting dates and the service names

def extract_services_and_requests(metric_expression: str):
    """Pull service and request names out of an SLO metric expression."""
    services = []
    requests_found = []

    # experiment: extract service names that start with "TP_" via the beginsWith() filter;
    # the hardcoded sample expression below was used for testing and is left commented out
    # so that the function works on the expression it is actually given:
    # metric_expression = 'sum(duration), filter(entity.service.name, beginsWith("TP_"))'
    regex_pattern = r'beginsWith\("TP_(.*?)"\)'
    matches = re.findall(regex_pattern, metric_expression)
    print(matches)
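    # e.g. for a filter such as beginsWith("TP_Checkout") (hypothetical name) the regex above
    # captures "Checkout", i.e. only the part after the "TP_" prefix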

    # get detailed info about the matched service names (experiment, currently disabled;
    # the imports are only needed once the request below is enabled again)
    # import requests
    # import json
    environment_id = "ENVIRONMENT_ID"
    api_token = "dt0c01.MBX344ELRJNDGSWGOSK3JBE5.KHJ6BLKLZ6UB6NKJQRXYINLDCKN36HGOK6ECYAOZ3REF2KHWM2NIBIKOITVXCWUT"
    headers = {"Authorization": "Api-Token {}".format(api_token)}
    # url = "https://xxu26128.live.dynatrace.com/api/v1/entity/services".format(environment_id)
    params = {"filter": "displayName LIKE 'TP_%'"}
    # response = requests.get(url, headers=headers, params=params)
    # response_json = json.loads(response.text)
    # for service in response_json["entities"]:
    #     display_name = service["displayName"]
    #     print("Service Name: {0}".format(display_name))

    # service and request names inside the metric expression are wrapped in tildes (~ XYZ ~)
    result = metric_expression.split("entityName.in")
    regex_pattern = r"~(.*?)~"
    method_list = ["GET", "POST", "http"]
    for chunk in result:
        matches = re.findall(regex_pattern, chunk)
        for match in matches:
            if match != "SERVICE":
                # a name containing an HTTP method (or "http") is counted as a request,
                # everything else is treated as a service name
                if any(ele in match for ele in method_list):
                    requests_found.append(match)
                else:
                    services.append(match)

    print(f"services: {services}")
    print("-" * 80)
    print(f"requests: {requests_found}")
    return services, requests_found
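
# worked example (hypothetical expression, only meant to illustrate the tilde parsing above):
# for 'type(SERVICE),entityName.in(~Checkout Service~),eq(Request,~GET /cart~)' the regex
# yields ["Checkout Service", "GET /cart"]; "GET /cart" contains "GET" and is counted as a
# request, while "Checkout Service" ends up in the services list
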
if __name__ == "__main__":
    main()