@ -14,6 +14,8 @@ import os
import dynatraceAPI
from pagination import Pagionation
from KRParser import krparser

warnings.filterwarnings("ignore")
# Warning: warnings are being ignored!
@ -37,21 +39,25 @@ def make_request(url, headers,verify,parameters):
    return response


'''
def previous_day_range(date):
    start_date = date - datetime.timedelta(days=1)
    end_date = date - datetime.timedelta(days=1)
    return start_date, end_date


def previous_week_range(date):
    start_date = date + datetime.timedelta(-date.weekday(), weeks=-1)
    end_date = date + datetime.timedelta(-date.weekday() - 1)
    return start_date, end_date


def previous_month_range(date):
    end_date = date.replace(day=1) - datetime.timedelta(days=1)
    start_date = end_date.replace(day=1)
    return start_date, end_date
'''
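# Illustrative sketch of the commented-out range helpers above, assuming they are
# re-enabled as written; the concrete date below is only an example.
import datetime

today = datetime.date(2023, 3, 15)  # a Wednesday

# previous calendar day
prev_day = today - datetime.timedelta(days=1)                         # 2023-03-14

# previous calendar week (Monday .. Sunday)
week_start = today + datetime.timedelta(-today.weekday(), weeks=-1)   # 2023-03-06
week_end = today + datetime.timedelta(-today.weekday() - 1)           # 2023-03-12

# previous calendar month (first .. last day)
month_end = today.replace(day=1) - datetime.timedelta(days=1)         # 2023-02-28
month_start = month_end.replace(day=1)                                # 2023-02-01

print(prev_day, week_start, week_end, month_start, month_end)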


def getSLO(DTAPIToken, DTENV, fromDate, toDate):
@ -121,6 +127,11 @@ def getSLO(DTAPIToken, DTENV, fromDate, toDate, selector_var, selector_type):
    return df


def get_metric(DTAPIToken, DTENV, fromDate, toDate, metricExpression):
    print(f"here: {len(metricExpression)}")
    print(f"{metricExpression[:-1]}:timeshift(-7d))")
    return metricExpression
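# Minimal sketch of how a week-over-week selector could be derived from an SLO's
# metricExpression, mirroring the print above: the timeshift transform is inserted
# before the expression's closing parenthesis. The sample expression is an assumption,
# not taken from a real environment.
def with_timeshift(metric_expression: str, shift: str = "-7d") -> str:
    # drop the trailing ')' and re-append it after the timeshift transform
    return f"{metric_expression[:-1]}:timeshift({shift}))"

sample = "(100)*(builtin:service.errors.server.successCount:splitBy())"
print(with_timeshift(sample))
# -> (100)*(builtin:service.errors.server.successCount:splitBy():timeshift(-7d))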


def get_daily_slice(start_date, end_date):
    tempstart = start_date
@ -145,13 +156,16 @@ def get_daily_slice(start_date, end_date):
    return days
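# A sketch of the kind of frame get_daily_slice() is expected to return: one row per
# calendar day with epoch-millisecond 'startTime'/'endTime' columns. The column names
# come from how get_one_slice() consumes the slices; the exact layout is inferred.
import datetime
import time
import pandas as pd

def daily_slices(start_date: datetime.date, end_date: datetime.date) -> pd.DataFrame:
    rows = []
    day = start_date
    while day <= end_date:
        day_start = datetime.datetime(day.year, day.month, day.day)
        day_end = datetime.datetime.combine(day, datetime.datetime.max.time())
        rows.append({
            "Date": day,
            "startTime": int(time.mktime(day_start.timetuple()) * 1000),
            "endTime": int(time.mktime(day_end.timetuple()) * 1000),
        })
        day += datetime.timedelta(days=1)
    return pd.DataFrame(rows)

print(daily_slices(datetime.date(2023, 3, 1), datetime.date(2023, 3, 3)))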


def get_hourly_slice(start_date, end_date):
    # date object to datetime
    tempstart = datetime.datetime(start_date.year,start_date.month,start_date.day)
    tempstart = datetime.datetime(
        start_date.year, start_date.month, start_date.day)

    # date object to datetime
    final_end = datetime.datetime.combine(end_date,datetime.datetime.max.time())
    final_end = datetime.datetime.combine(
        end_date, datetime.datetime.max.time())
    hours = pd.DataFrame()

    # Add the first slice
@ -174,6 +188,7 @@ def get_hourly_slice(start_date, end_date):
    return hours
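# Side note on the datetime handling above: combining a date with datetime.max.time()
# yields the very end of that day, which the hourly slicing uses as its final boundary.
# Small self-contained check (the date is an example):
import datetime

end_date = datetime.date(2023, 3, 1)
final_end = datetime.datetime.combine(end_date, datetime.datetime.max.time())
print(final_end)  # 2023-03-01 23:59:59.999999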


def init_argparse():
    parser = argparse.ArgumentParser(
        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
@ -196,9 +211,9 @@ def init_argparse():
        help="h | d | t | y - writes the hourly, daily, total or year-to-date slices into Excel; may be given in any order"
    )

    return parser
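# A rough sketch of how the --slices flag defined above is consumed later in the
# script (see the 'h'/'d'/'t'/'y' checks in main()); the default value and the
# argument set here are illustrative assumptions, not the script's full parser.
import argparse

_parser = argparse.ArgumentParser()
_parser.add_argument("--slices", default="hdty",
                     help="any combination of h, d, t, y")
_args = _parser.parse_args(["--slices", "dy"])

for flag, label in [("h", "hourly"), ("d", "daily"), ("t", "total"), ("y", "year to date")]:
    if flag in _args.slices.lower():
        print(f"{label} slice will be calculated")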


def check_inputs(args):
    '''
    This function is the single point of truth for arguments. If new arguments are added, they need to be handled here. Returns the from and to dates.
@ -260,46 +275,64 @@ def check_inputs(args):


def get_one_slice(item, DTTOKEN, DTURL, slice, out_df, selector_var, selector_type):
    ###Calc daily SLO
    # Calc daily SLO
    df = pd.DataFrame()
    for index, row in slice.iterrows():
        num_probs = len(slice)
        percentage = str(round((100*(index+1))/num_probs, 2)).split(".")
        print("{:0>4d} of {:0>4d} = {:0>3d}.{:0>2d} %".format(index+1, num_probs, int(percentage[0]), int(percentage[1])), end='\r')
        temp_df = getSLO(DTTOKEN,DTURL,row['startTime'],row['endTime'], selector_var, selector_type)
        print("{:0>4d} of {:0>4d} = {:0>3d}.{:0>2d} %".format(
            index+1, num_probs, int(percentage[0]), int(percentage[1])), end='\r')
        temp_df = getSLO(
            DTTOKEN, DTURL, row['startTime'], row['endTime'], selector_var, selector_type)
        temp_df['Date'] = row['Date']
        temp_df['HUB'] = item

        # new metric expression
        # temp_df["newMetricExpression"] = get_metric(
        #     DTTOKEN, DTURL, row["startTime"], row["endTime"], temp_df["metricExpression"])
        # print(temp_df)
        temp_df["newMetricExpression"] = temp_df["metricExpression"].apply(
            lambda x: get_metric(DTTOKEN, DTURL, row["startTime"], row["endTime"], x))

        df = pd.concat([df, temp_df], ignore_index=True)

    # sort columns in a try block - if the API returns columns that do not exist, this will not fail the script
    try:
        df[['description','Touchpoint']] = df['description'].str.split('_',expand=True)
        df[['description', 'Touchpoint']
           ] = df['description'].str.split('_', expand=True)
    except Exception as e:
        print(f"This error was encountered: {e}")
    try:
        df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description', 'Touchpoint', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
        df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description', 'Touchpoint', 'evaluatedPercentage', 'errorBudget',
                 'status', 'error', 'target', 'warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
    except Exception as e:
        print("Could not rearrange columns: " + str(e))
    out_df = pd.concat([out_df, df], ignore_index=True)

    print()  # newline to remove \r from progress bar
    return out_df
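# Small, self-contained illustration of the description/Touchpoint split used above:
# str.split('_', expand=True) turns "description_touchpoint" strings into two columns.
# The sample descriptions are made up for the example.
import pandas as pd

_df = pd.DataFrame({"description": ["Checkout_Web", "Login_App"]})
_df[['description', 'Touchpoint']] = _df['description'].str.split('_', expand=True)
print(_df)  # two columns: description ('Checkout', 'Login') and Touchpoint ('Web', 'App')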


def get_slice_ytd_total(DTTOKEN, DTURL, item, start_date, end_date, time_name, time_val, out_df, selector_var, selector_type):
    df = getSLO(DTTOKEN,DTURL,start_date,end_date, selector_var, selector_type)
    df = getSLO(DTTOKEN, DTURL, start_date, end_date,
                selector_var, selector_type)
    df[time_name] = time_val
    df['HUB'] = item
    try:
        df[['description','Touchpoint']] = df['description'].str.split('_',expand=True)
        df[['description', 'Touchpoint']
           ] = df['description'].str.split('_', expand=True)
    except Exception as e:
        print(f"This error was encountered: {e}")
    try:
        df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description','Touchpoint', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
        df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description', 'Touchpoint', 'evaluatedPercentage', 'errorBudget',
                 'status', 'error', 'target', 'warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
    except Exception as e:
        print("Could not rearrange columns: " + str(e))

    out_df = pd.concat([out_df, df], ignore_index=True)
    return out_df


def load_slo_parameter(path):
    # the first part reads a yaml and only selects the latest, valid config
    mandatory_fields = ['hub', 'selector_type', 'selector_var', 'yearstart']
@ -318,16 +351,20 @@ def load_slo_parameter(path):
            # yearstart = datetime.date.fromisoformat(tmp_dict['yearstart'])
            # python <3.7
            yearstart = datetime.datetime.strptime(tmp_dict['yearstart'], "%Y-%m-%d")
            yearstart = datetime.datetime.strptime(
                tmp_dict['yearstart'], "%Y-%m-%d")

            # common code
            yearstart = datetime.datetime(yearstart.year, yearstart.month, yearstart.day)
            yearstart = datetime.datetime(
                yearstart.year, yearstart.month, yearstart.day)
            yearstart = time.mktime(yearstart.timetuple()) * 1000

            selector_type = tmp_dict['selector_type'] # name if exact name is wanted
            # name if exact name is wanted
            selector_type = tmp_dict['selector_type']
            selector_var = tmp_dict['selector_var']
            hub = tmp_dict['hub']
            all_yaml_configs.append([hub, selector_type, selector_var, yearstart, header_name])
            all_yaml_configs.append(
                [hub, selector_type, selector_var, yearstart, header_name])
        else:
            print(f"SLO configuration {header_name} is broken")
    return all_yaml_configs
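# A quick check of the 'yearstart' conversion above: the ISO date from the yaml is
# parsed with strptime (kept for Python < 3.7 compatibility) and then turned into
# epoch milliseconds, which is the form the script passes on as a query start time.
# The sample value is an assumption.
import datetime
import time

yearstart_str = "2023-01-01"
yearstart = datetime.datetime.strptime(yearstart_str, "%Y-%m-%d")
yearstart_ms = time.mktime(yearstart.timetuple()) * 1000
print(int(yearstart_ms))  # local-time epoch in milliseconds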
@ -343,25 +380,35 @@ def write_slo_to_excel(args, fromDate, hourlyall, dailyall, totalall, ytd):
    fileName = "./QM_Report_" + str(fromDate.isocalendar()[1]) + ".xlsx"

    writer = pd.ExcelWriter(fileName)
    workbook = writer.book

    if not totalall.empty and 't' in str.lower(args.slices):
        totalall = totalall[totalall['Touchpoint'].isin(touchpoints)]
        totalall.to_excel(writer, sheet_name='total')
        totalall.to_excel(writer, sheet_name='total', index=False)
        worksheet = writer.sheets['total']
        worksheet.autofilter(0, 0, totalall.shape[0], totalall.shape[1])

    if not dailyall.empty and 'd' in str.lower(args.slices):
        dailyall = dailyall[dailyall['Touchpoint'].isin(touchpoints)]
        dailyall.to_excel(writer, sheet_name='daily')
        dailyall.to_excel(writer, sheet_name='daily', index=False)
        worksheet = writer.sheets['daily']
        worksheet.autofilter(0, 0, dailyall.shape[0], dailyall.shape[1])

    if not hourlyall.empty and 'h' in str.lower(args.slices):
        hourlyall = hourlyall[hourlyall['Touchpoint'].isin(touchpoints)]
        hourlyall.to_excel(writer, sheet_name='hourly')
        hourlyall.to_excel(writer, sheet_name='hourly', index=False)
        worksheet = writer.sheets['hourly']
        worksheet.autofilter(0, 0, hourlyall.shape[0], hourlyall.shape[1])

    if not ytd.empty and 'y' in str.lower(args.slices):
        ytd = ytd[ytd['Touchpoint'].isin(touchpoints)]
        ytd.to_excel(writer, sheet_name='YTD')
        ytd.to_excel(writer, sheet_name='YTD', index=False)
        worksheet = writer.sheets['YTD']
        worksheet.autofilter(0, 0, ytd.shape[0], ytd.shape[1])

    workbook.close()
    writer.save()
    writer.close()
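# Minimal, self-contained sketch of the Excel output pattern above, assuming the
# xlsxwriter engine is installed: write a frame to a named sheet without the index,
# then add an autofilter over the written range via the underlying worksheet object.
# The frame contents and file name are placeholders.
import pandas as pd

_df = pd.DataFrame({"Touchpoint": ["Web", "App"], "status": ["SUCCESS", "WARNING"]})
with pd.ExcelWriter("example_report.xlsx", engine="xlsxwriter") as _writer:
    _df.to_excel(_writer, sheet_name="total", index=False)
    _worksheet = _writer.sheets["total"]
    _worksheet.autofilter(0, 0, _df.shape[0], _df.shape[1] - 1)  # header row + data rows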


def main(slo_path):
    start_timer = time.time()
@ -372,7 +419,6 @@ def main(slo_path):
    print("fromDate: " + str(fromDate))
    print("toDate: " + str(toDate))

    # days = get_daily_slice(fromDate,toDate)
    days = get_daily_slice(fromDate, toDate)
    hours = get_hourly_slice(fromDate, toDate)
@ -388,10 +434,12 @@ def main(slo_path):
    for one_slo_config in slo_configs:
        hub, selector_type, selector_var, yearstart, header_name = one_slo_config
        print(f"For the SLO config, '{slo_path}' was used with the config '{header_name}'.")
        print(
            f"For the SLO config, '{slo_path}' was used with the config '{header_name}'.")
        for item, doc in env_doc.items():
            if not item in hub:
                print(f"{item} will be skipped since it is not in {hub}, which was selected in {slo_path}")
                print(
                    f"{item} will be skipped since it is not in {hub}, which was selected in {slo_path}")
                continue
            token = dict(doc[2])
            url = dict(doc[1])
@ -402,22 +450,43 @@ def main(slo_path):
                DTTOKEN = config(token.get('env-token-name'))
                DTURL = url.get('env-url')

                ###Calc daily SLO
                # key request parser start
                krp = krparser.KRParser(
                    name=item,
                    options=krparser.KROption.RESOLVESERVICES,
                    config={
                        "threads": 10,
                        "serviceLookupParams": {"fields": "tags,fromRelationships"},
                        "extendResultObjects": {"env": item},
                    },
                    DTAPIURL=DTURL,
                    DTAPIToken=DTTOKEN,
                )

                # TODO: Pass krp down to parse SLOs and get metrics via /metrics/query

                # Calc daily SLO
                if 'd' in str.lower(args.slices):
                    dailyall = get_one_slice(item, DTTOKEN, DTURL, days, dailyall, selector_var, selector_type)
                    dailyall = get_one_slice(
                        item, DTTOKEN, DTURL, days, dailyall, selector_var, selector_type)
                # Calc hourly SLO
                if 'h' in str.lower(args.slices):
                    hourlyall = get_one_slice(item, DTTOKEN, DTURL, hours, hourlyall, selector_var, selector_type)
                    ###Calc Overall YTD SLO
                    hourlyall = get_one_slice(
                        item, DTTOKEN, DTURL, hours, hourlyall, selector_var, selector_type)
                # Calc Overall YTD SLO
                if 'y' in str.lower(args.slices):
                    ytd = get_slice_ytd_total(DTTOKEN,DTURL,item, yearstart, days['endTime'].max(), 'Date', fromDate.year, ytd, selector_var, selector_type)
                    ###Calc Overall SLO
                    ytd = get_slice_ytd_total(DTTOKEN, DTURL, item, yearstart, days['endTime'].max(
                    ), 'Date', fromDate.year, ytd, selector_var, selector_type)
                # Calc Overall SLO
                if 't' in str.lower(args.slices):
                    totalall = get_slice_ytd_total(DTTOKEN,DTURL,item, days['startTime'].min(), days['endTime'].max(), 'Date', fromDate.isocalendar()[1], totalall, selector_var, selector_type)
                    totalall = get_slice_ytd_total(DTTOKEN, DTURL, item, days['startTime'].min(), days['endTime'].max(
                    ), 'Date', fromDate.isocalendar()[1], totalall, selector_var, selector_type)
            else:
                print("token not found, skipping " + item)
    write_slo_to_excel(args, fromDate, hourlyall, dailyall, totalall, ytd)
    write_slo_to_excel(args, fromDate, hourlyall,
                       dailyall, totalall, ytd)
    print("It took {} seconds to run this script".format(time.time()-start_timer))


if __name__ == "__main__":
    main('./slo_parameter.yaml')