adding new script
parent
e8442877fe
commit
5cf11581fa
|
|
@ -77,7 +77,7 @@
|
||||||
stage('Execute Reporting Script') {
|
stage('Execute Reporting Script') {
|
||||||
steps {
|
steps {
|
||||||
|
|
||||||
sh 'python createReport.py'
|
sh 'python createReport.py -p week'
|
||||||
print env.FROMDATE
|
print env.FROMDATE
|
||||||
//Only required once CN is not reachable from EMEA
|
//Only required once CN is not reachable from EMEA
|
||||||
//loopEnvironments(environments)
|
//loopEnvironments(environments)
|
||||||
|
|
|
||||||
212
createReport.py
212
createReport.py
|
|
@ -1,9 +1,32 @@
|
||||||
from decouple import config
|
from decouple import config
|
||||||
|
import sys
|
||||||
import yaml
|
import yaml
|
||||||
import requests
|
import datetime
|
||||||
import json
|
import time
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
from datetime import datetime
|
import requests
|
||||||
|
import openpyxl
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
def init_argparse() -> argparse.ArgumentParser:
    """Create the command-line parser for the reporting script.

    The script accepts either an explicit date range (``--fromDate``
    plus ``--toDate``) or a relative preset (``--preSelect week|month``).
    """
    arg_parser = argparse.ArgumentParser(
        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
        description="gather SLO in daily slices for given Timeframe",
    )
    # (short flag, long flag, help text) for every supported option.
    option_specs = (
        ("-f", "--fromDate", "YYYY-mm-dd e.g. 2022-01-01"),
        ("-t", "--toDate", "YYYY-mm-dd e.g. 2022-01-31"),
        ("-p", "--preSelect",
         "week | month - gathers the data for the last full week or month"),
    )
    for short_flag, long_flag, help_text in option_specs:
        arg_parser.add_argument(short_flag, long_flag, help=help_text)
    return arg_parser
|
||||||
|
|
||||||
def make_request(url, headers,verify,parameters):
|
def make_request(url, headers,verify,parameters):
|
||||||
try:
|
try:
|
||||||
|
|
@ -20,8 +43,18 @@ def make_request(url, headers,verify,parameters):
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
|
def previous_week_range(date):
    """Return (Monday, Sunday) of the calendar week before *date*'s week."""
    # Monday of the week that contains *date*.
    monday_this_week = date - datetime.timedelta(days=date.weekday())
    week_start = monday_this_week - datetime.timedelta(weeks=1)
    week_end = monday_this_week - datetime.timedelta(days=1)
    return week_start, week_end
|
||||||
|
|
||||||
|
def previous_month_range(date):
    """Return (first day, last day) of the calendar month before *date*."""
    last_of_previous = date.replace(day=1) - datetime.timedelta(days=1)
    first_of_previous = last_of_previous.replace(day=1)
    return first_of_previous, last_of_previous
|
||||||
|
|
||||||
|
def getSLO(DTAPIToken, DTENV, fromDate, toDate):
|
||||||
|
|
||||||
env = DTENV
|
env = DTENV
|
||||||
DTAPIToken = DTAPIToken
|
DTAPIToken = DTAPIToken
|
||||||
|
|
||||||
|
|
@ -35,54 +68,131 @@ def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'Authorization': 'Api-Token ' + DTAPIToken
|
'Authorization': 'Api-Token ' + DTAPIToken
|
||||||
}
|
}
|
||||||
date_report = date_from
|
|
||||||
|
|
||||||
days = (date_to - date_from).days
|
parameters = {
|
||||||
print (str(days) + " days within reproting period")
|
"pageSize": 25,
|
||||||
writer = pd.ExcelWriter(friendlyName+'QM_.xlsx')
|
"from": int(fromDate),
|
||||||
while (date_report<=date_to):
|
"to": int(toDate),
|
||||||
date_report_end = date_report + datetime.timedelta(hours=days*24)
|
"timeFrame": "GTF",
|
||||||
millisec_date_report = date_report.timestamp() * 1000
|
"evaluate": True,
|
||||||
millisec_date_report_end = date_report_end.timestamp() * 1000
|
"sloSelector": "text(\"CoCo-QM-Report\")"
|
||||||
|
}
|
||||||
|
r = make_request(DTAPIURL,headers=headers,parameters=parameters,verify=verify)
|
||||||
parameters = {
|
|
||||||
"pageSize": 25,
|
|
||||||
"from": int(millisec_date_report),
|
|
||||||
"to": int(millisec_date_report_end),
|
|
||||||
"timeFrame": "GTF",
|
|
||||||
"evaluate": True,
|
|
||||||
"sloSelector": "text(\"CoCo-QM-Report\")"
|
|
||||||
}
|
|
||||||
|
|
||||||
r = make_request(DTAPIURL,headers,verify,parameters)
|
|
||||||
content = r.json()
|
|
||||||
|
|
||||||
df = pd.json_normalize(content['slo'])
|
|
||||||
|
|
||||||
# Save to excel file/sheet
|
|
||||||
df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])
|
|
||||||
|
|
||||||
date_report = date_report + datetime.timedelta(hours=days*24)
|
|
||||||
|
|
||||||
writer.save()
|
df = pd.json_normalize(r.json()['slo'])
|
||||||
|
|
||||||
|
|
||||||
with open('./environment.yaml') as file:
|
return df
|
||||||
doc = yaml.safe_load(file)
|
|
||||||
|
|
||||||
for item, doc in doc.items():
|
def get_daily_slice(start_date, end_date):
    """Split [start_date, end_date] into one row per day.

    Returns a DataFrame with columns ``Date`` (the day), ``startTime``
    and ``endTime`` (Unix epoch milliseconds, local time, for the start
    of the day and of the following day).  Both boundary dates are
    included; if ``end_date`` is not after ``start_date`` a single row
    is returned, matching the original behaviour.
    """
    rows = []
    current = start_date
    while True:
        next_day = current + datetime.timedelta(hours=24)
        rows.append({
            'Date': current,
            'startTime': time.mktime(current.timetuple()) * 1000,
            'endTime': time.mktime(next_day.timetuple()) * 1000,
        })
        if current >= end_date:
            break
        current = next_day
    # BUG FIX: DataFrame.append was removed in pandas 2.0; building the
    # frame from a list of dicts in one go is both compatible and O(n).
    return pd.DataFrame(rows)
|
||||||
|
|
||||||
|
def main() -> None:
    """Entry point: resolve the reporting window, then export one Excel
    workbook per configured Dynatrace environment.

    The window comes either from --fromDate/--toDate (ISO dates) or from
    --preSelect week|month (the last full week/month).  For every
    environment in environment.yaml whose API token is present, SLOs are
    fetched in daily slices and written to <environment>.xlsx.
    """
    parser = init_argparse()
    args = parser.parse_args()

    # --- argument validation ------------------------------------------
    if args.preSelect and (args.fromDate or args.toDate):
        print("--preSelect must not be used in conjunction with --fromDate and/or --toDate")
        sys.exit()

    elif args.fromDate and not args.toDate:
        print("--fromDate only in conjunction with --toDate")
        sys.exit()

    elif args.toDate and not args.fromDate:
        print("--toDate only in conjunction with --fromDate")
        sys.exit()

    elif args.toDate and args.fromDate and not args.preSelect:
        try:
            fromDate = datetime.date.fromisoformat(args.fromDate)
            toDate = datetime.date.fromisoformat(args.toDate)
        except ValueError as e:
            # fromisoformat raises ValueError on malformed dates.
            print("Program closed: " + str(e))
            sys.exit()

        if toDate < fromDate:
            print("--toDate can't be older than --fromDate")
            sys.exit()

        if toDate > datetime.date.today() or fromDate > datetime.date.today():
            print("--toDate or --fromDate can't be in the future")
            sys.exit()

    elif args.preSelect and not args.fromDate and not args.toDate:
        today = datetime.date.today()
        if args.preSelect == "week":
            fromDate, toDate = previous_week_range(today)
        elif args.preSelect == "month":
            fromDate, toDate = previous_month_range(today)
        else:
            print("--preSelect must be week or month")
            sys.exit()

    else:
        print("Invalid arguments, please use --help")
        sys.exit()

    print("fromDate: " + str(fromDate))
    print("toDate: " + str(toDate))

    days = get_daily_slice(fromDate, toDate)

    with open('./environment.yaml') as file:
        environments = yaml.safe_load(file)

    # NOTE(review): each top-level yaml entry is assumed to be a list
    # whose index 1 holds the url mapping and index 2 the token mapping
    # -- confirm against environment.yaml.
    for item, env_doc in environments.items():
        token = dict(env_doc[2])
        url = dict(env_doc[1])
        print("Crawling through: " + item)
        print("Check if token exists in environment...")
        if (config(token.get('env-token-name')) != ""):
            print("Gather data, hold on a minute")
            DTTOKEN = config(token.get('env-token-name'))
            DTURL = url.get('env-url')

            # Fetch every daily slice and stack the results.
            df = pd.DataFrame()
            for index, row in days.iterrows():
                temp_df = getSLO(DTTOKEN, DTURL, row['startTime'], row['endTime'])
                temp_df['Date'] = row['Date']
                df = pd.concat([df, temp_df], ignore_index=True)

            # Sort columns in a try block - if the API returns unexpected
            # columns this must not fail the whole script.
            try:
                df = df[['Date', 'id', 'enabled', 'name', 'description', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target', 'warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
            except Exception as e:
                # BUG FIX: the original concatenated str + Exception, which
                # itself raised a TypeError inside the handler.
                print("Could not rearrange columns: " + str(e))

            # BUG FIX: ExcelWriter.save() was removed in pandas 2.0; the
            # context manager closes (and saves) the workbook reliably.
            with pd.ExcelWriter("./" + item + '.xlsx') as writer:
                df.to_excel(writer, sheet_name=str(item).split(" ")[0])
        else:
            print("token not found, skipping " + item)


if __name__ == "__main__":
    main()
|
||||||
|
|
@ -4,4 +4,5 @@ pandas
|
||||||
requests
|
requests
|
||||||
datetime
|
datetime
|
||||||
argparse
|
argparse
|
||||||
openpyxl
|
openpyxl
|
||||||
|
# NOTE: 'argparse' is part of the Python standard library and is already listed above; duplicate entry removed
|
||||||
199
utils.py
199
utils.py
|
|
@ -1,199 +0,0 @@
|
||||||
from tracemalloc import start
|
|
||||||
from decouple import config
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
import datetime
|
|
||||||
import time
|
|
||||||
import pandas as pd
|
|
||||||
import requests
|
|
||||||
import openpyxl
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
def init_argparse() -> argparse.ArgumentParser:
    """Create the command-line parser for the reporting script.

    The script accepts either an explicit date range (``--fromDate``
    plus ``--toDate``) or a relative preset (``--preSelect week|month``).
    """
    arg_parser = argparse.ArgumentParser(
        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
        description="gather SLO in daily slices for given Timeframe",
    )
    # (short flag, long flag, help text) for every supported option.
    option_specs = (
        ("-f", "--fromDate", "YYYY-mm-dd e.g. 2022-01-01"),
        ("-t", "--toDate", "YYYY-mm-dd e.g. 2022-01-31"),
        ("-p", "--preSelect",
         "week | month - gathers the data for the last full week or month"),
    )
    for short_flag, long_flag, help_text in option_specs:
        arg_parser.add_argument(short_flag, long_flag, help=help_text)
    return arg_parser
|
|
||||||
|
|
||||||
def make_request(url, headers, verify, parameters):
    """Perform a GET request and return the ``requests.Response``.

    Parameters
    ----------
    url : str
        Fully qualified endpoint URL.
    headers : dict
        HTTP headers (content type and API token).
    verify : bool
        Whether to verify the server's TLS certificate.
    parameters : dict
        Query-string parameters.

    Raises
    ------
    requests.exceptions.RequestException
        On HTTP errors, connection failures or timeouts.  BUG FIX: the
        original *returned* a diagnostic string here instead of a
        Response, which made callers fail later with a confusing
        ``AttributeError: 'str' object has no attribute 'json'``;
        printing the diagnostic and re-raising surfaces the real
        problem at its source.
    """
    try:
        response = requests.get(url, headers=headers, verify=verify, params=parameters)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        print("An Http Error occurred:" + repr(errh))
        raise
    except requests.exceptions.ConnectionError as errc:
        print("An Error Connecting to the API occurred:" + repr(errc))
        raise
    except requests.exceptions.Timeout as errt:
        print("A Timeout Error occurred:" + repr(errt))
        raise
    except requests.exceptions.RequestException as err:
        print("An Unknown Error occurred" + repr(err))
        raise

    return response
|
|
||||||
|
|
||||||
def previous_week_range(date):
    """Return (Monday, Sunday) of the calendar week before *date*'s week."""
    # Monday of the week that contains *date*.
    monday_this_week = date - datetime.timedelta(days=date.weekday())
    week_start = monday_this_week - datetime.timedelta(weeks=1)
    week_end = monday_this_week - datetime.timedelta(days=1)
    return week_start, week_end
|
|
||||||
|
|
||||||
def previous_month_range(date):
    """Return (first day, last day) of the calendar month before *date*."""
    last_of_previous = date.replace(day=1) - datetime.timedelta(days=1)
    first_of_previous = last_of_previous.replace(day=1)
    return first_of_previous, last_of_previous
|
|
||||||
|
|
||||||
def getSLO(DTAPIToken, DTENV, fromDate, toDate):
    """Fetch evaluated SLOs tagged ``CoCo-QM-Report`` for one time window.

    Parameters
    ----------
    DTAPIToken : str
        Dynatrace API token.
    DTENV : str
        Base URL of the Dynatrace environment.
    fromDate, toDate : float or int
        Window boundaries as Unix timestamps in milliseconds.

    Returns
    -------
    pandas.DataFrame
        One row per SLO, normalised from the ``slo`` array of the API
        response.

    Note: the dead no-op self-assignment ``DTAPIToken = DTAPIToken``
    from the original has been removed; behaviour is unchanged.
    """
    env = DTENV

    # Managed clusters (URL contains 'dynatracemgd') presumably use
    # self-signed certificates, so TLS verification is disabled for
    # them -- TODO confirm.
    if (env.find('dynatracemgd') != -1):
        verify = False
    else:
        verify = True

    DTAPIURL = env + "/api/v2/slo"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }

    parameters = {
        "pageSize": 25,
        "from": int(fromDate),
        "to": int(toDate),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")"
    }
    r = make_request(DTAPIURL, headers=headers, parameters=parameters, verify=verify)

    df = pd.json_normalize(r.json()['slo'])

    return df
|
|
||||||
|
|
||||||
def get_daily_slice(start_date, end_date):
    """Split [start_date, end_date] into one row per day.

    Returns a DataFrame with columns ``Date`` (the day), ``startTime``
    and ``endTime`` (Unix epoch milliseconds, local time, for the start
    of the day and of the following day).  Both boundary dates are
    included; if ``end_date`` is not after ``start_date`` a single row
    is returned, matching the original behaviour.
    """
    rows = []
    current = start_date
    while True:
        next_day = current + datetime.timedelta(hours=24)
        rows.append({
            'Date': current,
            'startTime': time.mktime(current.timetuple()) * 1000,
            'endTime': time.mktime(next_day.timetuple()) * 1000,
        })
        if current >= end_date:
            break
        current = next_day
    # BUG FIX: DataFrame.append was removed in pandas 2.0; building the
    # frame from a list of dicts in one go is both compatible and O(n).
    return pd.DataFrame(rows)
|
|
||||||
|
|
||||||
def main() -> None:
    """Entry point: resolve the reporting window, then export one Excel
    workbook per configured Dynatrace environment.

    The window comes either from --fromDate/--toDate (ISO dates) or from
    --preSelect week|month (the last full week/month).  For every
    environment in environment.yaml whose API token is present, SLOs are
    fetched in daily slices and written to <environment>.xlsx.
    """
    parser = init_argparse()
    args = parser.parse_args()

    # --- argument validation ------------------------------------------
    if args.preSelect and (args.fromDate or args.toDate):
        print("--preSelect must not be used in conjunction with --fromDate and/or --toDate")
        sys.exit()

    elif args.fromDate and not args.toDate:
        print("--fromDate only in conjunction with --toDate")
        sys.exit()

    elif args.toDate and not args.fromDate:
        print("--toDate only in conjunction with --fromDate")
        sys.exit()

    elif args.toDate and args.fromDate and not args.preSelect:
        try:
            fromDate = datetime.date.fromisoformat(args.fromDate)
            toDate = datetime.date.fromisoformat(args.toDate)
        except ValueError as e:
            # fromisoformat raises ValueError on malformed dates.
            print("Program closed: " + str(e))
            sys.exit()

        if toDate < fromDate:
            print("--toDate can't be older than --fromDate")
            sys.exit()

        if toDate > datetime.date.today() or fromDate > datetime.date.today():
            print("--toDate or --fromDate can't be in the future")
            sys.exit()

    elif args.preSelect and not args.fromDate and not args.toDate:
        today = datetime.date.today()
        if args.preSelect == "week":
            fromDate, toDate = previous_week_range(today)
        elif args.preSelect == "month":
            fromDate, toDate = previous_month_range(today)
        else:
            print("--preSelect must be week or month")
            sys.exit()

    else:
        print("Invalid arguments, please use --help")
        sys.exit()

    print("fromDate: " + str(fromDate))
    print("toDate: " + str(toDate))

    days = get_daily_slice(fromDate, toDate)

    with open('./environment.yaml') as file:
        environments = yaml.safe_load(file)

    # NOTE(review): each top-level yaml entry is assumed to be a list
    # whose index 1 holds the url mapping and index 2 the token mapping
    # -- confirm against environment.yaml.
    for item, env_doc in environments.items():
        token = dict(env_doc[2])
        url = dict(env_doc[1])
        print("Crawling through: " + item)
        print("Check if token exists in environment...")
        if (config(token.get('env-token-name')) != ""):
            print("Gather data, hold on a minute")
            DTTOKEN = config(token.get('env-token-name'))
            DTURL = url.get('env-url')

            # Fetch every daily slice and stack the results.
            df = pd.DataFrame()
            for index, row in days.iterrows():
                temp_df = getSLO(DTTOKEN, DTURL, row['startTime'], row['endTime'])
                temp_df['Date'] = row['Date']
                df = pd.concat([df, temp_df], ignore_index=True)

            # Sort columns in a try block - if the API returns unexpected
            # columns this must not fail the whole script.
            try:
                df = df[['Date', 'id', 'enabled', 'name', 'description', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target', 'warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
            except Exception as e:
                # BUG FIX: the original concatenated str + Exception, which
                # itself raised a TypeError inside the handler.
                print("Could not rearrange columns: " + str(e))

            # BUG FIX: ExcelWriter.save() was removed in pandas 2.0; the
            # context manager closes (and saves) the workbook reliably.
            with pd.ExcelWriter("./" + item + '.xlsx') as writer:
                df.to_excel(writer, sheet_name=str(item).split(" ")[0])
        else:
            print("token not found, skipping " + item)


if __name__ == "__main__":
    main()
|
|
||||||
Loading…
Reference in New Issue