diff --git a/Jenkinsfile b/Jenkinsfile
index 2761b19..c52305b 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -77,7 +77,7 @@
         stage('Execute Reporting Script') {
             steps {
-                sh 'python createReport.py'
+                sh 'python createReport.py -p week'
                 print env.FROMDATE
                 //Only required once CN is not reachable from EMEA
                 //loopEnvironments(environments)
diff --git a/createReport.py b/createReport.py
index f51f003..8543305 100644
--- a/createReport.py
+++ b/createReport.py
@@ -1,9 +1,32 @@
 from decouple import config
+import sys
 import yaml
-import requests
-import json
+import datetime
+import time
 import pandas as pd
-from datetime import datetime
+import requests
+import openpyxl
+import argparse
+
+def init_argparse() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(
+        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
+        description="gather SLO in daily slices for given Timeframe"
+    )
+    parser.add_argument(
+        "-f","--fromDate",
+        help = "YYYY-mm-dd e.g. 2022-01-01"
+    )
+    parser.add_argument(
+        "-t","--toDate",
+        help = "YYYY-mm-dd e.g. 2022-01-31"
+    )
+    parser.add_argument(
+        "-p","--preSelect",
+        help = "week | month - gathers the data for the last full week or month"
+    )
+
+    return parser
 
 def make_request(url, headers,verify,parameters):
     try:
@@ -20,8 +43,18 @@ def make_request(url, headers,verify,parameters):
 
     return response
 
-def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
-
+def previous_week_range(date):
+    start_date = date + datetime.timedelta(-date.weekday(), weeks=-1)
+    end_date = date + datetime.timedelta(-date.weekday() - 1)
+    return start_date, end_date
+
+def previous_month_range(date):
+    end_date = date.replace(day=1) - datetime.timedelta(days=1)
+    start_date = end_date.replace(day=1)
+    return start_date, end_date
+
+def getSLO(DTAPIToken, DTENV, fromDate, toDate):
+
     env = DTENV
     DTAPIToken = DTAPIToken
 
@@ -35,54 +68,131 @@ def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
         'Content-Type': 'application/json',
         'Authorization': 'Api-Token ' + DTAPIToken
     }
-    date_report = date_from
-    days = (date_to - date_from).days
-    print (str(days) + " days within reproting period")
-    writer = pd.ExcelWriter(friendlyName+'QM_.xlsx')
-    while (date_report<=date_to):
-        date_report_end = date_report + datetime.timedelta(hours=days*24)
-        millisec_date_report = date_report.timestamp() * 1000
-        millisec_date_report_end = date_report_end.timestamp() * 1000
-
-
-        parameters = {
-            "pageSize": 25,
-            "from": int(millisec_date_report),
-            "to": int(millisec_date_report_end),
-            "timeFrame": "GTF",
-            "evaluate": True,
-            "sloSelector": "text(\"CoCo-QM-Report\")"
-        }
-
-        r = make_request(DTAPIURL,headers,verify,parameters)
-        content = r.json()
-
-        df = pd.json_normalize(content['slo'])
-
-        # Save to excel file/sheet
-        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])
-
-        date_report = date_report + datetime.timedelta(hours=days*24)
+    parameters = {
+        "pageSize": 25,
+        "from": int(fromDate),
+        "to": int(toDate),
+        "timeFrame": "GTF",
+        "evaluate": True,
+        "sloSelector": "text(\"CoCo-QM-Report\")"
+    }
+    r = make_request(DTAPIURL,headers=headers,parameters=parameters,verify=verify)
 
-    writer.save()
-
+    df = pd.json_normalize(r.json()['slo'])
 
-with open('./environment.yaml') as file:
-    doc = yaml.safe_load(file)
+    return df
 
-for item, doc in doc.items():
-    token = dict(doc[2])
-    url = dict(doc[1])
-    print("Crawling through: " + item)
-    print("Check if token exists in environment...")
-    if(config(token.get('env-token-name')) != ""):
-        print("Gather data, hold on a minute")
-        DTTOKEN = config(token.get('env-token-name'))
-        DTURL = url.get('env-url')
-        date_from = datetime.strptime(config('FROM_DATE'), '%Y-%m-%d')
-        date_to = datetime.strptime(config('TO_DATE'), '%Y-%m-%d')
-        GatherReportingInfo(DTTOKEN,DTURL,item)
+def get_daily_slice(start_date, end_date):
+    tempstart = start_date
+    days = pd.DataFrame()
+
+    #Add the first day
+    tempend = tempstart + datetime.timedelta(hours=24)
+    startms = time.mktime(tempstart.timetuple()) * 1000
+    endms = time.mktime(tempend.timetuple()) * 1000
+
+    row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
+    days = days.append(row,ignore_index=True)
+
+    while tempstart < end_date:
+        tempstart = tempstart + datetime.timedelta(hours=24)
+        tempend = tempstart + datetime.timedelta(hours=24)
+        startms = time.mktime(tempstart.timetuple()) * 1000
+        endms = time.mktime(tempend.timetuple()) * 1000
+
+        row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
+        days = days.append(row,ignore_index=True)
+
+    return days
+
+def main() -> None:
+
+    parser = init_argparse()
+    args = parser.parse_args()
+
+    if args.preSelect and (args.fromDate or args.toDate):
+        print("--preSelect must not be used in conjunction with --fromDate and/or --toDate")
+        sys.exit()
+
+    elif args.fromDate and not args.toDate:
+        print("--fromDate only in conjunction with --toDate")
+        sys.exit()
 
-    else:
-        print("token not found, skipping " + item)
\ No newline at end of file
+
+    elif args.toDate and not args.fromDate:
+        print("--toDate only in conjunction with --fromDate")
+        sys.exit()
+
+    elif args.toDate and args.fromDate and not args.preSelect:
+        try:
+            fromDate = datetime.date.fromisoformat(args.fromDate)
+            toDate = datetime.date.fromisoformat(args.toDate)
+        except Exception as e:
+            print("Program closed: " + str(e))
+            sys.exit()
+
+        if toDate < fromDate:
+            print("--toDate can't be older than --fromDate")
+            sys.exit()
+
+        if toDate > datetime.date.today() or fromDate > datetime.date.today():
+            print("--toDate or --fromDate can't be in the future")
+            sys.exit()
+
+    elif args.preSelect and not args.fromDate and not args.toDate:
+
+        date = datetime.date.today()
+
+        if args.preSelect == "week":
+            fromDate, toDate = previous_week_range(date)
+        elif args.preSelect == "month":
+            fromDate, toDate = previous_month_range(date)
+        else:
+            print("--preSelect must be week or month")
+            sys.exit()
+
+    else:
+        print("Invalid arguments, please use --help")
+        sys.exit()
+    print("fromDate: " + str(fromDate))
+    print("toDate: " + str(toDate))
+
+    days = get_daily_slice(fromDate,toDate)
+    with open('./environment.yaml') as file:
+        doc = yaml.safe_load(file)
+
+    for item, doc in doc.items():
+        token = dict(doc[2])
+        url = dict(doc[1])
+        print("Crawling through: " + item)
+        print("Check if token exists in environment...")
+        if(config(token.get('env-token-name')) != ""):
+            print("Gather data, hold on a minute")
+            DTTOKEN = config(token.get('env-token-name'))
+            DTURL = url.get('env-url')
+
+
+
+            df = pd.DataFrame()
+            for index, row in days.iterrows():
+                temp_df = getSLO(DTTOKEN,DTURL,row['startTime'],row['endTime'])
+                temp_df['Date'] = row['Date']
+                df = pd.concat([df,temp_df],ignore_index=True)
+
+            #sort columns in a try block - if the API returns columns that do not exist, this will not fail the script
+
+            try:
+                df = df[['Date', 'id', 'enabled', 'name', 'description', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
+            except Exception as e:
+                print("Could not rearrange columns: " + str(e))
+
+
+            writer = pd.ExcelWriter("./"+ item +'.xlsx')
+            df.to_excel(writer, sheet_name=str(item).split(" ")[0])
+            writer.save()
+
+        else:
+            print("token not found, skipping " + item)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
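Note on the time-range helpers added above: previous_week_range() and previous_month_range() back the new -p/--preSelect option, which is what the updated Jenkins stage now passes (python createReport.py -p week). Below is a minimal, standard-library-only sketch of what they return; the helper bodies are copied from the diff and the sample date is purely illustrative.

    import datetime

    def previous_week_range(date):
        # Monday .. Sunday of the previous full week
        start_date = date + datetime.timedelta(-date.weekday(), weeks=-1)
        end_date = date + datetime.timedelta(-date.weekday() - 1)
        return start_date, end_date

    def previous_month_range(date):
        # first .. last day of the previous calendar month
        end_date = date.replace(day=1) - datetime.timedelta(days=1)
        start_date = end_date.replace(day=1)
        return start_date, end_date

    sample = datetime.date(2022, 6, 15)   # a Wednesday, chosen only for illustration
    print(previous_week_range(sample))    # -> Monday 2022-06-06 .. Sunday 2022-06-12
    print(previous_month_range(sample))   # -> 2022-05-01 .. 2022-05-31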
diff --git a/requirements.txt b/requirements.txt
index fbd508f..b791c3a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,5 @@ pandas
 requests
 datetime
 argparse
-openpyxl
\ No newline at end of file
+openpyxl
+argparse
\ No newline at end of file
diff --git a/utils.py b/utils.py
deleted file mode 100644
index dfb0407..0000000
--- a/utils.py
+++ /dev/null
@@ -1,199 +0,0 @@
-from tracemalloc import start
-from decouple import config
-import sys
-import yaml
-import datetime
-import time
-import pandas as pd
-import requests
-import openpyxl
-import argparse
-
-def init_argparse() -> argparse.ArgumentParser:
-    parser = argparse.ArgumentParser(
-        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
-        description="gather SLO in daily slices for given Timeframe"
-    )
-    parser.add_argument(
-        "-f","--fromDate",
-        help = "YYYY-mm-dd e.g. 2022-01-01"
-    )
-    parser.add_argument(
-        "-t","--toDate",
-        help = "YYYY-mm-dd e.g. 2022-01-31"
-    )
-    parser.add_argument(
-        "-p","--preSelect",
-        help = "week | month - gathers the data for the last full week or month"
-    )
-
-    return parser
-
-def make_request(url, headers,verify,parameters):
-    try:
-        response = requests.get(url, headers=headers,verify=verify,params=parameters)
-        response.raise_for_status()
-    except requests.exceptions.HTTPError as errh:
-        return "An Http Error occurred:" + repr(errh)
-    except requests.exceptions.ConnectionError as errc:
-        return "An Error Connecting to the API occurred:" + repr(errc)
-    except requests.exceptions.Timeout as errt:
-        return "A Timeout Error occurred:" + repr(errt)
-    except requests.exceptions.RequestException as err:
-        return "An Unknown Error occurred" + repr(err)
-
-    return response
-
-def previous_week_range(date):
-    start_date = date + datetime.timedelta(-date.weekday(), weeks=-1)
-    end_date = date + datetime.timedelta(-date.weekday() - 1)
-    return start_date, end_date
-
-def previous_month_range(date):
-    end_date = date.replace(day=1) - datetime.timedelta(days=1)
-    start_date = end_date.replace(day=1)
-    return start_date, end_date
-
-def getSLO(DTAPIToken, DTENV, fromDate, toDate):
-
-    env = DTENV
-    DTAPIToken = DTAPIToken
-
-    if (DTENV.find('dynatracemgd') != -1):
-        verify=False
-    else:
-        verify=True
-
-    DTAPIURL= env + "/api/v2/slo"
-    headers = {
-        'Content-Type': 'application/json',
-        'Authorization': 'Api-Token ' + DTAPIToken
-    }
-
-    parameters = {
-        "pageSize": 25,
-        "from": int(fromDate),
-        "to": int(toDate),
-        "timeFrame": "GTF",
-        "evaluate": True,
-        "sloSelector": "text(\"CoCo-QM-Report\")"
-    }
-    r = make_request(DTAPIURL,headers=headers,parameters=parameters,verify=verify)
-
-    df = pd.json_normalize(r.json()['slo'])
-
-    return df
-
-def get_daily_slice(start_date, end_date):
-    tempstart = start_date
-    days = pd.DataFrame()
-
-    #Add the first day
-    tempend = tempstart + datetime.timedelta(hours=24)
-    startms = time.mktime(tempstart.timetuple()) * 1000
-    endms = time.mktime(tempend.timetuple()) * 1000
-
-    row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
-    days = days.append(row,ignore_index=True)
-
-    while tempstart < end_date:
-        tempstart = tempstart + datetime.timedelta(hours=24)
-        tempend = tempstart + datetime.timedelta(hours=24)
-        startms = time.mktime(tempstart.timetuple()) * 1000
-        endms = time.mktime(tempend.timetuple()) * 1000
-
-        row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
-        days = days.append(row,ignore_index=True)
-
-    return days
-
-def main() -> None:
-
-    parser = init_argparse()
-    args = parser.parse_args()
-
-    if args.preSelect and (args.fromDate or args.toDate):
-        print("--preSelect must not be used in conjuntion with --fromDate and/or --toDate")
-        sys.exit()
-
-    elif args.fromDate and not args.toDate:
-        print("--fromDate only in conjunction with --toDate")
-        sys.exit()
-
-    elif args.toDate and not args.fromDate:
-        print("--toDate only in conjunction with --fromDate")
-        sys.exit()
-
-    elif args.toDate and args.fromDate and not args.preSelect:
-        try:
-            fromDate = datetime.date.fromisoformat(args.fromDate)
-            toDate = datetime.date.fromisoformat(args.toDate)
-        except Exception as e:
-            print("Progam closed: " + str(e))
-            sys.exit()
-
-        if toDate < fromDate:
-            print("--toDate can't be older than --fromDate")
-            sys.exit()
-
-        if toDate > datetime.date.today() or fromDate > datetime.date.today():
-            print("--toDate or --fromDate can't be in the future")
-            sys.exit()
-
-    elif args.preSelect and not args.fromDate and not args.toDate:
-
-        date = datetime.date.today()
-
-        if args.preSelect == "week":
-            fromDate, toDate = previous_week_range(date)
-        elif args.preSelect == "month":
-            fromDate, toDate = previous_month_range(date)
-        else:
-            print("--preSelect must be week or month")
-            sys.exit()
-
-    else:
-        print("Invalid arguments, please use --help")
-        sys.exit()
-    print("fromDate: " + str(fromDate))
-    print("toDate: " + str(toDate))
-
-    days = get_daily_slice(fromDate,toDate)
-    with open('./environment.yaml') as file:
-        doc = yaml.safe_load(file)
-
-    for item, doc in doc.items():
-        token = dict(doc[2])
-        url = dict(doc[1])
-        print("Crawling through: " + item)
-        print("Check if token exists in environment...")
-        if(config(token.get('env-token-name')) != ""):
-            print("Gather data, hold on a minute")
-            DTTOKEN = config(token.get('env-token-name'))
-            DTURL = url.get('env-url')
-
-
-
-            df = pd.DataFrame()
-            for index, row in days.iterrows():
-                temp_df = getSLO(DTTOKEN,DTURL,row['startTime'],row['endTime'])
-                temp_df['Date'] = row['Date']
-                df = pd.concat([df,temp_df],ignore_index=True)
-
-            #sort columns in a try block - if API is returning columns which are non exist, this will not fail the script
-
-            try:
-                df = df[['Date', 'id', 'enabled', 'name', 'description', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
-            except Exception as e:
-                print("Could not rearrange columns: " + e)
-
-
-            writer = pd.ExcelWriter("./"+ item +'.xlsx')
-            df.to_excel(writer, sheet_name=str(item).split(" ")[0])
-            writer.save()
-
-        else:
-            print("token not found, skipping " + item)
-
-if __name__ == "__main__":
-    main()
\ No newline at end of file
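For reference, a small sketch of the per-day slicing that get_daily_slice() and getSLO() rely on: every day in the selected range becomes a [midnight, midnight + 24h) window converted to epoch milliseconds, and those values are what the Dynatrace /api/v2/slo endpoint receives as "from" and "to". The conversion below mirrors the one in the diff; the date is illustrative only.

    import datetime
    import time

    day = datetime.date(2022, 6, 6)                  # illustrative date
    next_day = day + datetime.timedelta(hours=24)

    # same conversion as get_daily_slice(): local midnight -> epoch milliseconds
    start_ms = time.mktime(day.timetuple()) * 1000
    end_ms = time.mktime(next_day.timetuple()) * 1000

    print(int(start_ms), int(end_ms))                # passed as "from"/"to" for one daily slice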