adding new script
parent
b7cf28ef03
commit
9443045941
193
createReport_.py
193
createReport_.py
|
|
@ -1,193 +0,0 @@
|
||||||
from decouple import config
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
import datetime
|
|
||||||
import time
|
|
||||||
import pandas as pd
|
|
||||||
import requests
|
|
||||||
import openpyxl
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
def make_request(url, headers, verify, parameters):
    """Perform a GET request against *url*.

    Returns the ``requests.Response`` on success; on any requests-level
    failure returns a human-readable error string instead of raising, so
    callers must check the return type before using it as a Response.
    """
    try:
        resp = requests.get(url, headers=headers, verify=verify, params=parameters)
        resp.raise_for_status()
        return resp
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred:" + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred:" + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred:" + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred" + repr(err)
|
|
||||||
|
|
||||||
def previous_week_range(date):
    """Return (Monday, Sunday) of the calendar week before *date*'s week."""
    monday_this_week = date - datetime.timedelta(days=date.weekday())
    start_date = monday_this_week - datetime.timedelta(weeks=1)
    end_date = monday_this_week - datetime.timedelta(days=1)
    return start_date, end_date
|
|
||||||
|
|
||||||
def previous_month_range(date):
    """Return (first day, last day) of the month before *date*'s month."""
    # Last day of the previous month: step back one day from the 1st.
    end_date = date.replace(day=1) - datetime.timedelta(days=1)
    # First day of that same month.
    year, month = (date.year - 1, 12) if date.month == 1 else (date.year, date.month - 1)
    start_date = date.replace(year=year, month=month, day=1)
    return start_date, end_date
|
|
||||||
|
|
||||||
def getSLO(DTAPIToken, DTENV, fromDate, toDate):
    """Query the Dynatrace SLO API (v2) for the CoCo-QM-Report SLOs.

    Parameters:
        DTAPIToken: API token, sent as an 'Api-Token ...' Authorization header.
        DTENV: tenant base URL, e.g. "https://xxx.dynatracemgd.example".
        fromDate, toDate: evaluation window as epoch milliseconds.

    Returns:
        A pandas DataFrame normalized from the 'slo' list of the response.

    Raises:
        RuntimeError: when make_request reported a failure (it returns an
        error string instead of a Response object).
    """
    # Managed clusters ("dynatracemgd" in the URL) commonly use self-signed
    # certificates, so TLS verification is skipped there.
    verify = 'dynatracemgd' not in DTENV
    api_url = DTENV + "/api/v2/slo"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken,
    }
    parameters = {
        "pageSize": 25,
        "from": int(fromDate),
        "to": int(toDate),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")",
    }
    r = make_request(api_url, headers=headers, parameters=parameters, verify=verify)
    # BUG FIX: make_request signals failure by returning a message string;
    # the original fell straight into r.json() and died with AttributeError.
    if isinstance(r, str):
        raise RuntimeError(r)
    return pd.json_normalize(r.json()['slo'])
|
|
||||||
|
|
||||||
def get_daily_slice(start_date, end_date):
    """Return one row per day from *start_date* through *end_date* inclusive.

    Columns:
        Date: the day itself (same type as the input dates).
        startTime / endTime: epoch milliseconds of the day's start and of the
        following day's start, converted via local time (time.mktime), as in
        the original implementation.

    Always emits at least one row (the start day), even if
    start_date > end_date — mirroring the original behavior.
    """
    # BUG FIX: the original used DataFrame.append, which was removed in
    # pandas 2.x. Collect plain dicts and build the frame once instead —
    # this also removes the duplicated "first day" code.
    rows = []
    current = start_date
    while True:
        next_day = current + datetime.timedelta(hours=24)
        rows.append({
            'Date': current,
            'startTime': time.mktime(current.timetuple()) * 1000,
            'endTime': time.mktime(next_day.timetuple()) * 1000,
        })
        if current >= end_date:
            break
        current = next_day
    return pd.DataFrame(rows)
|
|
||||||
|
|
||||||
def main():
    """CLI entry point.

    Resolves the reporting window (explicit --fromDate/--toDate pair or a
    --preSelect of the last full week/month), slices it into days, then for
    every environment in ./environment.yaml pulls the SLO evaluations and
    writes one <env>.xlsx per environment.
    """
    parser = argparse.ArgumentParser(
        usage="%(prog)s [--fromDate] [toDate] or [preSelect]",
        description="gather SLO in daily slices for given Timeframe"
    )
    parser.add_argument(
        "-f", "--fromDate",
        help="YYYY-mm-dd e.g. 2022-01-01"
    )
    parser.add_argument(
        "-t", "--toDate",
        help="YYYY-mm-dd e.g. 2022-01-31"
    )
    parser.add_argument(
        "-p", "--preSelect",
        help="week | month - gathers the data for the last full week or month"
    )

    args = parser.parse_args()

    # --- argument validation: either a from/to pair OR a preSelect keyword ---
    if args.preSelect and (args.fromDate or args.toDate):
        print("--preSelect must not be used in conjunction with --fromDate and/or --toDate")
        sys.exit()

    elif args.fromDate and not args.toDate:
        print("--fromDate only in conjunction with --toDate")
        sys.exit()

    elif args.toDate and not args.fromDate:
        print("--toDate only in conjunction with --fromDate")
        sys.exit()

    elif args.toDate and args.fromDate and not args.preSelect:
        try:
            fromDate = datetime.date.fromisoformat(args.fromDate)
            toDate = datetime.date.fromisoformat(args.toDate)
        except Exception as e:
            print("Program closed: " + str(e))
            sys.exit()

        if toDate < fromDate:
            print("--toDate can't be older than --fromDate")
            sys.exit()

        if toDate > datetime.date.today() or fromDate > datetime.date.today():
            print("--toDate or --fromDate can't be in the future")
            sys.exit()

    elif args.preSelect and not args.fromDate and not args.toDate:
        today = datetime.date.today()
        if args.preSelect == "week":
            fromDate, toDate = previous_week_range(today)
        elif args.preSelect == "month":
            fromDate, toDate = previous_month_range(today)
        else:
            print("--preSelect must be week or month")
            sys.exit()

    else:
        print("Invalid arguments, please use --help")
        sys.exit()

    print("fromDate: " + str(fromDate))
    print("toDate: " + str(toDate))

    days = get_daily_slice(fromDate, toDate)

    with open('./environment.yaml') as file:
        environments = yaml.safe_load(file)

    # BUG FIX: the original iterated `for item, doc in doc.items()`, rebinding
    # the loop variable over the yaml root dict. Distinct names keep both alive.
    # NOTE(review): env_doc[1] is assumed to hold the URL mapping and env_doc[2]
    # the token mapping — confirm against environment.yaml's layout.
    for item, env_doc in environments.items():
        token = dict(env_doc[2])
        url = dict(env_doc[1])
        print("Crawling through: " + item)
        print("Check if token exists in environment...")
        # decouple.config() resolves the token from the process env / .env file.
        if config(token.get('env-token-name')) != "":
            print("Gather data, hold on a minute")
            DTTOKEN = config(token.get('env-token-name'))
            DTURL = url.get('env-url')

            # One API call per daily slice, accumulated into a single frame.
            df = pd.DataFrame()
            for index, row in days.iterrows():
                temp_df = getSLO(DTTOKEN, DTURL, row['startTime'], row['endTime'])
                temp_df['Date'] = row['Date']
                df = pd.concat([df, temp_df], ignore_index=True)

            # sort columns in a try block - if the API returns columns which are
            # non-existent, this will not fail the script
            try:
                df = df[['Date', 'id', 'enabled', 'name', 'description',
                         'evaluatedPercentage', 'errorBudget', 'status', 'error',
                         'target', 'warning', 'evaluationType', 'timeframe',
                         'metricExpression', 'filter']]
            except Exception as e:
                # BUG FIX: the original concatenated str + Exception, which
                # raises TypeError inside the except handler.
                print("Could not rearrange columns: " + str(e))

            writer = pd.ExcelWriter("./" + item + '.xlsx')
            df.to_excel(writer, sheet_name=str(item).split(" ")[0])
            # BUG FIX: ExcelWriter.save() was removed in pandas 2.x;
            # close() persists the workbook on all supported versions.
            writer.close()

        else:
            print("token not found, skipping " + item)
|
|
||||||
|
|
||||||
# Run the CLI only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
|
||||||
|
|
@ -1,88 +0,0 @@
|
||||||
from decouple import config
|
|
||||||
import yaml
|
|
||||||
import requests
|
|
||||||
import json
|
|
||||||
import pandas as pd
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
def make_request(url, headers, verify, parameters):
    """GET *url*; failures yield a descriptive string rather than an exception.

    Callers receive either a ``requests.Response`` or a plain ``str`` error
    message, so the return type must be checked before calling Response APIs.
    """
    result = None
    try:
        result = requests.get(url, headers=headers, verify=verify, params=parameters)
        result.raise_for_status()
    except requests.exceptions.HTTPError as exc:
        result = "An Http Error occurred:" + repr(exc)
    except requests.exceptions.ConnectionError as exc:
        result = "An Error Connecting to the API occurred:" + repr(exc)
    except requests.exceptions.Timeout as exc:
        result = "A Timeout Error occurred:" + repr(exc)
    except requests.exceptions.RequestException as exc:
        result = "An Unknown Error occurred" + repr(exc)
    return result
|
|
||||||
|
|
||||||
def GatherReportingInfo(DTAPIToken, DTENV, friendlyName):
    """Pull CoCo-QM-Report SLO evaluations for the module-level reporting
    window and write them to '<friendlyName>QM_.xlsx', one sheet per slice.

    Parameters:
        DTAPIToken: Dynatrace API token ('Api-Token ...' header).
        DTENV: tenant base URL.
        friendlyName: label used for the output workbook filename.

    NOTE(review): reads ``date_from`` and ``date_to`` as module-level globals
    set by the caller loop — TODO pass them as explicit parameters.
    """
    # BUG FIX: this module does `from datetime import datetime`, so the
    # original `datetime.timedelta(...)` raised AttributeError at runtime.
    from datetime import timedelta

    # Managed clusters ("dynatracemgd") typically use self-signed certs.
    verify = 'dynatracemgd' not in DTENV
    api_url = DTENV + "/api/v2/slo"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }

    date_report = date_from
    days = (date_to - date_from).days
    print(str(days) + " days within reporting period")

    # BUG FIX: when date_from == date_to, days == 0 and the original never
    # advanced date_report, looping forever. Use at least a one-day slice.
    slice_delta = timedelta(hours=max(days, 1) * 24)

    writer = pd.ExcelWriter(friendlyName + 'QM_.xlsx')
    # A slice spans the whole reporting period, so this normally runs once.
    while date_report <= date_to:
        date_report_end = date_report + slice_delta

        parameters = {
            "pageSize": 25,
            "from": int(date_report.timestamp() * 1000),
            "to": int(date_report_end.timestamp() * 1000),
            "timeFrame": "GTF",
            "evaluate": True,
            "sloSelector": "text(\"CoCo-QM-Report\")"
        }

        r = make_request(api_url, headers, verify, parameters)
        # make_request signals failure with a message string; fail loudly
        # instead of crashing on r.json() with an AttributeError.
        if isinstance(r, str):
            raise RuntimeError(r)

        df = pd.json_normalize(r.json()['slo'])

        # Save to excel file/sheet, named after the slice start date.
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])

        date_report = date_report + slice_delta

    # BUG FIX: ExcelWriter.save() was removed in pandas 2.x; close() persists.
    writer.close()
|
|
||||||
|
|
||||||
|
|
||||||
# Load the environment catalogue. environment.yaml maps a display name to a
# list whose entries include the env URL (index 1) and token info (index 2)
# — layout assumed from usage below; verify against the actual yaml file.
with open('./environment.yaml') as file:
    doc = yaml.safe_load(file)

# NOTE(review): the loop variable `doc` shadows the yaml root dict. Iteration
# still works because dict.items() was evaluated once, but the root mapping
# is no longer reachable after the first iteration.
for item, doc in doc.items():
    token = dict(doc[2])
    url = dict(doc[1])
    print("Crawling through: " + item)
    print("Check if token exists in environment...")
    # decouple.config() resolves the token from the process environment / .env.
    if(config(token.get('env-token-name')) != ""):
        print("Gather data, hold on a minute")
        DTTOKEN = config(token.get('env-token-name'))
        DTURL = url.get('env-url')
        # Reporting window from FROM_DATE / TO_DATE (YYYY-mm-dd). These
        # module-level names are read as globals by GatherReportingInfo.
        date_from = datetime.strptime(config('FROM_DATE'), '%Y-%m-%d')
        date_to = datetime.strptime(config('TO_DATE'), '%Y-%m-%d')
        GatherReportingInfo(DTTOKEN,DTURL,item)
    else:
        print("token not found, skipping " + item)
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
||||||
python-decouple
|
|
||||||
pyyaml
|
|
||||||
pandas
|
|
||||||
requests
openpyxl
# NOTE(review): the following entries were removed from the install list:
#   decouple     - a different PyPI project; this tool uses python-decouple (listed above)
#   datetime     - standard library; "pip install datetime" pulls in the unrelated Zope DateTime
#   argparse     - standard library since Python 3.2; the PyPI package is an obsolete backport
#   os           - standard library module, not installable from PyPI (pip install fails)
#   configparser - standard library in Python 3
|
|
||||||
|
|
@ -1,84 +0,0 @@
|
||||||
import requests, configparser, datetime, openpyxl, os
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
def getSLO():
    """Call the Dynatrace SLO v2 API for the current reporting slice.

    Reads the module globals YOUR_DT_API_URL, headers, date_report and
    date_report_end; prints the request/response for tracing and returns
    the decoded JSON payload.
    """
    api_url = ''.join(YOUR_DT_API_URL) + '/api/v2/slo'
    window_start_ms = date_report.timestamp() * 1000
    window_end_ms = date_report_end.timestamp() * 1000
    parameters = {
        "pageSize": 25,
        "from": int(window_start_ms),
        "to": int(window_end_ms),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")",
    }
    print(parameters)
    response = requests.get(api_url, headers=headers, params=parameters)
    print(response)
    content = response.json()
    print(content)
    return content
|
|
||||||
|
|
||||||
# Initialize config parser (interpolation off so '%' in tokens is kept literal)
config = configparser.ConfigParser(interpolation=None)
config.read('config.ini')

# Create the reports/ output folder if missing
if not os.path.exists('reports/'):
    os.mkdir('reports/')

# Get the reporting window from the config file (YYYY-mm-dd strings)
date_from = config['DATES']['From']
date_to = config['DATES']['To']

# Name of the report, built while the dates are still strings
string_date = date_from+"-to-"+date_to

# Iterate though tenants listed in the [TENANTS] section
tenant_count=1
for (tenantkey, val) in config.items('TENANTS'):
    # Get tenant URL and TOKEN — value format assumed "URL TOKEN"; verify config.ini
    YOUR_DT_API_URL = val.split(" ")[0].rstrip('\n')
    YOUR_DT_API_TOKEN = val.split(" ")[1].rstrip('\n')

    # Parse the window strings into datetimes for this tenant's loop
    date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')

    # Create the auth header consumed (as a global) by getSLO()
    headers={}
    headers["Authorization"] = "Api-Token "+YOUR_DT_API_TOKEN

    # Generate env_id from the tenant URL (SaaS "/e/<id>" or host name)
    if "/e/" in YOUR_DT_API_URL:
        env_id = YOUR_DT_API_URL.split("/e/")[1]
    else:
        env_id = YOUR_DT_API_URL.split("//")[1]

    # Create the per-tenant output folder
    path = "reports/"+env_id
    if not os.path.exists(path):
        os.mkdir(path)

    # Iterate the window in 720-hour (30-day) slices; getSLO() reads
    # date_report / date_report_end as globals.
    date_report = date_from
    print("Getting SLO for tenant "+env_id)
    writer = pd.ExcelWriter(path+"/"+string_date+'.xlsx')
    while (date_report<=date_to):
        date_report_end = date_report + datetime.timedelta(hours=720)

        # Get data from Dynatrace
        content = getSLO()

        # Normalize the 'slo' list into a DataFrame
        df = pd.json_normalize(content['slo'])

        # Save to excel file/sheet (sheet named after the slice start date)
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])

        date_report = date_report + datetime.timedelta(hours=720)
    # NOTE(review): ExcelWriter.save() was removed in pandas 2.x — confirm the
    # pinned pandas version or switch to writer.close().
    writer.save()
    # tenant_count is incremented but never read — presumably leftover
    tenant_count+=1
    # Restore the string form of the dates for the next tenant's strptime
    date_from = config['DATES']['From']
    date_to = config['DATES']['To']
|
|
||||||
|
|
@ -1,84 +0,0 @@
|
||||||
import requests, configparser, datetime, openpyxl, os
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
def getSLO():
    """Fetch the CoCo-QM-Report SLO evaluations for the current slice.

    Depends on module globals (YOUR_DT_API_URL, headers, date_report,
    date_report_end) set by the surrounding tenant loop. Prints request
    parameters and the raw response, then returns the parsed JSON body.
    """
    api_url = ''.join(YOUR_DT_API_URL) + '/api/v2/slo'
    parameters = {
        "pageSize": 25,
        "from": int(date_report.timestamp() * 1000),
        "to": int(date_report_end.timestamp() * 1000),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")",
    }
    print(parameters)
    reply = requests.get(api_url, headers=headers, params=parameters)
    print(reply)
    payload = reply.json()
    print(payload)
    return payload
|
|
||||||
|
|
||||||
# Initialize config parser (interpolation off so '%' in tokens is kept literal)
config = configparser.ConfigParser(interpolation=None)
config.read('config.ini')

# Create the reports/ output folder if missing
if not os.path.exists('reports/'):
    os.mkdir('reports/')

# Get the reporting window from the config file (YYYY-mm-dd strings)
date_from = config['DATES']['From']
date_to = config['DATES']['To']

# Name of the report, built while the dates are still strings
string_date = date_from+"-to-"+date_to

# Iterate though tenants listed in the [TENANTS] section
# (starts at 2 here vs 1 in the sibling script; the counter is never read)
tenant_count=2
for (tenantkey, val) in config.items('TENANTS'):
    # Get tenant URL and TOKEN — value format assumed "URL TOKEN"; verify config.ini
    YOUR_DT_API_URL = val.split(" ")[0].rstrip('\n')
    YOUR_DT_API_TOKEN = val.split(" ")[1].rstrip('\n')

    # Parse the window strings into datetimes for this tenant's loop
    date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')

    # Create the auth header consumed (as a global) by getSLO()
    headers={}
    headers["Authorization"] = "Api-Token "+YOUR_DT_API_TOKEN

    # Generate env_id from the tenant URL (SaaS "/e/<id>" or host name)
    if "/e/" in YOUR_DT_API_URL:
        env_id = YOUR_DT_API_URL.split("/e/")[1]
    else:
        env_id = YOUR_DT_API_URL.split("//")[1]

    # Create the per-tenant output folder
    path = "reports/"+env_id
    if not os.path.exists(path):
        os.mkdir(path)

    # Iterate the window in 24-hour (daily) slices; getSLO() reads
    # date_report / date_report_end as globals.
    date_report = date_from
    print("Getting SLO for tenant "+env_id)
    writer = pd.ExcelWriter(path+"/"+string_date+'.xlsx')
    while (date_report<=date_to):
        date_report_end = date_report + datetime.timedelta(hours=24)

        # Get data from Dynatrace
        content = getSLO()

        # Normalize the 'slo' list into a DataFrame
        df = pd.json_normalize(content['slo'])

        # Save to excel file/sheet (sheet named after the slice start date)
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])

        date_report = date_report + datetime.timedelta(hours=24)
    # NOTE(review): ExcelWriter.save() was removed in pandas 2.x — confirm the
    # pinned pandas version or switch to writer.close().
    writer.save()
    # tenant_count is incremented but never read — presumably leftover
    tenant_count+=1
    # Restore the string form of the dates for the next tenant's strptime
    date_from = config['DATES']['From']
    date_to = config['DATES']['To']
|
|
||||||
Loading…
Reference in New Issue