Compare commits

..

No commits in common. "45f37d5180e9d230b096b2d4639a697fba7dfb46" and "21745330fcc3cafc5be892101a733fd176b44b34" have entirely different histories.

2 changed files with 25 additions and 45 deletions

16
Jenkinsfile vendored
View File

@@ -39,7 +39,7 @@
//cron('0 8 * * *')
//every monday at 08:00
//cron('0 4 * * 1')
cron('0 4 * * 1')
}
@@ -92,8 +92,8 @@
emailext subject: env.JOB_NAME,
body: 'Please find the output of the weekly QM-Report attached',
//to: 'rene.forstner@nttdata.com',
to: 'rene.forstner@nttdata.com, stephan.oertelt@bmw.de, Mohammed.Abadel@bmw.de, michaela.jaeger@bmw.de, Andreas.DB.Danzer@bmwgroup.com',
//to: 'rene.forstner@nttdata.com, stephan.oertelt@bmw.de, Mohammed.Abadel@bmw.de, michaela.jaeger@bmw.de, OOC-Support@bmwgroup.com, Sonja.Yildizoglu@bmw.de, Andreas.DA.Danzer@partner.bmw.de',
//to: 'rene.forstner@nttdata.com, stephan.oertelt@bmw.de, Mohammed.Abadel@bmw.de, michaela.jaeger@bmw.de',
to: 'rene.forstner@nttdata.com, stephan.oertelt@bmw.de, Mohammed.Abadel@bmw.de, michaela.jaeger@bmw.de, OOC-Support@bmwgroup.com, Sonja.Yildizoglu@bmw.de, Andreas.DA.Danzer@partner.bmw.de',
replyTo: 'coco-apm@bmw.de',
attachmentsPattern: '*.xlsx'
@@ -107,16 +107,6 @@
}
post {
failure {
emailext subject: "${env.JOB_NAME} build ${env.BUILD_ID} failed",
body: "QM report failed, see logs for details: ${env.BUILD_URL}",
to: 'BMW.CoCo.Dynatrace@nttdata.com, ermis.wieger@nttdata.com, Andreas.DB.Danzer@bmwgroup.com'
// to post to the teams channel "0 - APM Service Desk" just uncomment the following command:
// office365ConnectorSend webhookUrl: "https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/9aca6923685b40f794134853fcbe88f1/ff31bcee-96b3-4481-9bd8-4f74180b263b",
// message: "QM report failed, see logs for details: ${env.BUILD_URL}",
// status: 'Failure',
// color: "d00000"
}
always {
cleanWs()
}

View File

@@ -8,10 +8,6 @@ import pandas as pd
import requests
import openpyxl
import argparse
import os
os.environ['TZ'] = 'Europe/Berlin' # set new timezone
time.tzset()
def make_request(url, headers,verify,parameters):
try:
@@ -61,7 +57,7 @@ def getSLO(DTAPIToken, DTENV, fromDate, toDate):
"from": int(fromDate),
"to": int(toDate),
"timeFrame": "GTF",
"evaluate": "true",
"evaluate": True,
"sloSelector": "text(\"CoCo-QM-Report\")"
}
r = make_request(DTAPIURL,headers=headers,parameters=parameters,verify=verify)
@@ -88,8 +84,8 @@ def get_daily_slice(start_date, end_date):
#Add the first day
tempend = tempstart + datetime.timedelta(hours=24)
startms = time.mktime(tempstart.timetuple()) * 1000
endms = time.mktime(tempend.timetuple()) * 1000
startms = time.mktime(tempstart.timetuple()) * 1000
endms = time.mktime(tempend.timetuple()) * 1000
row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
days = days.append(row,ignore_index=True)
@@ -97,10 +93,8 @@ def get_daily_slice(start_date, end_date):
while tempstart < end_date:
tempstart = tempstart + datetime.timedelta(hours=24)
tempend = tempstart + datetime.timedelta(hours=24)
startms = time.mktime(tempstart.timetuple()) * 1000
endms = time.mktime(tempend.timetuple()) * 1000
startms = time.mktime(tempstart.timetuple()) * 1000
endms = time.mktime(tempend.timetuple()) * 1000
row = {'Date':tempstart,'startTime':startms, 'endTime':endms}
days = days.append(row,ignore_index=True)
@@ -204,10 +198,6 @@ def main():
else:
print("Invalid arguments, please use --help")
sys.exit()
#Adding 2 hours to be UTC+2 on the pipeline
fromDate = fromDate + datetime.timedelta(hours=2)
toDate = toDate + datetime.timedelta(hours=2)
print("fromDate: " + str(fromDate))
print("toDate: " + str(toDate))
@@ -251,22 +241,22 @@ def main():
dailyall = pd.concat([dailyall,df],ignore_index=True)
#Calc hourly SLO
#if (args.preSelect == "week"):
df = pd.DataFrame()
for index, row in hours.iterrows():
temp_df = getSLO(DTTOKEN,DTURL,row['startTime'],row['endTime'])
temp_df['Date'] = row['Date']
temp_df['HUB'] = item
df = pd.concat([df,temp_df],ignore_index=True)
#sort columns in a try block - if API is returning columns which are non exist, this will not fail the script
df[['description','Touchpoint']] = df['description'].str.split('_',expand=True)
try:
df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description', 'Touchpoint', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
except Exception as e:
print("Could not rearrange columns: " + str(e))
hourlyall = pd.concat([hourlyall,df],ignore_index=True)
if (args.preSelect == "week"):
df = pd.DataFrame()
for index, row in hours.iterrows():
temp_df = getSLO(DTTOKEN,DTURL,row['startTime'],row['endTime'])
temp_df['Date'] = row['Date']
temp_df['HUB'] = item
df = pd.concat([df,temp_df],ignore_index=True)
#sort columns in a try block - if API is returning columns which are non exist, this will not fail the script
df[['description','Touchpoint']] = df['description'].str.split('_',expand=True)
try:
df = df[['Date', 'HUB', 'id', 'enabled', 'name', 'description', 'Touchpoint', 'evaluatedPercentage', 'errorBudget', 'status', 'error', 'target','warning', 'evaluationType', 'timeframe', 'metricExpression', 'filter']]
except Exception as e:
print("Could not rearrange columns: " + str(e))
hourlyall = pd.concat([hourlyall,df],ignore_index=True)
###Calc Overall YTD SLO
yearstart = toDate.replace(month=2,day=28)
@@ -318,4 +308,4 @@ def main():
writer.close()
if __name__ == "__main__":
main()
main()