init commit

master
rforstner 2021-12-02 13:16:04 +01:00
commit 5fa5bf2af0
8 changed files with 423 additions and 0 deletions

Jenkinsfile (vendored, new file)
//not required right now as CN is reachable from EMEA as well
def loopEnvironments(environments) {
    print env.JENKINS_URL
    environments.each { key, val ->
        //Execute only if you are on the same environment
        //not required right now as CN is reachable from EMEA as well
        if (env.JENKINS_URL == environments."${key}"[3].'jenkins') {
            envname   = environments."${key}"[0].'name'
            envurl    = environments."${key}"[1].'env-url'
            tokenname = environments."${key}"[2].'env-token-name'
            // double-quoted GString so Groovy interpolates envname before the shell runs
            sh "python createReport.py '${envname}'"
        }
    }
}
pipeline {
    options {
        ansiColor('xterm')
    }
    //label libraryBuild is available in CN JAWS and ROW JAWS, therefore this one was used; no additional intents
    agent { label 'libraryBuild' }
    parameters {
        string(name: 'FROMDATE', defaultValue: '', description: 'Enter from date in format YYYY-MM-DD, e.g. 2021-11-01')
        string(name: 'TODATE', defaultValue: '', description: 'Enter to date in format YYYY-MM-DD, e.g. 2021-11-30')
    }
    //trigger definitions use crontab syntax - Jenkins runs in UTC
    triggers {
        //every 1st of every month at 00:00
        cron('0 0 1 * *')
        //every day at 08:00
        //cron('0 8 * * *')
        //every Monday at 08:00
        //cron('0 8 * * MON')
    }
    environment {
        //Proxy settings
        AUTH = credentials('proxy')
        proxy_user = "${AUTH_USR}"
        proxy_pw = "${AUTH_PSW}"
        http_proxy = "http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
        https_proxy = "http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
        no_proxy = "localhost,127.0.0.1,.muc,.bmwgroup.net"
        HTTP_PROXY = "${http_proxy}"
        HTTPS_PROXY = "${https_proxy}"
        NO_PROXY = "${no_proxy}"
        EUPROD_TOKEN_VAR = credentials('EUPROD_TOKEN_VAR')
        EUPREPROD_TOKEN_VAR = credentials('EUPREPROD_TOKEN_VAR')
        NAPROD_TOKEN_VAR = credentials('NAPROD_TOKEN_VAR')
        NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
        CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
        CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
        FROMDATE = params.FROMDATE
        TODATE = params.TODATE
    }
    stages {
        stage('install required python packages') {
            steps {
                sh '''
                    pip install --user -r requirements.txt
                '''
                echo env.JENKINS_URL
            }
        }
        stage('Execute Reporting Script') {
            steps {
                sh 'python slo-report-monthly.py'
                echo env.FROMDATE
                //Only required once CN is not reachable from EMEA
                //loopEnvironments(environments)
            }
        }
        stage('Send report') {
            steps {
                script {
                    try {
                        emailext subject: env.JOB_NAME,
                            body: 'Please find the output of your reports attached',
                            to: 'rene.forstner@nttdata.com',
                            replyTo: 'coco-apm@bmw.de',
                            // the report scripts write .xlsx files below reports/
                            attachmentsPattern: 'reports/**/*.xlsx'
                    }
                    catch (mailExc) {
                        echo "Sending Email Failed: ${mailExc}"
                    }
                }
            }
        }
    }
    post {
        always {
            cleanWs()
        }
    }
}
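
The six *_TOKEN_VAR credentials above are injected as environment variables, which is how createReport.py later picks them up: python-decouple's config() also checks os.environ. A minimal sketch of that lookup (the token value below is a made-up placeholder, set only for illustration):

import os
from decouple import config

# normally Jenkins' credentials() puts the real token into the environment;
# this placeholder value is only for illustration
os.environ['EUPROD_TOKEN_VAR'] = 'dt0c01.PLACEHOLDER'

token = config('EUPROD_TOKEN_VAR', default='')
print('token found' if token else 'token not found')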

config.ini (new file)
[SLO]
getUsers = 1
[DATES]
From = 2021-11-01
To = 2021-11-30
[TENANTS]
tenant1 = https://xxu26128.live.dynatrace.com dt0c01.EJDSZOHHTZCLY2DK76NBQ7K6.G4HSYIMCVCNAYXBD4VXDZHAQ5BDWCIGHCWLTLSVKT34A57BTC5X7HWDON3D4BNBP
tenant2 = https://wgv50241.live.dynatrace.com dt0c01.QVVOID7YLSRHQU6JB7GKTRH6.RZRTPDQDICZ2UVH62OE4QCA552DJC24OP2OB2WJ2PU32NUOE5RHXCX7QIRMVJFNA
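
Each [TENANTS] value packs the tenant URL and its API token into one string separated by a single space; the report scripts split on that space. A small standalone sketch of the parsing, assuming the config.ini above sits next to the script:

import configparser

config = configparser.ConfigParser(interpolation=None)
config.read('config.ini')

for tenantkey, val in config.items('TENANTS'):
    # value format: "<tenant-url> <api-token>"
    tenant_url, api_token = val.split(" ", 1)
    print(tenantkey, "->", tenant_url)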

createReport.py (new file)
from decouple import config
import yaml
import requests
import json
import pandas as pd
import datetime

def make_request(url, headers, verify, parameters):
    # NOTE: on failure this returns an error string instead of a response object
    try:
        response = requests.get(url, headers=headers, verify=verify, params=parameters)
        response.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        return "An Http Error occurred:" + repr(errh)
    except requests.exceptions.ConnectionError as errc:
        return "An Error Connecting to the API occurred:" + repr(errc)
    except requests.exceptions.Timeout as errt:
        return "A Timeout Error occurred:" + repr(errt)
    except requests.exceptions.RequestException as err:
        return "An Unknown Error occurred" + repr(err)
    return response

def GatherReportingInfo(DTAPIToken, DTENV, friendlyName):
    env = DTENV
    # Managed environments (dynatracemgd) use an internal certificate, so skip TLS verification there
    if DTENV.find('dynatracemgd') != -1:
        verify = False
    else:
        verify = True
    DTAPIURL = env + "/api/v2/slo"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
    }
    date_report = date_from
    days = (date_to - date_from).days
    print(str(days) + " days within reporting period")
    # one workbook per environment (the filename chosen here is an assumption)
    writer = pd.ExcelWriter(friendlyName + '.xlsx')
    while date_report <= date_to:
        date_report_end = date_report + datetime.timedelta(hours=days * 24)
        millisec_date_report = date_report.timestamp() * 1000
        millisec_date_report_end = date_report_end.timestamp() * 1000
        parameters = {
            "pageSize": 25,
            "from": int(millisec_date_report),
            "to": int(millisec_date_report_end),
            "timeFrame": "GTF",
            "evaluate": True,
            "sloSelector": "text(\"CoCo-QM-Report\")"
        }
        r = make_request(DTAPIURL, headers, verify, parameters)
        content = r.json()
        df = pd.json_normalize(content['slo'])
        # Save to excel file/sheet
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])
        date_report = date_report + datetime.timedelta(hours=days * 24)
    writer.save()

with open('./environment.yaml') as file:
    doc = yaml.safe_load(file)

for item, entries in doc.items():
    token = dict(entries[2])
    url = dict(entries[1])
    print("Crawling through: " + item)
    print("Check if token exists in environment...")
    # default='' avoids an UndefinedValueError when the token variable is not set
    if config(token.get('env-token-name'), default='') != "":
        print("Gather data, hold on a minute")
        DTTOKEN = config(token.get('env-token-name'))
        DTURL = url.get('env-url')
        date_from = datetime.datetime.strptime(config('FROM_DATE'), '%Y-%m-%d')
        date_to = datetime.datetime.strptime(config('TO_DATE'), '%Y-%m-%d')
        GatherReportingInfo(DTTOKEN, DTURL, item)
    else:
        print("token not found, skipping " + item)

environment.yaml (new file)
euprod:
- name: "EUprod"
- env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
eupreprod:
- name: "eupreprod"
- env-url: "https://qqk70169.live.dynatrace.com"
- env-token-name: "EUPREPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
napreprod:
- name: "napreprod"
- env-url: "https://onb44935.live.dynatrace.com"
- env-token-name: "NAPREPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
naprod:
- name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN_VAR"
- jenkins: "https://jaws.bmwgroup.net/opapm/"
cnprod:
- name: "cnprod"
- env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN_VAR"
- jenkins: "https://jaws-china.bmwgroup.net/opmaas/"
cnpreprod:
- name: "cnpreprod"
- env-url: "https://dynatracemgd-tsp.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
- env-token-name: "CNPREPROD_TOKEN_VAR"
- jenkins: "https://jaws-china.bmwgroup.net/opmaas/"

requirements copy.txt (new file)
python-decouple
pyyaml
pandas
requests
argparse
openpyxl
configparser

requirements.txt (new file)
python-decouple
pyyaml
pandas
requests
openpyxl

slo-report-monthly.py (new file)
import requests, configparser, datetime, openpyxl, os
import pandas as pd

def getSLO():
    api_url = YOUR_DT_API_URL + '/api/v2/slo'
    millisec_date_report = date_report.timestamp() * 1000
    millisec_date_report_end = date_report_end.timestamp() * 1000
    parameters = {
        "pageSize": 25,
        "from": int(millisec_date_report),
        "to": int(millisec_date_report_end),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")"
    }
    print(parameters)
    r = requests.get(api_url, headers=headers, params=parameters)
    print(r)
    content = r.json()
    print(content)
    return content

# Initialize config parser
config = configparser.ConfigParser(interpolation=None)
config.read('config.ini')

# Create reports/ folder
if not os.path.exists('reports/'):
    os.mkdir('reports/')

# Get values from the config file
date_from = config['DATES']['From']
date_to = config['DATES']['To']

# Name of the report
string_date = date_from + "-to-" + date_to

# Iterate through tenants
tenant_count = 1
for (tenantkey, val) in config.items('TENANTS'):
    # Get tenant URL and TOKEN
    YOUR_DT_API_URL = val.split(" ")[0].rstrip('\n')
    YOUR_DT_API_TOKEN = val.split(" ")[1].rstrip('\n')
    # Format date
    date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')
    # Create header
    headers = {}
    headers["Authorization"] = "Api-Token " + YOUR_DT_API_TOKEN
    # Generate env_id
    if "/e/" in YOUR_DT_API_URL:
        env_id = YOUR_DT_API_URL.split("/e/")[1]
    else:
        env_id = YOUR_DT_API_URL.split("//")[1]
    # Create tenant folder
    path = "reports/" + env_id
    if not os.path.exists(path):
        os.mkdir(path)
    # Iterate to get results per 30-day (720 h) window
    date_report = date_from
    print("Getting SLO for tenant " + env_id)
    writer = pd.ExcelWriter(path + "/" + string_date + '.xlsx')
    while date_report <= date_to:
        date_report_end = date_report + datetime.timedelta(hours=720)
        # Get data from Dynatrace
        content = getSLO()
        # Create pandas dataframe
        df = pd.json_normalize(content['slo'])
        # Save to excel file/sheet
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])
        date_report = date_report + datetime.timedelta(hours=720)
    writer.save()
    tenant_count += 1
    # Reset raw config strings for the next tenant
    date_from = config['DATES']['From']
    date_to = config['DATES']['To']
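
For orientation, the report lands under reports/<env_id>/<from>-to-<to>.xlsx, where env_id is either the part after /e/ for a Managed URL or the hostname for a SaaS URL. A tiny sketch of that derivation, using the SaaS tenant and dates from config.ini above:

# sketch of the env_id / output path derivation used by both report scripts
url = "https://xxu26128.live.dynatrace.com"
env_id = url.split("/e/")[1] if "/e/" in url else url.split("//")[1]
print("reports/" + env_id + "/2021-11-01-to-2021-11-30.xlsx")
# -> reports/xxu26128.live.dynatrace.com/2021-11-01-to-2021-11-30.xlsx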

slo-report.py (new file)
import requests, configparser, datetime, openpyxl, os
import pandas as pd

def getSLO():
    api_url = YOUR_DT_API_URL + '/api/v2/slo'
    millisec_date_report = date_report.timestamp() * 1000
    millisec_date_report_end = date_report_end.timestamp() * 1000
    parameters = {
        "pageSize": 25,
        "from": int(millisec_date_report),
        "to": int(millisec_date_report_end),
        "timeFrame": "GTF",
        "evaluate": True,
        "sloSelector": "text(\"CoCo-QM-Report\")"
    }
    print(parameters)
    r = requests.get(api_url, headers=headers, params=parameters)
    print(r)
    content = r.json()
    print(content)
    return content

# Initialize config parser
config = configparser.ConfigParser(interpolation=None)
config.read('config.ini')

# Create reports/ folder
if not os.path.exists('reports/'):
    os.mkdir('reports/')

# Get values from the config file
date_from = config['DATES']['From']
date_to = config['DATES']['To']

# Name of the report
string_date = date_from + "-to-" + date_to

# Iterate through tenants
tenant_count = 2
for (tenantkey, val) in config.items('TENANTS'):
    # Get tenant URL and TOKEN
    YOUR_DT_API_URL = val.split(" ")[0].rstrip('\n')
    YOUR_DT_API_TOKEN = val.split(" ")[1].rstrip('\n')
    # Format date
    date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')
    # Create header
    headers = {}
    headers["Authorization"] = "Api-Token " + YOUR_DT_API_TOKEN
    # Generate env_id
    if "/e/" in YOUR_DT_API_URL:
        env_id = YOUR_DT_API_URL.split("/e/")[1]
    else:
        env_id = YOUR_DT_API_URL.split("//")[1]
    # Create tenant folder
    path = "reports/" + env_id
    if not os.path.exists(path):
        os.mkdir(path)
    # Iterate to get results per day
    date_report = date_from
    print("Getting SLO for tenant " + env_id)
    writer = pd.ExcelWriter(path + "/" + string_date + '.xlsx')
    while date_report <= date_to:
        date_report_end = date_report + datetime.timedelta(hours=24)
        # Get data from Dynatrace
        content = getSLO()
        # Create pandas dataframe
        df = pd.json_normalize(content['slo'])
        # Save to excel file/sheet
        df.to_excel(writer, sheet_name=str(date_report).split(" ")[0])
        date_report = date_report + datetime.timedelta(hours=24)
    writer.save()
    tenant_count += 1
    # Reset raw config strings for the next tenant
    date_from = config['DATES']['From']
    date_to = config['DATES']['To']