Compare commits

..

10 Commits

Author SHA1 Message Date
Rene Forstner (ext.) 06be32ea78 Jenkinsfile edited online with Bitbucket 2022-04-15 13:11:08 +02:00
rforstner 1d5b848524 adding caro to mail 2022-04-01 17:19:18 +02:00
rforstner f02d77c5a8 updated gitignore 2022-04-01 17:15:20 +02:00
rforstner 190fe261a5 updated gitignore 2022-04-01 17:14:49 +02:00
rforstner a925d39508 updated gitignore 2022-04-01 17:13:37 +02:00
rforstner b88c72e83a updated gitignore 2022-04-01 17:10:23 +02:00
rforstner 6582457194 changed to spring 2022-04-01 17:09:18 +02:00
Rene Forstner (ext.) e6c895749b Jenkinsfile edited online with Bitbucket 2022-04-01 17:01:36 +02:00
Rene Forstner (ext.) b682dbdb48 createReport.py edited online with Bitbucket 2022-04-01 17:00:49 +02:00
Rene Forstner (ext.) 19f3278673 Jenkinsfile edited online with Bitbucket 2022-01-20 15:11:48 +01:00
6 changed files with 334 additions and 9 deletions

.gitignore vendored (4 changed lines)

@@ -3,6 +3,10 @@ __pycache__/
 *.py[cod]
 *$py.class
+# reportfiles
+*.csv
 # C extensions
 *.so
Jenkinsfile vendored (10 changed lines)

@@ -31,7 +31,7 @@
         //cron('0 0 1 * *')
         //every day at 08:00
-        cron('0 8 * * *')
+        cron('0 6 * * *')
         //every monday at 08:00
         //cron('0 8 * * MON')
@@ -63,7 +63,7 @@
         stage('install required python packages') {
             steps {
                 sh '''
-                pip install --user -r requirements.txt
+                pip3 install --user -r requirements.txt
                 '''
                 print env.JENKINS_URL
             }
@@ -72,7 +72,7 @@
         stage('Execute Reporting Script') {
             steps {
-                sh 'python createReport.py'
+                sh 'python3 createReport.py'
                 //Only required once CN is not reachable from EMEA
                 //loopEnvironments(environments)
@@ -85,7 +85,7 @@
             try {
                 emailext subject: env.JOB_NAME,
                     body: 'Please find the output of your reports attached',
-                    to: 'rene.forstner@nttdata.com,stephan.oertelt@bmw.de,Hendrik.Schweppe@bmw.de,Klemens.Mang@bmw.de',
+                    to: 'stephan.oertelt@bmw.de,Hendrik.Schweppe@bmw.de,Klemens.Mang@bmw.de,Alexander.Wiedeck@bmw.de,carolin.brandl@bmw.de',
                     replyTo: 'coco-apm@bmw.de',
                     attachmentsPattern: '*.csv'
@@ -103,4 +103,4 @@
             cleanWs()
         }
     }
-}
+}

createReport.py

@@ -1,3 +1,5 @@
from operator import index
from textwrap import indent
from decouple import config
import yaml
import requests
@@ -5,6 +7,7 @@ import json
import pandas as pd
import time
from distutils.version import LooseVersion
from dynatraceAPI import Dynatrace

def make_request(url, headers):
    try:
@@ -21,13 +24,105 @@ def make_request(url, headers):
    return response

def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
def GatherAllProblems(DTAPIToken, DTENV,friendlyName):
    env = DTENV
    DTAPIToken = DTAPIToken
    DTAPIURL= env + "/api/v2/securityProblems/11497873967941161718/remediationItems"
    dtClient = Dynatrace(env, DTAPIToken)
    params = {"pageSize":500
              #"fields":'%2BriskAssessment'
              }
    secProblems = dtClient.returnPageination(f"/api/v2/securityProblems/1340823583484240022?fields=%2BriskAssessment", params, list_item="securityProblems")
    df = pd.DataFrame(secProblems.elements)
    df.to_csv(friendlyName + '_Allvulnerabilities.csv')
    return secProblems

def GatherRemediationItems(DTAPIToken, DTENV,friendlyName):
    tags = ["Environment", "Hub", "PaaS", "Cloud", "Platform","Namespace","compass-id","ms-id","app-id","app-name",
            "WK","Wirkkette","app","deployment","service","itsm-service","runtime.connected.bmw/managed-app-name",
            "controller-revision-hash","name2","statefulset.kubernetes.io/pod-name","stage","MCID"]
    env = DTENV
    DTAPIToken = DTAPIToken
    df = pd.DataFrame()
    dtClient = Dynatrace(env, DTAPIToken)
    dfEntities = pd.DataFrame()
    dfEntities['id']=""
    #secProblems = dtClient.returnPageination(f"/api/v2/securityProblems/1340823583484240022?fields=%2BriskAssessment", params, list_item="securityProblems")
    problem = dtClient.returnSingle(f"/api/v2/securityProblems/1340823583484240022?fields=%2BriskAssessment")
    print(problem)
    APIURL = "/api/v2/securityProblems/" + problem['securityProblemId'] + '/remediationItems'
    params = {'remediationItemSelector':'vulnerabilityState("VULNERABLE")'}
    remItems = dtClient.returnPageination(APIURL, params, list_item="remediationItems")
    for Item in remItems.elements:
        row={'CVE':problem['cveIds'],
             'Title':problem['title'],
             'riskLevel':problem['riskAssessment']['riskLevel'],
             'riskScore':'[' + str(problem['riskAssessment']['riskScore']) + ']',
             'Displayname':Item['name'],
             'State':Item['vulnerabilityState'],
             'muteState' : Item['muteState']['muted'],
             'exposure':Item['assessment']['exposure'],
             'dataAssets':Item['assessment']['dataAssets'],
             #'firstAffectedDate':time.strftime('%Y-%m-%d', time.localtime(Item['firstAffectedTimestamp']/1000)),
             #'firstAffectedTime':time.strftime('%H:%M:%S', time.localtime(Item['firstAffectedTimestamp']/1000))
             }
        i = 0
        for comp in Item['vulnerableComponents']:
            row.update({'vulnerableComponent_' + str(i): comp['displayName']})
        if (Item['id'] in dfEntities['id']):
            entity = dfEntities.loc[dfEntities['id'] == Item['id']]
            entity = entity.drop(['id'])
            row.update(entity)
        else:
            APIURL = '/api/v2/entities?entitySelector=entityId("' + Item['id'] + '")&from=now-1h&fields=+tags'
            params = {}
            enty = dtClient.returnPageination(APIURL,params=params,list_item="entities")
            try:
                if (len(enty) > 0):
                    for entity in enty.elements:
                        tagrow = {'id': entity['entityId']}
                        for tag in entity['tags']:
                            if tag['key'] in tags:
                                try:
                                    tagrow.update({ tag['key']:tag['value']})
                                    row.update({ tag['key']:tag['value']})
                                except:
                                    tagrow.update({ tag['key']:tag['key']})
                                    row.update({ tag['key']:tag['key']})
                        dfEntities = dfEntities.append(tagrow, ignore_index=True)
                        #print("adding host to cache " + str(entity['entityId']) )
                        #print("df len " + str(len(dfEntities.index)))
            except:
                print("no entity seen")
        df = df.append(row,ignore_index=True)
    df.to_csv(friendlyName + '_vulnerabilities.csv')

def GatherNew(DTAPIToken, DTENV,friendlyName):
    secProblems = GatherAllProblems(DTAPIToken, DTENV,friendlyName)
    GatherRemediationItems(DTAPIToken, DTENV,friendlyName, secProblems)
    return

def GatherReportingInfo(DTAPIToken, DTENV,friendlyName):
    env = DTENV
    DTAPIToken = DTAPIToken
    DTAPIURL= env + "/api/v2/securityProblems/1340823583484240022/remediationItems"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Api-Token ' + DTAPIToken
@@ -87,7 +182,7 @@ for item, doc in doc.items():
        DTTOKEN = config(token.get('env-token-name'))
        DTURL = url.get('env-url')
        GatherReportingInfo(DTTOKEN,DTURL,item)
        GatherRemediationItems(DTTOKEN,DTURL,item)
    else:
        print("token not found, skipping " + item)
        print("token not found, skipping " + item)

dynatraceAPI.py (new file, 41 lines)

@@ -0,0 +1,41 @@
import logging
from typing import Dict
import os
import sys

file_dir = os.path.dirname(__file__)
sys.path.append(file_dir)

from httpClient import HttpClient
from pagination import Pagionation


class Dynatrace:
    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
    ):
        self.__http_client = HttpClient(
            base_url, token, log, proxies, too_many_requests_strategy, retries, retry_delay_ms
        )

    def returnPageination(self, path, params, list_item):
        page = Pagionation(self.__http_client, path, params, list_item=list_item)
        return page

    def returnSingle(self, path):
        response = self.__http_client.make_request(path)
        json_response = response.json()
        return json_response
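A minimal usage sketch of this wrapper, mirroring how createReport.py drives it in the diff above; the tenant URL and token below are placeholders:

# Sketch only: base URL and token are placeholders; the problem ID and selector
# are the ones visible in createReport.py above.
from dynatraceAPI import Dynatrace

dt = Dynatrace("https://abc12345.live.dynatrace.com", "dt0c01.EXAMPLE-TOKEN")

# Single object: one security problem including its risk assessment.
problem = dt.returnSingle("/api/v2/securityProblems/1340823583484240022?fields=%2BriskAssessment")

# Paginated list: all currently vulnerable remediation items of that problem.
items = dt.returnPageination(
    "/api/v2/securityProblems/" + problem["securityProblemId"] + "/remediationItems",
    {"remediationItemSelector": 'vulnerabilityState("VULNERABLE")'},
    list_item="remediationItems",
)
for item in items.elements:
    print(item["name"], item["vulnerabilityState"])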

httpClient.py (new file, 116 lines)

@@ -0,0 +1,116 @@
import logging
from typing import Dict, Optional, Any
import time

import requests
import urllib3
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

TOO_MANY_REQUESTS_WAIT = "wait"


##Not sure where/why this is here
class DynatraceRetry(Retry):
    def get_backoff_time(self):
        return self.backoff_factor


class HttpClient:
    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
        #mc_jsession_id: Optional[str] = None,
        #mc_b925d32c: Optional[str] = None,
        #mc_sso_csrf_cookie: Optional[str] = None,
    ):
        while base_url.endswith("/"):
            base_url = base_url[:-1]
        self.base_url = base_url

        if proxies is None:
            proxies = {}
        self.proxies = proxies

        self.auth_header = {"Authorization": f"Api-Token {token}"}

        self.log = log
        if self.log is None:
            self.log = logging.getLogger(__name__)
            self.log.setLevel(logging.WARNING)
            st = logging.StreamHandler()
            fmt = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - %(thread)d - %(filename)s:%(lineno)d - %(message)s")
            st.setFormatter(fmt)
            self.log.addHandler(st)

        self.too_many_requests_strategy = too_many_requests_strategy

        retry_delay_s = retry_delay_ms / 1000
        try:
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                allowed_methods=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )
        except TypeError:  # Older version of urllib3?
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                method_whitelist=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )

        # This is for internal dynatrace usage
        #self.mc_jsession_id = mc_jsession_id
        #self.mc_b925d32c = mc_b925d32c
        #self.mc_sso_csrf_cookie = mc_sso_csrf_cookie

    def make_request(
        self, path: str, params: Optional[Any] = None, headers: Optional[Dict] = None, method="GET", data=None, files=None, query_params=None
    ) -> requests.Response:
        url = f"{self.base_url}{path}"

        body = None
        if method in ["POST", "PUT"]:
            body = params
            params = query_params

        if headers is None:
            headers = {}
        if files is None and "content-type" not in [key.lower() for key in headers.keys()]:
            headers.update({"content-type": "application/json"})
        headers.update(self.auth_header)

        cookies = None
        #if self.mc_b925d32c and self.mc_sso_csrf_cookie and self.mc_jsession_id:
        #    headers.update({"Cookie": f"JSESSIONID={self.mc_jsession_id}; ssoCSRFCookie={self.mc_sso_csrf_cookie}; b925d32c={self.mc_b925d32c}"})
        #    cookies = {"JSESSIONID": self.mc_jsession_id, "ssoCSRFCookie": self.mc_sso_csrf_cookie, "b925d32c": self.mc_b925d32c}

        s = requests.Session()
        s.mount("https://", HTTPAdapter(max_retries=self.retries))
        self.log.debug(f"Making {method} request to '{url}' with params {params} and body: {body}")
        r = s.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies, data=data, cookies=cookies, files=files)
        self.log.debug(f"Received response '{r}'")

        while r.status_code == 429 and self.too_many_requests_strategy == TOO_MANY_REQUESTS_WAIT:
            sleep_amount = int(r.headers.get("retry-after", 5))
            self.log.warning(f"Sleeping for {sleep_amount}s because we have received an HTTP 429")
            time.sleep(sleep_amount)
            r = requests.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies)

        if r.status_code >= 400:
            raise Exception(f"Error making request to {url}: {r}. Response: {r.text}")
        return r
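The client can also be used on its own; a short sketch assuming the module layout above (URL and token are placeholders):

# URL and token are placeholders; retries/retry_delay_ms feed the Retry object built in __init__,
# and TOO_MANY_REQUESTS_WAIT makes make_request() sleep on HTTP 429 using the retry-after header.
from httpClient import HttpClient, TOO_MANY_REQUESTS_WAIT

client = HttpClient(
    "https://abc12345.live.dynatrace.com",
    "dt0c01.EXAMPLE-TOKEN",
    too_many_requests_strategy=TOO_MANY_REQUESTS_WAIT,
    retries=3,
    retry_delay_ms=500,
)
response = client.make_request("/api/v2/securityProblems", params={"pageSize": 500})
print(response.json().get("totalCount"))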

pagination.py (new file, 69 lines)

@@ -0,0 +1,69 @@
from httpClient import HttpClient


class Pagionation():
    def __init__(self, http_client, target_url, target_params=None, headers=None, list_item="result"):
        #self.__target_class = target_class
        self.__http_client: HttpClient = http_client
        self.__target_url = target_url
        self.__target_params = target_params
        self.__headers = headers
        self.__list_item = list_item
        self._has_next_page = True
        self.__total_count = None
        self.__page_size = None
        self.elements = self._get_next_page()

    def __iter__(self):  # -> Iterator[T]:
        for element in self.elements:
            yield element
        while self._has_next_page:
            new_elements = self._get_next_page()
            for element in new_elements:
                yield element

    def __len__(self):
        return self.__total_count or len(self.elements)

    def _get_next_page(self):
        response = self.__http_client.make_request(self.__target_url, params=self.__target_params, headers=self.__headers)
        json_response = response.json()
        elements = []  # default to an empty page if the expected list key is missing
        if json_response.get("nextPageKey", None):
            self._has_next_page = True
            self.__target_params = {"nextPageKey": json_response["nextPageKey"]}
        else:
            self._has_next_page = False
        if self.__list_item in json_response:
            elements = json_response[self.__list_item]
            self.__total_count = json_response.get("totalCount") or len(elements)
        while self._has_next_page == True:
            self.__target_url = self.__target_url.split("?")[0]
            elements += self._get_response()
        return elements

    def _get_response(self):
        response = self.__http_client.make_request(self.__target_url, params=self.__target_params, headers=self.__headers)
        json_response = response.json()
        elements = []
        if json_response.get("nextPageKey", None):
            self._has_next_page = True
            self.__target_params = {"nextPageKey": json_response["nextPageKey"]}
        else:
            self._has_next_page = False
        if self.__list_item in json_response:
            elements = json_response[self.__list_item]
            self.__total_count = json_response.get("totalCount") or len(elements)
        return elements
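How the paginator behaves, as a sketch (client is an HttpClient as constructed above; the path and list_item mirror the securityProblems usage in createReport.py):

# The constructor fetches the first page, and _get_next_page() keeps following
# nextPageKey until the API stops returning one, so .elements holds all pages merged.
pager = Pagionation(
    client,                      # an HttpClient instance, see the sketch above
    "/api/v2/securityProblems",
    {"pageSize": 500},
    list_item="securityProblems",
)
print(len(pager))                # totalCount reported by the API, or the merged list length
for problem in pager.elements:
    print(problem["securityProblemId"])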