adding DynatraceAPIClient
parent 4a1d22c212
commit dae384caf2
@@ -0,0 +1,41 @@
import logging
from typing import Dict
import os
import sys

# Make the sibling modules importable regardless of the caller's working directory.
file_dir = os.path.dirname(__file__)
sys.path.append(file_dir)

from httpClient import HttpClient
from pagination import Pagination


class Dynatrace:
    """Thin wrapper around HttpClient for single and paginated Dynatrace API calls."""

    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
    ):
        self.__http_client = HttpClient(
            base_url, token, log, proxies, too_many_requests_strategy, retries, retry_delay_ms
        )

    def returnPagination(self, path, params, list_item):
        # Return a Pagination object that follows nextPageKey for the given endpoint.
        page = Pagination(self.__http_client, path, params, list_item=list_item)
        return page

    def returnSingle(self, path):
        # Make a single GET request and return the parsed JSON body.
        response = self.__http_client.make_request(path)
        json_response = response.json()
        return json_response
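A minimal usage sketch (not part of this commit), assuming the module above is saved as dynatrace.py and using placeholder values for the environment URL, API token, and endpoint paths:

# Hypothetical usage; URL, token, and endpoint paths are placeholders.
from dynatrace import Dynatrace

dt = Dynatrace("https://example.live.dynatrace.com", "dt0c01.EXAMPLE_TOKEN", retries=3, retry_delay_ms=500)

# Single request: returns the parsed JSON body.
version = dt.returnSingle("/api/v1/config/clusterversion")

# Paginated request: the returned Pagination object follows nextPageKey automatically.
for entity in dt.returnPagination("/api/v2/entities", {"entitySelector": "type(HOST)"}, list_item="entities"):
    print(entity)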
@@ -0,0 +1,116 @@
import logging
from typing import Dict, Optional, Any
import time

import requests
import urllib3
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter

# Requests are made with verify=False below, so silence the resulting warnings.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


TOO_MANY_REQUESTS_WAIT = "wait"


# Not currently used: a Retry subclass with a constant (non-exponential) backoff.
class DynatraceRetry(Retry):
    def get_backoff_time(self):
        return self.backoff_factor


class HttpClient:
    def __init__(
        self,
        base_url: str,
        token: str,
        log: logging.Logger = None,
        proxies: Dict = None,
        too_many_requests_strategy=None,
        retries: int = 0,
        retry_delay_ms: int = 0,
        # mc_jsession_id: Optional[str] = None,
        # mc_b925d32c: Optional[str] = None,
        # mc_sso_csrf_cookie: Optional[str] = None,
    ):
        # Normalise the base URL so request paths can always start with "/".
        while base_url.endswith("/"):
            base_url = base_url[:-1]
        self.base_url = base_url

        if proxies is None:
            proxies = {}
        self.proxies = proxies

        self.auth_header = {"Authorization": f"Api-Token {token}"}
        self.log = log
        if self.log is None:
            self.log = logging.getLogger(__name__)
            self.log.setLevel(logging.WARNING)
            st = logging.StreamHandler()
            fmt = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - %(thread)d - %(filename)s:%(lineno)d - %(message)s")
            st.setFormatter(fmt)
            self.log.addHandler(st)

        self.too_many_requests_strategy = too_many_requests_strategy
        retry_delay_s = retry_delay_ms / 1000

        try:
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                allowed_methods=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )
        except TypeError:  # Older versions of urllib3 use method_whitelist instead of allowed_methods.
            self.retries = Retry(
                total=retries,
                backoff_factor=retry_delay_s,
                status_forcelist=[400, 401, 403, 404, 413, 429, 500, 502, 503, 504],
                method_whitelist=["TRACE", "PUT", "DELETE", "OPTIONS", "HEAD", "GET", "POST"],
                raise_on_status=False,
            )

        # This is for internal Dynatrace usage.
        # self.mc_jsession_id = mc_jsession_id
        # self.mc_b925d32c = mc_b925d32c
        # self.mc_sso_csrf_cookie = mc_sso_csrf_cookie

    def make_request(
        self, path: str, params: Optional[Any] = None, headers: Optional[Dict] = None, method="GET", data=None, files=None, query_params=None
    ) -> requests.Response:
        url = f"{self.base_url}{path}"

        # For POST/PUT, `params` carries the JSON body and `query_params` the query string.
        body = None
        if method in ["POST", "PUT"]:
            body = params
            params = query_params

        if headers is None:
            headers = {}
        if files is None and "content-type" not in [key.lower() for key in headers.keys()]:
            headers.update({"content-type": "application/json"})
        headers.update(self.auth_header)

        cookies = None
        # if self.mc_b925d32c and self.mc_sso_csrf_cookie and self.mc_jsession_id:
        #     headers.update({"Cookie": f"JSESSIONID={self.mc_jsession_id}; ssoCSRFCookie={self.mc_sso_csrf_cookie}; b925d32c={self.mc_b925d32c}"})
        #     cookies = {"JSESSIONID": self.mc_jsession_id, "ssoCSRFCookie": self.mc_sso_csrf_cookie, "b925d32c": self.mc_b925d32c}

        s = requests.Session()
        s.mount("https://", HTTPAdapter(max_retries=self.retries))

        self.log.debug(f"Making {method} request to '{url}' with params {params} and body: {body}")
        r = s.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies, data=data, cookies=cookies, files=files)
        self.log.debug(f"Received response '{r}'")

        # Optionally wait and repeat the same request on HTTP 429, honouring the retry-after header.
        while r.status_code == 429 and self.too_many_requests_strategy == TOO_MANY_REQUESTS_WAIT:
            sleep_amount = int(r.headers.get("retry-after", 5))
            self.log.warning(f"Sleeping for {sleep_amount}s because we have received an HTTP 429")
            time.sleep(sleep_amount)
            r = s.request(method, url, headers=headers, params=params, json=body, verify=False, proxies=self.proxies, data=data, cookies=cookies, files=files)

        if r.status_code >= 400:
            raise Exception(f"Error making request to {url}: {r}. Response: {r.text}")

        return r
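A sketch of how the retry behaviour above would be configured directly (not part of this commit; the module is assumed to be saved as httpClient.py, and the URL, token, and endpoint are placeholders):

# Hypothetical configuration; URL, token, and endpoint are placeholders.
from httpClient import HttpClient, TOO_MANY_REQUESTS_WAIT

client = HttpClient(
    "https://example.live.dynatrace.com",
    "dt0c01.EXAMPLE_TOKEN",
    too_many_requests_strategy=TOO_MANY_REQUESTS_WAIT,  # sleep on HTTP 429 for the retry-after period
    retries=3,              # total urllib3 retries mounted on the session
    retry_delay_ms=1000,    # converted to a backoff_factor of 1.0 seconds
)
response = client.make_request("/api/v2/metrics", params={"pageSize": 100})
print(response.json())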
@@ -0,0 +1,69 @@
from httpClient import HttpClient


class Pagination():
    """Collects every page of a paginated Dynatrace API response by following nextPageKey."""

    def __init__(self, http_client, target_url, target_params=None, headers=None, list_item="result"):
        # self.__target_class = target_class
        self.__http_client: HttpClient = http_client
        self.__target_url = target_url
        self.__target_params = target_params
        self.__headers = headers
        self.__list_item = list_item
        self._has_next_page = True
        self.__total_count = None
        self.__page_size = None

        # Fetch the first page (and, through _get_next_page, any remaining pages).
        self.__elements = self._get_next_page()

    def __iter__(self):
        for element in self.__elements:
            yield element

        # If any pages remain unfetched, keep following nextPageKey while iterating.
        while self._has_next_page:
            new_elements = self._get_next_page()
            for element in new_elements:
                yield element

    def __len__(self):
        return self.__total_count or len(self.__elements)

    def _get_next_page(self):
        elements = self._get_response()

        # _get_response updates _has_next_page; keep requesting until the API stops
        # returning a nextPageKey, accumulating every page's elements.
        while self._has_next_page:
            # Drop any query string; nextPageKey becomes the only parameter on follow-up calls.
            self.__target_url = self.__target_url.split("?")[0]
            elements += self._get_response()

        return elements

    def _get_response(self):
        response = self.__http_client.make_request(self.__target_url, params=self.__target_params, headers=self.__headers)
        json_response = response.json()

        if json_response.get("nextPageKey", None):
            self._has_next_page = True
            self.__target_params = {"nextPageKey": json_response["nextPageKey"]}
        else:
            self._has_next_page = False

        elements = []
        if self.__list_item in json_response:
            elements = json_response[self.__list_item]
            self.__total_count = json_response.get("totalCount") or len(elements)

        return elements
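A short sketch of using Pagination directly (not part of this commit; the URL, token, endpoint, and selector are placeholders). Note that the constructor already walks every page via _get_next_page, so len() and iteration operate on the fully collected result:

# Hypothetical usage; URL, token, endpoint, and selector are placeholders.
from httpClient import HttpClient
from pagination import Pagination

http = HttpClient("https://example.live.dynatrace.com", "dt0c01.EXAMPLE_TOKEN")
hosts = Pagination(http, "/api/v2/entities", {"entitySelector": "type(HOST)"}, list_item="entities")

print(len(hosts))   # totalCount reported by the API, or the number of collected elements
for host in hosts:  # every element from every page, collected by following nextPageKey
    print(host)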