added compass id, lastSeenTms, and container name
parent 1e7871cf93
commit 6110b34384
@@ -138,4 +138,7 @@ crash.log
 *.xlsx
+# json files
+*.json
+*.json

 # backup files
 *.bak
create_report.py
@@ -98,10 +98,22 @@ def get_data_from_dynatrace(
         print(f"ERROR - {host_response.status_code}")


+def previous_week_range(date: datetime):
+    start_date = date + timedelta(-date.weekday(), weeks=-1)
+    end_date = date + timedelta(-date.weekday() - 1)
+    return start_date, end_date
+
+
+def check_if_service_already_exists(services: list, entity_id: str) -> bool:
+    """
+    Requests point to the same service. This leads to double entries but we only need the data once.
+
+    Args:
+        services (list): List with services
+        entity_id (str): Entity Id for lookup
+
+    Returns:
+        bool: Returns True if the service is already present else False.
+    """
+    result = False
+    for service in services:
+        if service["entityId"] == entity_id:
+            result = True
+    return result


 def get_process_group_data(df: pd.DataFrame) -> typing.Dict:
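Note: the new previous_week_range helper returns the Monday-through-Sunday bounds of the calendar week before the given date (datetime.weekday() counts Monday as 0). A minimal, self-contained sketch of that behavior, using a made-up reference date:

    from datetime import datetime, timedelta

    def previous_week_range(date: datetime):
        start_date = date + timedelta(-date.weekday(), weeks=-1)  # Monday of last week
        end_date = date + timedelta(-date.weekday() - 1)  # Sunday of last week
        return start_date, end_date

    # 2024-05-15 is a Wednesday; the previous week ran Mon 2024-05-06 .. Sun 2024-05-12.
    start, end = previous_week_range(datetime(2024, 5, 15))
    assert (start.day, end.day) == (6, 12)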
@@ -136,14 +148,14 @@ def get_process_group_data(df: pd.DataFrame) -> typing.Dict:
     for hub in unique_hubs:
         unique_process_groups_per_hub[hub] = {}

-        hub_value = hub
-        process_groups_unique = df.query(f"environment == @hub_value")
+        # hub_value = hub
+        process_groups_unique = df.query(f"environment == @hub")

         process_groups_unique = process_groups_unique["process_group_id"].unique()
         for process_group in process_groups_unique:
             params = {
                 "entitySelector": f'type("PROCESS_GROUP"),entityId("{process_group}")',
-                "fields": "firstSeenTms,tags",
+                "fields": "firstSeenTms,lastSeenTms,tags",
             }
             data = get_data_from_dynatrace(
                 0.1, hub_data[hub]["token"], hub_data[hub]["url"], params, "entities"
@@ -200,79 +212,6 @@ def write_xlsx(df: pd.DataFrame) -> None:
     writer.close()


-def develop_load_json():
-    with open("test-data-with-hosts-main.json", "r") as f:
-        data = json.loads(f.read())
-
-    df_data = []
-
-    for hub in data:
-        for slo in data[hub]:
-            slo_name = data[hub][slo]["sloname"]
-            if len(data[hub][slo]["services"]) > 0:
-                for service in data[hub][slo]["services"]:
-                    if len(service["entities"]) > 0:
-                        for entity in service["entities"]:
-                            if "fromRelationships" in entity:
-                                if "runsOnHost" in entity["fromRelationships"]:
-                                    for host in entity["fromRelationships"][
-                                        "runsOnHost"
-                                    ]:
-                                        df_data_item = {
-                                            "slo_name": slo_name,
-                                            "host_name": host["details"]["displayName"],
-                                            "host_id": host["id"],
-                                            "environment": hub,
-                                            "process_group_id": "",
-                                            "process_group_name": "",
-                                            "licensing_tag_host": "",
-                                            "licensing_tag_process_group": "",
-                                            "first_seen_process_group": "",
-                                            "first_seen_host": host["details"][
-                                                "firstSeenTms"
-                                            ],
-                                        }
-
-                                        for tag in host["details"]["tags"]:
-                                            if tag["key"] == "Platform":
-                                                df_data_item["platform"] = tag["value"]
-                                            if tag["key"] == "Namespace":
-                                                df_data_item["namespace"] = tag["value"]
-                                            if tag["key"] == "PaaS":
-                                                df_data_item["paas"] = tag["value"]
-
-                                        # TODO: rework - add else. so datastructure is complete
-                                        if "runsOn" in entity["fromRelationships"]:
-                                            for process_group in entity[
-                                                "fromRelationships"
-                                            ]["runsOn"]:
-                                                df_data_item[
-                                                    "process_group_id"
-                                                ] = process_group["id"]
-
-                                        df_data.append(df_data_item)
-
-    build_dataframe_for_report(df_data)
-
-
-def check_if_service_already_exists(services: list, entity_id: str) -> bool:
-    """
-    Requests point to the same service. This leads to double entries but we only need the data once.
-
-    Args:
-        services (list): List with services
-        entity_id (str): Entity Id for lookup
-
-    Returns:
-        bool: Returns True if the service is already present else False.
-    """
-    result = False
-    for service in services:
-        if service["entityId"] == entity_id:
-            result = True
-    return result
-
-
 def build_dataframe_data(data: typing.Dict) -> None:
     """
     This function builds the data for the dataframe, which will be used to generate the report. Contains all data but process_groups.
@@ -289,6 +228,27 @@ def build_dataframe_data(data: typing.Dict) -> None:
                 for service in data[hub][slo]["services"]:
                     if len(service["entities"]) > 0:
                         for entity in service["entities"]:
+                            # get compass id of service here. in tags
+                            compass_id_service = []
+                            if "tags" in entity:
+                                for tag in entity["tags"]:
+                                    if tag["key"] == "compass-id":
+                                        compass_id_service.append(tag["value"])
+                            compass_id_service = ",".join(compass_id_service)
+                            # get container name here
+                            container_name = "None"
+                            if "properties" in entity:
+                                if "softwareTechnologies" in entity["properties"]:
+                                    for technology in entity["properties"][
+                                        "softwareTechnologies"
+                                    ]:
+                                        if (
+                                            technology["type"] == "DOCKER"
+                                            or technology["type"] == "CONTAINERD"
+                                        ):
+                                            container_name = entity["properties"][
+                                                "detectedName"
+                                            ]
                             if "fromRelationships" in entity:
                                 if "runsOnHost" in entity["fromRelationships"]:
                                     for host in entity["fromRelationships"][
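Note: the compass-id and container-name lookups above assume a Dynatrace entity payload where tags is a list of {"key", "value"} objects and properties.softwareTechnologies lists the detected runtimes. A sketch of the shape the new code expects (all values are made up):

    entity = {
        "tags": [{"key": "compass-id", "value": "12345"}],  # hypothetical tag value
        "properties": {
            "detectedName": "my-container",  # read when a container runtime is found
            "softwareTechnologies": [{"type": "DOCKER"}],
        },
    }

Worth noting: this loop reads tag["value"] unguarded, while the host-tag loop added further down checks `if "value" in tag` first; Dynatrace tags can be key-only.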
@@ -299,6 +259,7 @@ def build_dataframe_data(data: typing.Dict) -> None:
                                             "host_name": host["details"]["displayName"],
                                             "host_id": host["id"],
                                             "environment": hub,
+                                            "container_name": container_name,
                                             "process_group_id": "",
                                             "process_group_name": "",
                                             "licensing_tag_host": "",
@@ -307,15 +268,36 @@ def build_dataframe_data(data: typing.Dict) -> None:
                                             "first_seen_host": host["details"][
                                                 "firstSeenTms"
                                             ],
+                                            "last_seen_host": host["details"][
+                                                "lastSeenTms"
+                                            ],
+                                            "compass_id_host": "",
+                                            "compass_id_service": compass_id_service,
                                         }

+                                        compass_id = []
+                                        namespace = []
+
                                         for tag in host["details"]["tags"]:
                                             if tag["key"] == "Platform":
                                                 df_data_item["platform"] = tag["value"]
                                             if tag["key"] == "Namespace":
-                                                df_data_item["namespace"] = tag["value"]
+                                                # df_data_item["namespace"] = tag["value"]
+                                                namespace.append(tag["value"])
                                             if tag["key"] == "PaaS":
                                                 df_data_item["paas"] = tag["value"]
+                                            if tag["key"] == "compass-id":
+                                                # df_data_item["compass_id"] = tag[
+                                                #     "value"
+                                                # ]
+                                                if "value" in tag:
+                                                    compass_id.append(tag["value"])
+
+                                        df_data_item["compass_id_host"] = ",".join(
+                                            compass_id
+                                        )
+
+                                        df_data_item["namespace"] = ",".join(namespace)

                                         # TODO: rework
                                         if "runsOn" in entity["fromRelationships"]:
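Note: the change above collects repeated Namespace and compass-id host tags into lists and comma-joins them, instead of letting the last tag win. A small runnable illustration with invented tag data:

    host_tags = [
        {"key": "Namespace", "value": "ns-a"},
        {"key": "Namespace", "value": "ns-b"},
        {"key": "compass-id"},  # key-only tag, no "value"
    ]
    namespace, compass_id = [], []
    for tag in host_tags:
        if tag["key"] == "Namespace":
            namespace.append(tag["value"])
        if tag["key"] == "compass-id" and "value" in tag:
            compass_id.append(tag["value"])
    assert ",".join(namespace) == "ns-a,ns-b"
    assert ",".join(compass_id) == ""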
@@ -336,7 +318,7 @@ def main() -> None:
     """
     Entrypoint.
     """
-    throttling_rate: int | float = 0.25 # only tested with 0.5
+    throttling_rate: int | float = 0 # only tested with 0.5
     reportItem = {}
     with open("./environment.yaml") as file:
         env_doc = yaml.safe_load(file)
@@ -353,16 +335,27 @@ def main() -> None:
         # krp = krparser.KRParser(krparser.KROption.VALIDATE_EXISTS | krparser.KROption.VALIDATE_HASDATA ,DTURL, DTTOKEN)

         slosF = get_slo(env, DTTOKEN, DTURL)
-        #slosF = slosF[slosF["id"]=="9c5b0581-acc2-3e70-97d3-531700f78b65"]
+        # slosF = slosF[slosF["id"]=="9c5b0581-acc2-3e70-97d3-531700f78b65"]
         slosF = slosF[slosF["name"].str.startswith("TP_")]

         # parse the metric Expression to get Services and Requests

         krs = []
-        krp = krparser.KRParser(options=krparser.KROption.RESOLVEKEYREQUETS | krparser.KROption.RESOLVESERVICES, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)
+        # krp = krparser.KRParser(options=krparser.KROption.RESOLVEKEYREQUETS | krparser.KROption.RESOLVESERVICES, DTAPIURL=DTURL, DTAPIToken=DTTOKEN)

-        krs=krp.parseBySLO_Threaded(slosF)
+        krp = krparser.KRParser(
+            name=env,
+            options=krparser.KROption.RESOLVESERVICES,
+            config={
+                "threads": 10,
+                "serviceLookupParams": {"fields": "tags,fromRelationships"},
+                "extendResultObjects": {"env": env},
+            },
+            DTAPIURL=DTURL,
+            DTAPIToken=DTTOKEN,
+        )
+
+        krs = krp.parse(slosF)

         reportItem[str(env)] = {}
@@ -395,9 +388,7 @@ def main() -> None:
                            == 0
                        ):
                            if not check_if_service_already_exists(
-                                reportItem[str(env)][kr.metadata["sloName"]][
-                                    "services"
-                                ],
+                                reportItem[env][kr.metadata["sloName"]]["services"],
                                service["entityId"],
                            ):
                                reportItem[str(env)][kr.metadata["sloName"]][
@@ -409,6 +400,7 @@ def main() -> None:
                                        "entityId": service["entityId"],
                                    }
                                )
+
                            if (
                                len(
                                    reportItem[str(env)][kr.metadata["sloName"]][
@@ -427,7 +419,7 @@ def main() -> None:
                        ]["services"]:
                            params = {
                                "entitySelector": f'type("SERVICE"),entityId("{service["entityId"]}")',
-                                "fields": "fromRelationships,tags",
+                                "fields": "fromRelationships,tags,properties",
                            }
                            entities = get_data_from_dynatrace(
                                throttling_rate,
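Note: get_data_from_dynatrace is this script's own wrapper (defined earlier in create_report.py); the request it makes presumably amounts to a Dynatrace Entities v2 call along these lines. A hedged sketch, where the URL, token, and entity id are placeholders:

    import requests

    def fetch_entities(url: str, token: str, params: dict) -> dict:
        # Entities v2 endpoint; adding "properties" to "fields" is what makes
        # softwareTechnologies/detectedName available to the container lookup above.
        response = requests.get(
            f"{url}/api/v2/entities",
            headers={"Authorization": f"Api-Token {token}"},  # Dynatrace API-token auth
            params=params,
            timeout=30,
        )
        response.raise_for_status()
        return response.json()

    entities = fetch_entities(
        "https://example.live.dynatrace.com",  # placeholder environment URL
        "dt0c01.XXXX",  # placeholder token
        {
            "entitySelector": 'type("SERVICE"),entityId("SERVICE-0000000000000000")',
            "fields": "fromRelationships,tags,properties",
        },
    )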
@@ -436,6 +428,7 @@ def main() -> None:
                                params,
                                "entities",
                            )
+                            print(entities["entities"])
                            # TODO: it is possible that "entities" is empty. maybe create check.
                            service["entities"] = entities["entities"]
                            for hosts in service["entities"]:
@@ -458,11 +451,8 @@ def main() -> None:
                            )
                        )
                        host["details"] = host_response

    build_dataframe_data(reportItem)


 if __name__ == "__main__":
     main()
-
-    main()