Compare commits

...

10 Commits

Author SHA1 Message Date
PATRYK GUDALEWICZ (ext.) d7b23f220c requirements.txt edited online with Bitbucket
switching back to default import
2023-07-17 08:09:59 +00:00
PATRYK GUDALEWICZ (ext.) c4373471ce requirements.txt edited online with Bitbucket
Switching to pandas 2.0.2
2023-07-17 07:56:37 +00:00
PATRYK GUDALEWICZ (ext.) b4575321e2 requirements.txt edited online with Bitbucket
Testing fixed pandas version
2023-07-17 07:51:52 +00:00
Daniel Mikula e4ecba10d4 added try exception 2023-07-03 10:52:04 +02:00
Daniel Mikula a894b7a1d4 turned debug mode off 2023-06-30 08:35:32 +02:00
Daniel Mikula f421bf09c3 xlsx dateformat 2023-06-30 08:23:41 +02:00
Daniel Mikula b6b6a81c99 dateformat fix 2023-06-30 07:35:49 +02:00
Daniel Mikula b36c5066f3 kpi fix 2023-06-29 15:27:06 +02:00
Daniel Mikula db0b6aff1b count-7d calc issue 2023-06-28 08:41:01 +02:00
Daniel Mikula 5a203d7e66 count-7d None error 2023-06-28 08:20:26 +02:00
3 changed files with 115 additions and 48 deletions

4
.gitignore vendored
View File

@ -142,4 +142,6 @@ crash.log
# for dev
slo_parameter.yaml
metricexpressions.json
*.bak
*.bak
*.json
failed_requests.txt

View File

@ -65,6 +65,10 @@ class ReportReader:
columns = df.columns
if "Unnamed: 0" in columns:
df = df.drop("Unnamed: 0", axis=1)
if sheet_name != "total":
df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m-%d")
if DEBUG:
Helper.console_output(f"dtypes of {sheet_name}\n{df.dtypes}")
self.qm_report_df[sheet_name] = df
def get_sheet_names(self) -> typing.List:
@ -116,13 +120,7 @@ class QmReportWriter:
for slo_id in self.kpis[sheet][hub].keys():
for query in self.kpis[sheet][hub][slo_id]:
if len(query) > 0:
if (
query["result"] != "None"
and len(query["result"]) > 0
# and len(query["result"][0]["data"]) > 0
# and len(query["result"][0]["data"][0]["values"]) > 0
):
# values = query["result"][0]["data"][0]["values"][0]
if query["result"] != "None" and len(query["result"]) > 0:
values = query["api_result"]
mask = (
(self.report_dfs[sheet]["HUB"] == hub.split("-")[0])
@ -145,27 +143,31 @@ class QmReportWriter:
existing_value = self.report_dfs[sheet].loc[
mask, query["kpi_name"]
]
if not existing_value.empty:
existing_value = existing_value.iloc[0]
if existing_value is not None:
self.report_dfs[sheet].loc[
mask, query["kpi_name"]
] = (existing_value + values)
try:
if not existing_value.empty:
existing_value = existing_value.iloc[0]
if existing_value is not None:
self.report_dfs[sheet].loc[
mask, query["kpi_name"]
] = (existing_value + values)
else:
self.report_dfs[sheet].loc[
mask, query["kpi_name"]
] = values
else:
self.report_dfs[sheet].loc[
mask, query["kpi_name"]
] = values
else:
self.report_dfs[sheet].loc[
mask, query["kpi_name"]
] = values
except Exception as e:
print(f"_combine_dataset EXCEPTION: {e}")
# self.report_dfs[sheet].loc[
# mask, query["kpi_name"]
# ] = values
self._write_report_to_xlsx()
if DEBUG:
self._write_to_csv()
def _write_report_to_xlsx(self):
Helper.console_output("Writing XLSX")
@ -182,8 +184,19 @@ class QmReportWriter:
worksheet = writer.sheets[sheet_name]
worksheet.autofilter(0, 0, dataframe.shape[0], dataframe.shape[1] - 1)
# format date
if sheet_name != "total":
fmt = workbook.add_format({"num_format": "yyyy-mm-dd"})
for row, date_time in enumerate(dataframe["Date"], start=1):
worksheet.write_datetime(row, 0, date_time, fmt)
writer.close()
def _write_to_csv(self):
    """Dump every report sheet to a local ``test_<sheet>.csv`` file.

    Debug-only companion to the XLSX writer: one CSV per entry in
    ``self.report_dfs`` (sheet name -> DataFrame), index column omitted.
    """
    Helper.console_output("Writing CSV")
    for name in self.report_dfs:
        frame = self.report_dfs[name]
        frame.to_csv(f"test_{name}.csv", index=False)
class DynatraceDataGetter:
def __init__(self) -> None:
@ -261,6 +274,7 @@ class KPIGetter:
report_reader.run()
# Get SLO IDs from first sheet and build metric expression queries.
for i, sheet in enumerate(report_reader.qm_report_ids.keys()):
if i == 0:
for hub in report_reader.qm_report_ids[sheet].keys():
@ -413,23 +427,17 @@ class KPIGetter:
f'{row["HUB"]}-{row["type"]}'
][row["id"]]["services_transformed"],
)
# resolution 1d in daily
self.metric_expressions[sheet][f'{row["HUB"]}-{row["type"]}'][
row["id"]
].append(
# self._template_metric_expression(
# "kpi_1",
# metric_kpi1,
# from_timestamp_ms,
# to_timestamp_ms,
# timeframe,
# row["timeframe"],
# )
{
"kpi_name": "kpi_1",
"metric": metric_kpi1,
"from_date": from_timestamp_ms,
"to_date": to_timestamp_ms,
"resolution": timeframe,
# "resolution": timeframe,
"resolution": self._get_resolution_for_kpi_data(),
"timeframe": row["timeframe"],
}
)
@ -455,7 +463,14 @@ class KPIGetter:
# )
# )
if REPORT_TYPE == "day":
if (
REPORT_TYPE == "day"
or REPORT_TYPE == "month"
and sheet != "total"
or REPORT_TYPE == "week"
and sheet != "total"
):
# if REPORT_TYPE == "day":
metric_count_shifted = self._build_kpi_metric_for_query(
"count_shifted",
timeframe,
@ -650,6 +665,14 @@ class KPIGetter:
)
else:
if DEBUG:
Helper.console_output(
f"Nothing received for: {hub} - {slo} - {result} - {self.metric_expressions[sheet][hub][slo][index]['kpi_name']}"
)
with open("./failed_requests.txt", "a") as f:
f.write(
f"Nothing received for: {hub} - {slo}\n{json.dumps(result, indent=4)}\n{self.metric_expressions[sheet][hub][slo][index]['kpi_name']}\n{'-'*80}\n"
)
self.metric_expressions[sheet][hub][slo][index][
"result"
] = "None"
@ -671,24 +694,48 @@ class KPIGetter:
for data in result[0]["data"]:
result_values.append(data["values"][0])
return sum(result_values) / len(result_values)
elif result_type == "count-7d":
result_values = []
# option 2
# if any(elem is None for elem in result[0]["data"][0]["values"]):
# for value in result[0]["data"][0]["values"]:
# if value is not None:
# return value
# option 2 end
for value in result[0]["data"][0]["values"]:
if value == None:
result_values.append(0)
else:
result_values.append(value)
return int(sum(result_values) / len(result_values))
# elif result_type == "kpi_1":
# elif result_type == "count-7d":
# result_values = []
# option 2
# if any(elem is None for elem in result[0]["data"][0]["values"]):
# for value in result[0]["data"][0]["values"]:
# return sum(result_values) / len(result_values)
# if value is not None:
# return value
# option 2 end
elif result_type == "kpi_1" or result_type == "count-7d":
# 2nd value + none check
if len(result[0]["data"][0]["values"]) > 0:
if len(result[0]["data"][0]["values"]) == 2:
if (
result[0]["data"][0]["values"][1] != "None"
or result[0]["data"][0]["values"][1] != None
):
return result[0]["data"][0]["values"][1]
elif (
result[0]["data"][0]["values"][0] != "None"
or result[0]["data"][0]["values"][0] != None
):
return result[0]["data"][0]["values"][1]
else:
return "None"
else:
return result[0]["data"][0]["values"][0]
else:
if DEBUG:
Helper.console_output(
f"Extraction No Result: {result_type}\n{result}"
)
return "None"
# if len(result[0]["data"][0]["values"]) > 0:
# result_values = []
# for value in result[0]["data"][0]["values"]:
# if value == None:
# result_values.append(0)
# else:
# result_values.append(value)
# return sum(result_values) / len(result_values)
# else:
# return result[0]["data"][0]["values"][0]
# elif result_type == "count":
# # DEBUG
# Helper.console_output(result[0]["data"])
@ -798,6 +845,14 @@ class KPIGetter:
if REPORT_TYPE == "month":
return "1M"
def _get_resolution_for_kpi_data(self) -> str:
    """Map the module-level ``REPORT_TYPE`` to a metric resolution string.

    Returns ``"1d"`` / ``"1w"`` / ``"1M"`` for ``"day"`` / ``"week"`` /
    ``"month"`` respectively.

    NOTE(review): like the original if-chain, any other REPORT_TYPE value
    yields ``None`` despite the ``-> str`` annotation — confirm callers
    tolerate that before tightening it.
    """
    resolution_by_report_type = {
        "day": "1d",
        "week": "1w",
        "month": "1M",
    }
    # dict.get returns None for unknown keys, matching the original
    # implicit fall-through behavior exactly.
    return resolution_by_report_type.get(REPORT_TYPE)
def _build_kpi_metric_for_query(
self, kpi_type: str, timeframe: str, service: str = None, request: str = None
) -> typing.Union[str, bool]:
@ -1054,7 +1109,14 @@ class Helper:
Cleans up files created in debugging mode.
"""
Helper.console_output("Cleaning up debug files")
files = ["./metricexpressions.json", "./slo_results.txt", "./test.xlsx"]
files = [
"./metricexpressions.json",
"./slo_results.txt",
"./test.xlsx",
"./test_total.csv",
"./test_daily.csv",
"./failed_requests.txt",
]
for file in files:
if os.path.exists(file):
os.remove(file)
@ -1076,4 +1138,7 @@ def main():
if __name__ == "__main__":
main()
try:
main()
except Exception as e:
print(f"main EXCEPTION: {e}")

View File

@ -7,4 +7,4 @@ argparse
openpyxl
# git+https://atc.bmwgroup.net/bitbucket/scm/opapm/keyrequestparser.git
XlsxWriter==3.0.9
xlrd==2.0.1
xlrd==2.0.1