Compare commits

5 Commits
554f96700c ... a48b97607f

| Author | SHA1 | Date |
|---|---|---|
| | a48b97607f | |
| | 634484c05c | |
| | 22b702bf51 | |
| | a463e901ab | |
| | 2b7594e3e9 | |
@@ -1,16 +1,20 @@
 import json
-import logging
 import yaml
 import datetime
 import time
 import pandas as pd
-import warnings
 import os
 import dynatraceAPI
 import boto3
 from dateutil.relativedelta import relativedelta
 from dateutil.parser import parse
 from botocore.exceptions import NoCredentialsError
+from logger import setup_logger


+logger = setup_logger()
+os.environ["TZ"] = "Europe/Berlin"
+time.tzset()

 PATH_TO_CSVS = "/tmp"

@@ -58,7 +62,7 @@ def get_slo(
     dt_token, dt_url, from_date, to_date, selector_var, selector_type, header_name
 ):
     dt_client = dynatraceAPI.Dynatrace(
-        dt_url, dt_token, logging.Logger("ERROR"), None, None, 0, 2 * 1000
+        dt_url, dt_token, logger, None, None, 1, 1000
     )
     my_params_report = {
         "pageSize": 25,
@@ -104,6 +108,7 @@ def convert_date_input(value: str):


 def check_inputs(category: str, from_date_as_str: str, to_date_as_str: str):
     if category not in ["hourly", "daily", "weekly", "monthly"]:
+        logger.error("category not correctly selected")
         raise Exception("category not correctly selected")

     from_date = None
@@ -113,13 +118,16 @@ def check_inputs(category: str, from_date_as_str: str, to_date_as_str: str):
     if from_date_as_str:
         from_date = convert_date_input(from_date_as_str)
         if from_date > now:
+            logger.error("fromDate must be in the past or empty")
             raise Exception("fromDate must be in the past or empty")

     if to_date_as_str:
         to_date = convert_date_input(to_date_as_str)
         if not from_date:
+            logger.error("toDate can only be used in combination with fromDate")
             raise Exception("toDate can only be used in combination with fromDate")
         elif to_date <= from_date:
+            logger.error("toDate must be larger than fromDate")
             raise Exception("toDate must be larger than fromDate")

     if not to_date:
@@ -133,6 +141,7 @@ def check_inputs(category: str, from_date_as_str: str, to_date_as_str: str):
             from_date, to_date = get_month_range(from_date)

     if to_date > now:
+        logger.error(f"toDate '{str(to_date)}' cannot be in the future")
         raise Exception(f"toDate '{str(to_date)}' cannot be in the future")

     return category, from_date, to_date, bool(from_date_as_str)
@@ -150,23 +159,20 @@ def get_one_slice(
 ):
     df = pd.DataFrame()
     for index, row in slices.iterrows():
-        num_probs = len(slices)
-        percentage = str(round((100 * (index + 1)) / num_probs, 2)).split(".")
-        print(
-            "{:0>4d} von {:0>4d} = {:0>3d}.{:0>2d} %".format(
-                index + 1, num_probs, int(percentage[0]), int(percentage[1])
-            ),
-            end="\r",
-        )
-        temp_df = get_slo(
-            dt_token,
-            dt_url,
-            row["startTimestampMs"],
-            row["endTimestampMs"],
-            selector_var,
-            selector_type,
-            header_name,
-        )
+        try:
+            temp_df = get_slo(
+                dt_token,
+                dt_url,
+                row["startTimestampMs"],
+                row["endTimestampMs"],
+                selector_var,
+                selector_type,
+                header_name,
+            )
+        except Exception as e:
+            logger.error(f'Error while calling Dynatrace API for {header_name} in {item}-{env_type}, {row["startTimestampMs"]} - {row["endTimestampMs"]}: {str(e)}')
+            temp_df = pd.DataFrame({'description': ['']})
         temp_df["startTimestampMs"] = row["startTimestampMs"]
         temp_df["endTimestampMs"] = row["endTimestampMs"]
         temp_df["HUB"] = item
@@ -179,15 +185,14 @@ def get_one_slice(
                 "_", expand=True
             )
         except Exception as e:
-            print(f"This error was encountered : {e}")
-    print() # newline to remove \r from progress bar
+            logger.error(f"This error was encountered for {header_name} in {item}-{env_type}: {e}")
     return df


 def load_slo_parameter():
     mandatory_fields = ["hubs", "selector_type", "selector_var", "yearstart"]
     all_yaml_configs = []
-    print(f"loading SLOs from AWS SM")
+    logger.info(f"loading SLOs from AWS SM")
     sm = boto3.client('secretsmanager')
     yaml_string = sm.get_secret_value(SecretId=os.environ['SLO_SECRET_ARN']).get('SecretString')
     slo_doc = yaml.safe_load(yaml_string)
@@ -195,7 +200,7 @@ def load_slo_parameter():
     for header_name, configs in slo_doc.items():
         tmp_dict = {}
         if not len(slo_doc[header_name]) == 15:
-            print(f"Slo Configuration {header_name} is broken")
+            logger.error(f"Slo Configuration {header_name} does not match the expected size")
             continue
         for key, value in configs.items():
             tmp_dict.update({key: value})
@@ -215,7 +220,7 @@ def load_slo_parameter():
                 [hub, selector_type, selector_var, year_start, header_name]
             )
         else:
-            print(f"Slo Configuration {header_name} is broken")
+            logger.error(f"Slo Configuration {header_name} is broken")

     return all_yaml_configs

@@ -224,7 +229,7 @@ def write_slo_to_csv(file_name: str, df: pd.DataFrame):
     try:
         df = df[COLUMNS_IN_CSV]
     except Exception as e:
-        print("Could not rearrange columns: " + str(e))
+        logger.error("Could not rearrange columns: " + str(e))

     csvName = "".join([PATH_TO_CSVS, "/", file_name, ".csv"])

@@ -249,20 +254,20 @@ def get_files_for_upload(root_dir, filetype):
 def remove_files_from_tmp(files):
     for file in files:
         os.remove(os.path.join(PATH_TO_CSVS, file))
-    print(f"cleared tmp-storage. removed: {files}")
+    logger.info(f"cleared tmp-storage. removed: {files}")


 def upload_to_aws(files):
-    print(f"files to upload: {len(files)}")
+    logger.info(f"files to upload: {len(files)}")
     s3 = boto3.client('s3')
     for file in files:
         try:
             s3.upload_file(PATH_TO_CSVS + "/" + file, os.environ['SOURCE_BUCKET'], file)
-            print(f"Upload of file '{file}' successful")
-        except FileNotFoundError:
-            print("The file was not found")
-        except NoCredentialsError:
-            print("S3 Credentials not available")
+            logger.info(f"Upload of file '{file}' successful")
+        except FileNotFoundError as e:
+            logger.error(f"The file was not found: {str(e)}")
+        except NoCredentialsError as e:
+            logger.error(f"S3 Credentials not available: {str(e)}")


 def get_dt_environments():
@@ -276,7 +281,7 @@ def create_report_files(category: str, from_date: datetime.datetime, to_date: da
     df = df[df["Touchpoint"].isin(["Vehicle", "Mobile"])]
     fileName = f"{category}/QM_Report_{'custom' if custom else 'cron'}_"
     if custom:
-        fileName += f"{from_date.strftime('%Y-%m-%dT%H:%M')}_{to_date.strftime('%Y-%m-%dT%H:%M')}"
+        fileName += f"{from_date.strftime('%Y-%m-%d_%H-%M')}_{to_date.strftime('%Y-%m-%d_%H-%M')}"
     else:
         if category == "hourly":
             fileName += from_date.strftime('%Y-%m-%d')
@@ -288,62 +293,73 @@ def create_report_files(category: str, from_date: datetime.datetime, to_date: da
             fileName += str(from_date.year) + "-" + str(from_date.month)
         write_slo_to_csv(fileName, df)
     else:
-        print("No files found")
+        logger.warning("No files found")


+def publish_error_message(exception, event):
+    timestamp = datetime.datetime.now().isoformat()
+    message = {
+        'function': os.environ['AWS_LAMBDA_FUNCTION_NAME'],
+        'timestamp': timestamp,
+        'event': event,
+        'error': str(exception)
+    }
+    message_json = json.dumps(message)
+
+    logger.error(f"publishing the following error via SNS: {message_json}")
+    sns = boto3.client('sns')
+    response = sns.publish(TopicArn=os.environ['ERROR_SNS_ARN'], Message=message_json)
+    logger.error("publish successful")
+    return response
+
+
 def lambda_handler(event, _):
+    logger.info("invoked by event: " + str(event))
     try:
-        os.environ["TZ"] = "Europe/Berlin" # set new timezone
-        time.tzset()
+        category, fromDate, toDate, is_custom_event = define_parameters(event)
+        logger.info(f"running lambda on category '{category}': {str(fromDate)} - {str(toDate)}")
+        slo_configs = load_slo_parameter()
+        dt_environments = get_dt_environments()
+
+        final_df = pd.DataFrame()
+
+        for one_slo_config in slo_configs:
+            hub, selector_type, selector_var, yearstart, header_name = one_slo_config
+            logger.info(f"SLO: '{header_name}'")
+            for item, doc in dt_environments.items():
+                if item not in hub:
+                    logger.info(f"{item} will be skipped since it is not in {hub}.")
+                    continue
+                if doc["env-token"] == "":
+                    logger.warning("token not found, skipping " + item)
+                    continue
+                logger.info(f"Gather data for {item}...")
+
+                if category == "hourly":
+                    slices = get_slices(fromDate, toDate, interval_hours=1)
+                elif category == "daily":
+                    slices = get_slices(fromDate, interval_days=1)
+                elif category == "weekly":
+                    slices = get_slices(fromDate, interval_weeks=1)
+                else:  # monthly
+                    slices = get_slices(fromDate, interval_months=1)
+
+                df = get_one_slice(
+                    doc["name"],
+                    doc["env-token"],
+                    doc["env-url"],
+                    slices,
+                    selector_var,
+                    selector_type,
+                    header_name,
+                    doc["type"]
+                )
+                final_df = pd.concat([final_df, df], ignore_index=True)
+
+        create_report_files(category, fromDate, toDate, final_df, is_custom_event)
+        files = get_files_for_upload(PATH_TO_CSVS, "csv")
+        upload_to_aws(files)
+        remove_files_from_tmp(files)
     except Exception as e:
-        print(f"This error was encountered : {e}")
-
-    category, fromDate, toDate, is_custom_event = define_parameters(event)
-    print("running lambda on category " + category)
-    print("fromDate: " + str(fromDate))
-    print("toDate: " + str(toDate))
-
-    slo_configs = load_slo_parameter()
-    dt_environments = get_dt_environments()
-
-    final_df = pd.DataFrame()
-
-    for one_slo_config in slo_configs:
-        hub, selector_type, selector_var, yearstart, header_name = one_slo_config
-        print(f"SLO: '{header_name}'")
-        for item, doc in dt_environments.items():
-            if item not in hub:
-                print(f"{item} will be skipped since it is not in {hub}.")
-                continue
-            if doc["env-token"] == "":
-                print("token not found, skipping " + item)
-                continue
-            print(f"Gather data for {item}...")
-
-            if category == "hourly":
-                slices = get_slices(fromDate, toDate, interval_hours=1)
-            elif category == "daily":
-                slices = get_slices(fromDate, interval_days=1)
-            elif category == "weekly":
-                slices = get_slices(fromDate, interval_weeks=1)
-            else:  # monthly
-                slices = get_slices(fromDate, interval_months=1)
-
-            df = get_one_slice(
-                doc["name"],
-                doc["env-token"],
-                doc["env-url"],
-                slices,
-                selector_var,
-                selector_type,
-                header_name,
-                doc["type"]
-            )
-            final_df = pd.concat([final_df, df], ignore_index=True)
-
-    print("\n")
-    create_report_files(category, fromDate, toDate, final_df, is_custom_event)
-    files = get_files_for_upload(PATH_TO_CSVS, "csv")
-    upload_to_aws(files)
-    remove_files_from_tmp(files)
+        publish_error_message(e, event)
     return
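For context on the new error path: any exception raised inside the wrapped handler body is serialized and pushed to an SNS error topic instead of only being printed. Below is a minimal, hedged sketch of that flow run outside of Lambda; the topic ARN and function name are illustrative placeholders, and the snippet assumes valid AWS credentials and an existing topic.

```python
import datetime
import json

import boto3

# Illustrative placeholders for the environment variables the Lambda relies on.
ERROR_SNS_ARN = "arn:aws:sns:eu-west-1:111111111111:errors-in-lambdas"  # hypothetical ARN
FUNCTION_NAME = "dt-cdh-lambda"                                         # hypothetical name


def publish_error_message(exception, event):
    """Mirrors the added helper: wrap the failure as JSON and publish it to SNS."""
    message = {
        "function": FUNCTION_NAME,
        "timestamp": datetime.datetime.now().isoformat(),
        "event": event,
        "error": str(exception),
    }
    sns = boto3.client("sns")
    return sns.publish(TopicArn=ERROR_SNS_ARN, Message=json.dumps(message))


# Any failure in the handler body now ends up on the error topic as well as in the logs.
try:
    raise Exception("category not correctly selected")
except Exception as exc:
    publish_error_message(exc, {"type": "hourly"})
```

The notification Lambda added later in this change set subscribes to that topic and, for now, only prints what it receives.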
@@ -0,0 +1,18 @@
+import logging
+import os
+
+
+def setup_logger():
+    logger = logging.getLogger()
+    logger.handlers = []
+
+    level = os.environ['LOG_LEVEL']
+    if not level:
+        level = "INFO"
+    logger.setLevel(level)
+
+    logger_handler = logging.StreamHandler()
+    logger_handler.setFormatter(logging.Formatter('[%(levelname)s] %(message)s'))
+    logger.addHandler(logger_handler)
+
+    return logger
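A short usage sketch for the new logger module, assuming it is importable as `logger`; note that this variant reads `LOG_LEVEL` straight from the environment, so the variable has to be present when `setup_logger()` is called.

```python
import os

from logger import setup_logger  # the module introduced in this hunk

os.environ["LOG_LEVEL"] = "DEBUG"  # required here: this variant does not guard a missing variable

logger = setup_logger()
logger.debug("emitted because LOG_LEVEL is DEBUG")
logger.info("rendered as '[INFO] ...' by the StreamHandler formatter")
```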
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+echo "Executing create_pkg.sh..."
+
+FILE=./src/requirements.txt
+target_folder=layer/
+
+cd $path_cwd
+mkdir $target_folder
+
+virtualenv -p $runtime env_$function_name
+source ./env_$function_name/bin/activate
+
+pwd
+echo "Installing dependencies from ${FILE}"
+if [ -f "$FILE" ]; then
+  pip install -r "$FILE"
+else
+  echo "Error: requirement.txt does not exist!"
+fi
+
+source deactivate
+
+echo "Creating deployment package..."
+cp -r env_$function_name/lib/$runtime/site-packages/ ./$target_folder/python
+
+echo "Removing virtual environment folder..."
+rm -rf ./env_$function_name
+
+echo "Finished script execution!"
@@ -0,0 +1,3 @@
+def lambda_handler(event, context):
+    print(event)
+    print(context)
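The new notification handler only prints its input for now. As a hedged illustration of where it could go, and assuming the standard SNS-to-Lambda event envelope delivered by the error-topic subscription configured further down, the forwarded error report could be unpacked like this (the field names mirror the publisher above; none of this is part of the change itself):

```python
import json


def lambda_handler(event, context):
    # Standard SNS -> Lambda envelope: each record wraps the published message as a string.
    for record in event.get("Records", []):
        raw_message = record.get("Sns", {}).get("Message", "{}")
        try:
            error_report = json.loads(raw_message)
        except json.JSONDecodeError:
            error_report = {"error": raw_message}
        print(f"error in {error_report.get('function')}: {error_report.get('error')}")
```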
@@ -6,6 +6,7 @@ import yaml
 import re
 from logging import Logger


 class Concatenator:
     source_columns = ["startTimestampMs", "endTimestampMs", "HUB", "id", "name", "evaluatedPercentage", "status"]
+
@@ -1,9 +1,18 @@
 import logging
+import os


-def setup_logger(level=logging.INFO):
+def setup_logger():
     logger = logging.getLogger()
     logger.handlers = []

+    try:
+        level = os.environ['LOG_LEVEL']
+        if not level:
+            raise Exception("Log Level not found in ENVs - Default INFO used")
+    except Exception:
+        level = "INFO"
+
     logger.setLevel(level)

     logger_handler = logging.StreamHandler()
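Unlike the first variant, this `setup_logger()` tolerates a missing `LOG_LEVEL` and falls back to INFO. A small check of that behaviour, assuming the module is importable as `logger`:

```python
import logging
import os

from logger import setup_logger  # the try/except variant from this hunk

os.environ.pop("LOG_LEVEL", None)            # simulate a deployment without the variable
assert setup_logger().level == logging.INFO  # the except branch falls back to "INFO"

os.environ["LOG_LEVEL"] = "WARNING"
assert setup_logger().level == logging.WARNING
```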
@@ -1,6 +1,7 @@
 import os
 import time
 import boto3
+import json
 from io import StringIO
 import pandas as pd
 from concatenator import Concatenator
@@ -8,7 +9,6 @@ from helper import Helper
 from logger import setup_logger
-
 logger = setup_logger()

 os.environ["TZ"] = "Europe/Berlin"
 time.tzset()
 KNOWN_TYPES = ['hourly', 'daily', 'weekly', 'monthly']
@@ -44,6 +44,7 @@ def extract_event_details(event):


 def get_s3_object_as_df(bucket, key) -> pd.DataFrame:
+    logger.info(f"fetching s3 file '{key}' from {bucket}")
     s3 = boto3.client('s3')

     csv_obj = s3.get_object(Bucket=bucket, Key=key)
@@ -54,16 +55,19 @@ def get_s3_object_as_df(bucket, key) -> pd.DataFrame:


 def get_target_df(bucket, prefix) -> tuple[pd.DataFrame, str]:
+    folder = "service_metrics/"
+    full_key_name = f"{folder}service_metrics_{prefix}.csv"
+
     s3 = boto3.client('s3')
     files_by_prefix = s3.list_objects_v2(
         Bucket=bucket,
-        Prefix=(prefix + '/'))
+        Prefix=folder)
     if files_by_prefix.get('KeyCount') > 0:
-        csv_files = [x for x in files_by_prefix.get('Contents', []) if x['Key'].endswith(".csv")]
-        if len(csv_files) > 0:
-            file_name = csv_files[0]['Key']
-            return get_s3_object_as_df(bucket, file_name), file_name
-    return pd.DataFrame(), prefix + "/current.csv"
+        if full_key_name in [x['Key'] for x in files_by_prefix.get('Contents', []) if 'Key' in x]:
+            logger.info(f"Target csv file '{full_key_name}' already existing - using this one")
+            return get_s3_object_as_df(bucket, full_key_name), full_key_name
+    logger.warn(f"no target csv file existing - using empty file with name '{full_key_name}'")
+    return pd.DataFrame(), full_key_name


 def upload_to_aws(df, file_name):
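The reworked lookup no longer grabs the first CSV found under the prefix; it derives a fixed key from the prefix and reuses the existing file only if exactly that key is present. A hedged sketch of the key check, with an illustrative bucket name:

```python
import boto3


def target_key_exists(bucket: str, prefix: str) -> tuple[bool, str]:
    """Recreates the new lookup: build the expected key, then look for it in the folder listing."""
    folder = "service_metrics/"
    full_key_name = f"{folder}service_metrics_{prefix}.csv"

    s3 = boto3.client("s3")
    listing = s3.list_objects_v2(Bucket=bucket, Prefix=folder)
    existing_keys = [obj["Key"] for obj in listing.get("Contents", [])]
    return full_key_name in existing_keys, full_key_name


# "cdh-vehicle-qm-report-pre" is a hypothetical bucket name.
exists, key = target_key_exists("cdh-vehicle-qm-report-pre", "hourly")
print(f"{key} {'already exists' if exists else 'will start as an empty DataFrame'}")
```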
@@ -81,26 +85,36 @@ def upload_to_aws(df, file_name):
         raise Exception()


+def publish_error_message(error):
+    logger.error("publishing the following error via SNS: ", error)
+    sns = boto3.client('sns')
+    response = sns.publish(TopicArn=os.environ['ERROR_SNS_ARN'], Message=json.dumps(error))
+    logger.error("publish successful")
+    return response
+
+
 def lambda_handler(event, _):
     logger.info("invoked by event: " + str(event))
-    file, prefix, special = extract_event_details(event)
-
-    if not prefix:
-        logger.warn("No prefix value detected but obligatory - ending invocation")
-        raise Exception()
-
-    target_df, file_name = get_target_df(os.environ['PREPARED_BUCKET'], prefix)
-
-    if special == "find_gaps":
-        logger.info(f"Manual invocation to find missing gaps in target '{prefix}'")
-        Helper(logger, target_df, prefix).find_and_log_gaps()
-    else:
-        source_df = get_s3_object_as_df(os.environ['SOURCE_BUCKET'], file)
-        logger.info(f"source: '{file}' with {len(source_df)} new rows")
-        logger.info(f"target: '{file_name}' with {len(target_df)} previous rows")
-
-        updated_target = Concatenator(logger, target_df, source_df, prefix).get_combined_object()
-        logger.info(f"updated target includes {len(updated_target)} rows")
-
-        upload_to_aws(updated_target, file_name)
+    try:
+        file, prefix, special = extract_event_details(event)
+        if not prefix:
+            logger.warn("No prefix value detected but obligatory - ending invocation")
+            raise Exception()
+
+        target_df, file_name = get_target_df(os.environ['PREPARED_BUCKET'], prefix)
+
+        if special == "find_gaps":
+            logger.info(f"Manual invocation to find missing gaps in target '{prefix}'")
+            Helper(logger, target_df, prefix).find_and_log_gaps()
+        else:
+            source_df = get_s3_object_as_df(os.environ['SOURCE_BUCKET'], file)
+            logger.info(f"source: '{file}' with {len(source_df)} new rows")
+            logger.info(f"target: '{file_name}' with {len(target_df)} previous rows")
+
+            updated_target = Concatenator(logger, target_df, source_df, prefix).get_combined_object()
+            logger.info(f"updated target includes {len(updated_target)} rows")
+
+            upload_to_aws(updated_target, file_name)
+    except Exception as e:
+        publish_error_message(e)
     return
@@ -16,6 +16,10 @@ locals {
     s3PreparedLayerArn = "arn:aws:s3:::cdh-vehicle-qm-report-pre-r1qu"
     kmsPreparedKey = "arn:aws:kms:eu-west-1:468747793086:key/a42e3ec9-4a32-45ef-819b-1c72a0886511"
     snsPreparedTopic = "arn:aws:sns:eu-west-1:403259800741:cdh-vehicle-qm-report-pre-r1qu"
+    snsErrorCatcher = "errors-in-lambdas"
+    snsAlertingName = "internal-notification-sns"
+    lambdaNotififcationName = "dt-cdh-lambda-notification"
+    logLevel = "INFO"
     }
   }
 }
@@ -7,6 +7,7 @@ module "dt-cdh-lambda" {
   tags    = var.tags
   name    = local.env.lambdaName
   timeout = 900
+  layersByArn = ["arn:aws:lambda:eu-west-1:336392948345:layer:AWSSDKPandas-Python39:7"]
   additional_permissions = [
     {
       actions = ["S3:*"]
|
||||||
}, {
|
}, {
|
||||||
actions = ["secretsmanager:GetSecretValue", "secretsmanager:DescribeSecret"]
|
actions = ["secretsmanager:GetSecretValue", "secretsmanager:DescribeSecret"]
|
||||||
resources = [module.lambda_secret_dt.secret_arn, module.lambda_secret_slo.secret_arn]
|
resources = [module.lambda_secret_dt.secret_arn, module.lambda_secret_slo.secret_arn]
|
||||||
|
}, {
|
||||||
|
actions = ["sns:Publish"]
|
||||||
|
resources = [module.error_catcher_sns.arn]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
environment_vars = {
|
environment_vars = {
|
||||||
SOURCE_BUCKET = local.env.s3SourceLayer
|
SOURCE_BUCKET = local.env.s3SourceLayer
|
||||||
ENV_SECRET_ARN = module.lambda_secret_dt.secret_arn
|
ENV_SECRET_ARN = module.lambda_secret_dt.secret_arn
|
||||||
SLO_SECRET_ARN = module.lambda_secret_slo.secret_arn
|
SLO_SECRET_ARN = module.lambda_secret_slo.secret_arn
|
||||||
|
ERROR_SNS_ARN = module.error_catcher_sns.arn
|
||||||
|
LOG_LEVEL = local.env.logLevel
|
||||||
}
|
}
|
||||||
description = ""
|
description = ""
|
||||||
handler = "createReport.lambda_handler"
|
handler = "createReport.lambda_handler"
|
||||||
|
|
@ -86,6 +92,7 @@ module "dt-cdh-lambda-prepared" {
|
||||||
tags = var.tags
|
tags = var.tags
|
||||||
name = local.env.lambdaPreparedName
|
name = local.env.lambdaPreparedName
|
||||||
timeout = 900
|
timeout = 900
|
||||||
|
layersByArn = ["arn:aws:lambda:eu-west-1:336392948345:layer:AWSSDKPandas-Python39:7"]
|
||||||
reserved_concurrent_executions = 1
|
reserved_concurrent_executions = 1
|
||||||
additional_permissions = [
|
additional_permissions = [
|
||||||
{
|
{
|
||||||
|
|
@ -103,12 +110,17 @@ module "dt-cdh-lambda-prepared" {
|
||||||
}, {
|
}, {
|
||||||
actions = ["sns:Subscribe"]
|
actions = ["sns:Subscribe"]
|
||||||
resources = [local.env.snsSourceTopic]
|
resources = [local.env.snsSourceTopic]
|
||||||
|
}, {
|
||||||
|
actions = ["sns:Publish"]
|
||||||
|
resources = [module.error_catcher_sns.arn]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
environment_vars = {
|
environment_vars = {
|
||||||
SOURCE_BUCKET = local.env.s3SourceLayer
|
SOURCE_BUCKET = local.env.s3SourceLayer
|
||||||
PREPARED_BUCKET = local.env.s3PreparedLayer
|
PREPARED_BUCKET = local.env.s3PreparedLayer
|
||||||
SLO_SECRET_ARN = module.lambda_secret_slo.secret_arn
|
SLO_SECRET_ARN = module.lambda_secret_slo.secret_arn
|
||||||
|
ERROR_SNS_ARN = module.error_catcher_sns.arn
|
||||||
|
LOG_LEVEL = local.env.logLevel
|
||||||
}
|
}
|
||||||
description = ""
|
description = ""
|
||||||
handler = "reformatData.lambda_handler"
|
handler = "reformatData.lambda_handler"
|
||||||
|
|
@ -120,6 +132,68 @@ module "dt-cdh-lambda-prepared" {
|
||||||
topic = local.env.snsSourceTopic
|
topic = local.env.snsSourceTopic
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module "error_catcher_sns" {
|
||||||
|
source = "../../../modules/sns"
|
||||||
|
region = var.region
|
||||||
|
environment = var.environment
|
||||||
|
project = var.project
|
||||||
|
main_module = var.main_module
|
||||||
|
tags = var.tags
|
||||||
|
name = local.env.snsErrorCatcher
|
||||||
|
kms_id = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
module "dt-cdh-lambda-notification" {
|
||||||
|
source = "../../../modules/lambda"
|
||||||
|
region = var.region
|
||||||
|
environment = var.environment
|
||||||
|
project = var.project
|
||||||
|
main_module = var.main_module
|
||||||
|
tags = var.tags
|
||||||
|
name = local.env.lambdaNotififcationName
|
||||||
|
timeout = 900
|
||||||
|
reserved_concurrent_executions = 1
|
||||||
|
additional_permissions = [
|
||||||
|
{
|
||||||
|
actions = ["sns:Publish"]
|
||||||
|
resources = [module.notification_sns.arn]
|
||||||
|
}, {
|
||||||
|
actions = ["sns:Subscribe"]
|
||||||
|
resources = [module.error_catcher_sns.arn]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
environment_vars = {
|
||||||
|
LOG_LEVEL = local.env.logLevel
|
||||||
|
SNS_ARN = module.notification_sns.arn
|
||||||
|
}
|
||||||
|
description = ""
|
||||||
|
handler = "notifyHandler.lambda_handler"
|
||||||
|
memory = 2048
|
||||||
|
path_to_function = "../../apps/cdh-maas/main/lambda_notification"
|
||||||
|
runtime = "python3.9"
|
||||||
|
snsTrigger = {
|
||||||
|
source = {
|
||||||
|
topic = module.error_catcher_sns.arn
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module "notification_sns" {
|
||||||
|
source = "../../../modules/sns"
|
||||||
|
region = var.region
|
||||||
|
environment = var.environment
|
||||||
|
project = var.project
|
||||||
|
main_module = var.main_module
|
||||||
|
tags = var.tags
|
||||||
|
name = local.env.snsAlertingName
|
||||||
|
kms_id = ""
|
||||||
|
subscriptions = {
|
||||||
|
email = {
|
||||||
|
endpoint = "peter.oehmichen@partner.bmw.de"
|
||||||
|
protocol = "email"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@@ -1,11 +1,10 @@
-resource "null_resource" "pip_install" {
+resource "null_resource" "install_dependencies" {
   triggers = {
     # shell_hash = "${sha256(file("${path.module}/${var.path_to_function}/src/requirements.txt"))}"
    always_run = timestamp()
   }
   provisioner "local-exec" {
     command = "bash ${path.module}/${var.path_to_function}/create_pkg.sh"
-
     environment = {
       function_name = module.lambda_label.id
       runtime       = var.runtime
@@ -14,39 +13,39 @@ resource "null_resource" "pip_install" {
   }
 }

-data "archive_file" "layer" {
+data "archive_file" "dependencies" {
   type        = "zip"
   source_dir  = "${path.module}/${var.path_to_function}/layer"
   output_path = "${path.module}/${var.path_to_function}/layer.zip"
-  depends_on  = [null_resource.pip_install]
+  depends_on  = [null_resource.install_dependencies]
 }

-resource "aws_lambda_layer_version" "layer" {
+resource "aws_lambda_layer_version" "dependencies" {
   layer_name          = "dependencies"
-  filename            = data.archive_file.layer.output_path
-  source_code_hash    = data.archive_file.layer.output_base64sha256
+  filename            = data.archive_file.dependencies.output_path
+  source_code_hash    = data.archive_file.dependencies.output_base64sha256
   compatible_runtimes = ["python3.9"]
 }

-data "archive_file" "code" {
+data "archive_file" "lambda_code" {
   type        = "zip"
   source_dir  = "${path.module}/${var.path_to_function}/src"
   output_path = "${path.module}/${var.path_to_function}/lambda.zip"
 }

 resource "aws_lambda_function" "this" {
-  filename         = data.archive_file.code.output_path
+  filename         = data.archive_file.lambda_code.output_path
   function_name    = module.lambda_label.id
   description      = var.description
   role             = aws_iam_role.this.arn
   handler          = var.handler
-  source_code_hash = data.archive_file.code.output_base64sha256
+  source_code_hash = data.archive_file.lambda_code.output_base64sha256
   runtime          = var.runtime
   memory_size      = var.memory
   timeout          = var.timeout
   reserved_concurrent_executions = var.reserved_concurrent_executions
   tags             = module.lambda_label.tags
-  layers           = [aws_lambda_layer_version.layer.arn, "arn:aws:lambda:eu-west-1:336392948345:layer:AWSSDKPandas-Python39:7"]
+  layers           = flatten([aws_lambda_layer_version.dependencies.arn, var.layersByArn])
   tracing_config {
     mode = "PassThrough"
   }
@@ -60,6 +59,7 @@ resource "aws_cloudwatch_event_rule" "this" {
   name                = "${module.lambda_label.id}_${each.key}"
   description         = each.value.description
   schedule_expression = each.value.schedule
+
 }

 resource "aws_cloudwatch_event_target" "this" {
|
||||||
source_arn = aws_cloudwatch_event_rule.this[each.key].arn
|
source_arn = aws_cloudwatch_event_rule.this[each.key].arn
|
||||||
}
|
}
|
||||||
|
|
||||||
#resource "aws_s3_bucket_notification" "this" {
|
|
||||||
# for_each = var.s3Trigger
|
|
||||||
# bucket = each.value.bucket
|
|
||||||
# lambda_function {
|
|
||||||
# lambda_function_arn = aws_lambda_function.this.arn
|
|
||||||
# events = ["s3:ObjectCreated:*"]
|
|
||||||
# }
|
|
||||||
#}
|
|
||||||
|
|
||||||
resource "aws_sns_topic_subscription" "invoke_with_sns" {
|
resource "aws_sns_topic_subscription" "invoke_with_sns" {
|
||||||
for_each = var.snsTrigger
|
for_each = var.snsTrigger
|
||||||
topic_arn = each.value.topic
|
topic_arn = each.value.topic
|
||||||
protocol = "lambda"
|
protocol = "lambda"
|
||||||
endpoint = aws_lambda_function.this.arn
|
endpoint = aws_lambda_function.this.arn
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resource "aws_cloudwatch_log_subscription_filter" "this" {
|
||||||
|
for_each = var.logTrigger
|
||||||
|
destination_arn = aws_lambda_function.this.arn
|
||||||
|
filter_pattern = each.value.filter_pattern
|
||||||
|
log_group_name = each.value.log_group
|
||||||
|
name = each.key
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "aws_lambda_permission" "cw_invocation" {
|
||||||
|
for_each = var.logTrigger
|
||||||
|
statement_id = "AllowExecutionFromCloudWatch${each.key}"
|
||||||
|
action = "lambda:InvokeFunction"
|
||||||
|
function_name = aws_lambda_function.this.function_name
|
||||||
|
principal = "logs.amazonaws.com"
|
||||||
|
}
|
||||||
|
|
@@ -26,6 +26,11 @@ variable "timeout" {
   type = number
 }

+variable "layersByArn" {
+  type    = list(string)
+  default = []
+}
+
 variable "environment_vars" {
   type = map(string)
 }
@@ -42,6 +47,9 @@ variable "snsTrigger" {
   default = {}
 }

+variable "logTrigger" {
+  default = {}
+}
 variable "reserved_concurrent_executions" {
   type    = number
   default = -1
@@ -0,0 +1,11 @@
+module "sns_label" {
+  source      = "../label"
+  environment = var.environment
+  project     = var.project
+  main_module = var.main_module
+  region      = var.region
+  tags        = var.tags
+  name        = var.name
+  attributes  = ["sns"]
+}
@@ -0,0 +1,54 @@
+resource "aws_sns_topic" "this" {
+  name              = module.sns_label.id
+  kms_master_key_id = var.kms_id
+  tags              = module.sns_label.tags
+}
+
+resource "aws_sns_topic_subscription" "subscription" {
+  for_each  = var.subscriptions
+  endpoint  = each.value.endpoint
+  protocol  = each.value.protocol
+  topic_arn = aws_sns_topic.this.arn
+}
+
+resource "aws_sns_topic_policy" "default" {
+  arn    = aws_sns_topic.this.arn
+  policy = data.aws_iam_policy_document.this.json
+}
+
+data "aws_iam_policy_document" "this" {
+  policy_id = module.sns_label.id
+
+  statement {
+    actions = [
+      "SNS:Subscribe",
+      "SNS:SetTopicAttributes",
+      "SNS:RemovePermission",
+      "SNS:Receive",
+      "SNS:Publish",
+      "SNS:ListSubscriptionsByTopic",
+      "SNS:GetTopicAttributes",
+      "SNS:DeleteTopic",
+      "SNS:AddPermission",
+    ]
+
+    condition {
+      test     = "StringEquals"
+      variable = "AWS:SourceOwner"
+      values   = var.allowed_accounts
+    }
+
+    effect = "Allow"
+
+    principals {
+      type        = "AWS"
+      identifiers = ["*"]
+    }
+
+    resources = [
+      aws_sns_topic.this.arn,
+    ]
+
+    sid = module.sns_label.id
+  }
+}
@@ -0,0 +1,3 @@
+output "arn" {
+  value = aws_sns_topic.this.arn
+}
@@ -0,0 +1,30 @@
+variable "project" {
+  description = "Project whom the stack belongs to"
+  type        = string
+}
+
+variable "environment" {
+  description = "Deployment environment/stage (e.g. test,e2e, prod or int)"
+  type        = string
+}
+
+variable "region" {
+  description = "AWS Region in which the Terraform backend components shall be deployed"
+  type        = string
+}
+
+variable "main_module" {
+  description = "Functional resource to deploy"
+  type        = string
+}
+
+variable "label_delimiter" {
+  description = "Delimiter for naming all resources"
+  type        = string
+  default     = "-"
+}
+
+variable "tags" {
+  description = "A mapping of tags to assign to the resource"
+  type        = map(string)
+}
@@ -0,0 +1,17 @@
+variable "kms_id" {
+  type = string
+}
+
+variable "name" {
+  type = string
+}
+
+variable "subscriptions" {
+  default = {}
+}
+
+variable "allowed_accounts" {
+  type    = list(string)
+  default = []
+}