Skip to content

Commit

Permalink
lint
Browse files Browse the repository at this point in the history
  • Loading branch information
nine5two7 committed Oct 30, 2023
1 parent fbac793 commit 9eda892
Show file tree
Hide file tree
Showing 2 changed files with 61 additions and 57 deletions.
46 changes: 24 additions & 22 deletions aws/logs_monitoring/parsing.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,14 +279,14 @@ def find_cloudwatch_source(log_group):
return "rds"

if log_group.startswith(
(
# default location for rest api execution logs
"api-gateway", # e.g. Api-Gateway-Execution-Logs_xxxxxx/dev
# default location set by serverless framework for rest api access logs
"/aws/api-gateway", # e.g. /aws/api-gateway/my-project
# default location set by serverless framework for http api logs
"/aws/http-api", # e.g. /aws/http-api/my-project
)
(
# default location for rest api execution logs
"api-gateway", # e.g. Api-Gateway-Execution-Logs_xxxxxx/dev
# default location set by serverless framework for rest api access logs
"/aws/api-gateway", # e.g. /aws/api-gateway/my-project
# default location set by serverless framework for http api logs
"/aws/http-api", # e.g. /aws/http-api/my-project
)
):
return "apigateway"

Expand Down Expand Up @@ -410,7 +410,7 @@ def parse_service_arn(source, key, bucket, context):
# If there is a prefix on the S3 bucket, remove the prefix before splitting the key
if idsplit[0] != "AWSLogs":
try:
idsplit = idsplit[idsplit.index("AWSLogs"):]
idsplit = idsplit[idsplit.index("AWSLogs") :]
keysplit = "/".join(idsplit).split("_")
except ValueError:
logger.debug("Invalid S3 key, doesn't contain AWSLogs")
Expand Down Expand Up @@ -479,7 +479,7 @@ def get_step_machine_arn(message):
def awslogs_handler(event, context, metadata):
# Get logs
with gzip.GzipFile(
fileobj=BytesIO(base64.b64decode(event["awslogs"]["data"]))
fileobj=BytesIO(base64.b64decode(event["awslogs"]["data"]))
) as decompress_stream:
# Reading line by line avoid a bug where gzip would take a very long
# time (>5min) for file around 60MB gzipped
Expand Down Expand Up @@ -534,11 +534,13 @@ def awslogs_handler(event, context, metadata):
logger.debug("Unable to set verified-access log host: %s" % e)

if metadata[DD_SOURCE] == "stepfunction" and logs["logStream"].startswith(
"states/"
"states/"
):
state_machine_arn = ""
try:
state_machine_arn = get_state_machine_arn(json.loads(logs["logEvents"][0]["message"]))
state_machine_arn = get_state_machine_arn(
json.loads(logs["logEvents"][0]["message"])
)
if state_machine_arn: # not empty
metadata[DD_HOST] = state_machine_arn
except Exception as e:
Expand All @@ -554,8 +556,8 @@ def awslogs_handler(event, context, metadata):
",".join(formatted_stepfunctions_tags)
if not metadata[DD_CUSTOM_TAGS]
else metadata[DD_CUSTOM_TAGS]
+ ","
+ ",".join(formatted_stepfunctions_tags)
+ ","
+ ",".join(formatted_stepfunctions_tags)
)

# When parsing rds logs, use the cloudwatch log group name to derive the
Expand All @@ -565,7 +567,7 @@ def awslogs_handler(event, context, metadata):
if match is not None:
metadata[DD_HOST] = match.group("host")
metadata[DD_CUSTOM_TAGS] = (
metadata[DD_CUSTOM_TAGS] + ",logname:" + match.group("name")
metadata[DD_CUSTOM_TAGS] + ",logname:" + match.group("name")
)

# For Lambda logs we want to extract the function name,
Expand All @@ -586,8 +588,8 @@ def awslogs_handler(event, context, metadata):
aws_attributes = merge_dicts(aws_attributes, arn_attributes)

env_tag_exists = (
metadata[DD_CUSTOM_TAGS].startswith("env:")
or ",env:" in metadata[DD_CUSTOM_TAGS]
metadata[DD_CUSTOM_TAGS].startswith("env:")
or ",env:" in metadata[DD_CUSTOM_TAGS]
)
# If there is no env specified, default to env:none
if not env_tag_exists:
Expand Down Expand Up @@ -707,14 +709,14 @@ def parse_aws_waf_logs(event):

# Iterate through array of non-terminating rules and nest each under its own id
if "nonTerminatingMatchingRules" in rule_group and isinstance(
rule_group["nonTerminatingMatchingRules"], list
rule_group["nonTerminatingMatchingRules"], list
):
non_terminating_rules = rule_group.pop(
"nonTerminatingMatchingRules", None
)
if (
"nonTerminatingMatchingRules"
not in message["ruleGroupList"][group_id]
"nonTerminatingMatchingRules"
not in message["ruleGroupList"][group_id]
):
message["ruleGroupList"][group_id][
"nonTerminatingMatchingRules"
Expand All @@ -725,7 +727,7 @@ def parse_aws_waf_logs(event):

# Iterate through array of excluded rules and nest each under its own id
if "excludedRules" in rule_group and isinstance(
rule_group["excludedRules"], list
rule_group["excludedRules"], list
):
excluded_rules = rule_group.pop("excludedRules", None)
if "excludedRules" not in message["ruleGroupList"][group_id]:
Expand Down Expand Up @@ -775,7 +777,7 @@ def separate_security_hub_findings(event):
This prevents having an unparsable array of objects in the final log.
"""
if event.get(DD_SOURCE) != "securityhub" or not event.get("detail", {}).get(
"findings"
"findings"
):
return None
events = []
Expand Down
72 changes: 37 additions & 35 deletions aws/logs_monitoring/tests/test_parsing.py
Original file line number Diff line number Diff line change
Expand Up @@ -792,13 +792,13 @@ class TestAWSLogsHandler(unittest.TestCase):
@patch("base_tags_cache.send_forwarder_internal_metrics")
@patch("parsing.CloudwatchLogGroupTagsCache.get_cache_from_s3")
def test_awslogs_handler_rds_postgresql(
self,
mock_get_s3_cache,
mock_forward_metrics,
mock_write_cache,
mock_acquire_lock,
mock_release_lock,
mock_cache_get,
self,
mock_get_s3_cache,
mock_forward_metrics,
mock_write_cache,
mock_acquire_lock,
mock_release_lock,
mock_cache_get,
):
os.environ["DD_FETCH_LAMBDA_TAGS"] = "True"
os.environ["DD_FETCH_LOG_GROUP_TAGS"] = "True"
Expand Down Expand Up @@ -849,7 +849,7 @@ def test_awslogs_handler_rds_postgresql(
},
"id": "31953106606966983378809025079804211143289615424298221568",
"message": "2021-01-02 03:04:05 UTC::@:[5306]:LOG: database system is ready "
"to accept connections",
"to accept connections",
"timestamp": 1609556645000,
}
],
Expand All @@ -873,14 +873,14 @@ def test_awslogs_handler_rds_postgresql(
@patch("base_tags_cache.send_forwarder_internal_metrics")
@patch("parsing.StepFunctionsTagsCache.get_cache_from_s3")
def test_awslogs_handler_step_functions_tags_added_properly(
self,
mock_get_s3_cache,
mock_forward_metrics,
mock_write_cache,
mock_acquire_lock,
mock_release_lock,
mock_step_functions_cache_get,
mock_cw_log_group_cache_get,
self,
mock_get_s3_cache,
mock_forward_metrics,
mock_write_cache,
mock_acquire_lock,
mock_release_lock,
mock_step_functions_cache_get,
mock_cw_log_group_cache_get,
):
os.environ["DD_FETCH_LAMBDA_TAGS"] = "True"
os.environ["DD_FETCH_LOG_GROUP_TAGS"] = "True"
Expand Down Expand Up @@ -969,31 +969,33 @@ def test_get_service_from_tags_default_to_source(self):

class TestParsingStepFunctionLogs(unittest.TestCase):
    """Tests for deriving a state machine ARN from Step Functions log messages.

    Each log message carries an ``execution_arn``; the helper under test is
    expected to rewrite it into the parent state machine's ARN, regardless of
    whether the execution id is separated by ``:``, ``/`` or ``\\``.
    """

    def test_get_state_machine_arn(self):
        # A message without the expected key yields an empty string.
        invalid_sf_log_message = {"no_execution_arn": "xxxx/yyy"}
        self.assertEqual(get_state_machine_arn(invalid_sf_log_message), "")

        # Standard execution ARN using ':' separators.
        normal_sf_log_message = {
            "execution_arn": "arn:aws:states:sa-east-1:425362996713:express:my-Various-States:7f653fda-c79a-430b-91e2-3f97eb87cabb:862e5d40-a457-4ca2-a3c1-78485bd94d3f"
        }
        self.assertEqual(
            get_state_machine_arn(normal_sf_log_message),
            "arn:aws:states:sa-east-1:425362996713:stateMachine:my-Various-States",
        )

        # Execution ARN using a forward slash before the execution id.
        forward_slash_sf_log_message = {
            "execution_arn": "arn:aws:states:sa-east-1:425362996713:express:my-Various-States/7f653fda-c79a-430b-91e2-3f97eb87cabb:862e5d40-a457-4ca2-a3c1-78485bd94d3f"
        }
        self.assertEqual(
            get_state_machine_arn(forward_slash_sf_log_message),
            "arn:aws:states:sa-east-1:425362996713:stateMachine:my-Various-States",
        )

        # Execution ARN using a backslash before the execution id.
        back_slash_sf_log_message = {
            "execution_arn": "arn:aws:states:sa-east-1:425362996713:express:my-Various-States\\7f653fda-c79a-430b-91e2-3f97eb87cabb:862e5d40-a457-4ca2-a3c1-78485bd94d3f"
        }
        self.assertEqual(
            get_state_machine_arn(back_slash_sf_log_message),
            "arn:aws:states:sa-east-1:425362996713:stateMachine:my-Various-States",
        )


# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()

0 comments on commit 9eda892

Please sign in to comment.