refactor(s3): Move all cache files to the cache/ folder (#833)
* ci: Refine variable for enabling get resources tags

Signed-off-by: Vincent Boutour <[email protected]>

* chore: Using a common dir for cache stuff

Signed-off-by: Vincent Boutour <[email protected]>

* chore: Fixing the failed_events folder prefix

Signed-off-by: Vincent Boutour <[email protected]>

---------

Signed-off-by: Vincent Boutour <[email protected]>
ViBiOh authored Aug 20, 2024
1 parent 94fda6f commit ef89890
Showing 8 changed files with 68 additions and 39 deletions.
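
At a glance, the refactor consolidates every cache artifact the forwarder keeps in its S3 bucket under a single cache/ directory, and aligns the IAM policy with the failed_events/ prefix the retry code actually writes. A minimal sketch of the key layout before and after, assuming a hypothetical cache prefix "myforwarder" (derived from the constants and key-building code changed below):

# Sketch of the S3 key layout change in this commit. The cache prefix
# "myforwarder" and the trailing "..." segments are hypothetical placeholders.

CACHE_PREFIX = "myforwarder"

old_keys = {
    "lambda_tags": f"{CACHE_PREFIX}_cache.json",         # bucket root
    "s3_tags": f"{CACHE_PREFIX}_s3-cache.json",          # bucket root
    "log_group_tags": "log-group-cache/...",             # own top-level dir
    "failed_events": "failed_events/...",                # IAM policy said "retry/*"
}

new_keys = {
    "lambda_tags": f"cache/{CACHE_PREFIX}_lambda.json",  # under cache/
    "s3_tags": f"cache/{CACHE_PREFIX}_s3.json",          # under cache/
    "log_group_tags": "cache/log-group/...",             # nested under cache/
    "failed_events": "failed_events/...",                # unchanged; IAM policy now matches
}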
22 changes: 19 additions & 3 deletions .github/workflows/integration_test.yml
@@ -1,5 +1,18 @@
 ---
 name: Integration tests
 
+permissions:
+  actions: none
+  checks: none
+  contents: read
+  deployments: none
+  issues: none
+  packages: none
+  pages: none
+  pull-requests: none
+  repository-projects: none
+  security-events: none
+
 on: [pull_request]
 
 jobs:
@@ -9,10 +22,13 @@ jobs:
     strategy:
       max-parallel: 4
       matrix:
-        python-version: ['3.10', '3.11']
+        python-version: ["3.10", "3.11"]
     steps:
-      - name: Checkout source
-        uses: actions/checkout@v3
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          fetch-depth: 0
 
       - name: Run integration tests
         run: |
18 changes: 11 additions & 7 deletions aws/logs_monitoring/caching/base_tags_cache.py
@@ -1,16 +1,19 @@
-import os
-import logging
 import json
-from time import time
+import logging
+import os
 from random import randint
+from time import time
+
 import boto3
 from botocore.exceptions import ClientError
+
+from caching.common import get_last_modified_time
 from settings import (
     DD_S3_BUCKET_NAME,
-    DD_TAGS_CACHE_TTL_SECONDS,
+    DD_S3_CACHE_DIRNAME,
     DD_S3_CACHE_LOCK_TTL_SECONDS,
+    DD_TAGS_CACHE_TTL_SECONDS,
 )
-from caching.common import get_last_modified_time
 from telemetry import send_forwarder_internal_metrics
 
 JITTER_MIN = 1
@@ -26,6 +29,7 @@ def __init__(
         cache_lock_filename,
         tags_ttl_seconds=DD_TAGS_CACHE_TTL_SECONDS,
     ):
+        self.cache_dirname = DD_S3_CACHE_DIRNAME
         self.tags_ttl_seconds = tags_ttl_seconds
         self.tags_by_id = {}
         self.last_tags_fetch_time = 0
@@ -43,10 +47,10 @@ def get_resources_paginator(self):
         return self.resource_tagging_client.get_paginator("get_resources")
 
     def get_cache_name_with_prefix(self):
-        return f"{self.cache_prefix}_{self.cache_filename}"
+        return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_filename}"
 
     def get_cache_lock_with_prefix(self):
-        return f"{self.cache_prefix}_{self.cache_lock_filename}"
+        return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_lock_filename}"
 
     def write_cache_to_s3(self, data):
         """Writes tags cache to s3"""
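
The two getter changes are the heart of the refactor: every cache file and its lock now gain the cache/ directory prefix. A minimal sketch of the composition, using the real DD_S3_CACHE_DIRNAME value but a hypothetical cache prefix:

# Mirrors BaseTagsCache.get_cache_name_with_prefix after this change;
# the "myforwarder" prefix is a hypothetical placeholder.
DD_S3_CACHE_DIRNAME = "cache"

def get_cache_name_with_prefix(cache_prefix, cache_filename):
    return f"{DD_S3_CACHE_DIRNAME}/{cache_prefix}_{cache_filename}"

print(get_cache_name_with_prefix("myforwarder", "lambda.json"))
# cache/myforwarder_lambda.json (previously myforwarder_cache.json at the bucket root)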
4 changes: 3 additions & 1 deletion aws/logs_monitoring/caching/cloudwatch_log_group_cache.py
@@ -6,9 +6,11 @@
 
 import boto3
 from botocore.config import Config
+
 from caching.common import sanitize_aws_tag_string
 from settings import (
     DD_S3_BUCKET_NAME,
+    DD_S3_CACHE_DIRNAME,
     DD_S3_LOG_GROUP_CACHE_DIRNAME,
     DD_TAGS_CACHE_TTL_SECONDS,
 )
@@ -20,7 +22,7 @@ def __init__(
         self,
         prefix,
     ):
-        self.cache_dirname = DD_S3_LOG_GROUP_CACHE_DIRNAME
+        self.cache_dirname = f"{DD_S3_CACHE_DIRNAME}/{DD_S3_LOG_GROUP_CACHE_DIRNAME}"
         self.cache_ttl_seconds = DD_TAGS_CACHE_TTL_SECONDS
         self.bucket_name = DD_S3_BUCKET_NAME
         self.cache_prefix = prefix
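
The log group cache appears to work with a whole directory rather than a single file, so instead of prefixing a filename it nests its existing dirname under the new common one; the dirname itself is also renamed from log-group-cache to log-group in settings.py below. A small sketch of the resulting prefix:

# Sketch of the nested cache directory for the CloudWatch log group cache.
DD_S3_CACHE_DIRNAME = "cache"
DD_S3_LOG_GROUP_CACHE_DIRNAME = "log-group"  # renamed from "log-group-cache"

cache_dirname = f"{DD_S3_CACHE_DIRNAME}/{DD_S3_LOG_GROUP_CACHE_DIRNAME}"
print(cache_dirname)  # cache/log-group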
12 changes: 8 additions & 4 deletions aws/logs_monitoring/caching/lambda_cache.py
@@ -1,18 +1,22 @@
 import os
+
 from botocore.exceptions import ClientError
+
 from caching.base_tags_cache import BaseTagsCache
 from caching.common import parse_get_resources_response_for_tags_by_arn
-from telemetry import send_forwarder_internal_metrics
 from settings import (
-    DD_S3_CACHE_FILENAME,
-    DD_S3_CACHE_LOCK_FILENAME,
+    DD_S3_LAMBDA_CACHE_FILENAME,
+    DD_S3_LAMBDA_CACHE_LOCK_FILENAME,
     GET_RESOURCES_LAMBDA_FILTER,
 )
+from telemetry import send_forwarder_internal_metrics
 
 
 class LambdaTagsCache(BaseTagsCache):
     def __init__(self, prefix):
-        super().__init__(prefix, DD_S3_CACHE_FILENAME, DD_S3_CACHE_LOCK_FILENAME)
+        super().__init__(
+            prefix, DD_S3_LAMBDA_CACHE_FILENAME, DD_S3_LAMBDA_CACHE_LOCK_FILENAME
+        )
 
     def should_fetch_tags(self):
         return os.environ.get("DD_FETCH_LAMBDA_TAGS", "false").lower() == "true"
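
With the shared cache.json/cache.lock names retired, each subclass now hands its own filenames to the base class, which composes the final key. A sketch of the pattern (the class names and prefix here are illustrative reductions, not the forwarder's full implementation):

# Illustrative reduction of the new pattern: the subclass owns its filenames,
# the base class owns the directory and prefix composition.
class BaseTagsCacheSketch:
    def __init__(self, prefix, cache_filename, cache_lock_filename):
        self.cache_dirname = "cache"  # DD_S3_CACHE_DIRNAME
        self.cache_prefix = prefix
        self.cache_filename = cache_filename
        self.cache_lock_filename = cache_lock_filename

    def get_cache_name_with_prefix(self):
        return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_filename}"


class LambdaTagsCacheSketch(BaseTagsCacheSketch):
    def __init__(self, prefix):
        super().__init__(prefix, "lambda.json", "lambda.lock")


print(LambdaTagsCacheSketch("myforwarder").get_cache_name_with_prefix())
# cache/myforwarder_lambda.json

Since the filename changes from cache.json to lambda.json, any existing Lambda tags cache at the old root-level key is presumably abandoned and rebuilt on the next refresh.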
10 changes: 6 additions & 4 deletions aws/logs_monitoring/retry/storage.py
@@ -1,10 +1,12 @@
-import os
+import json
 import logging
+import os
 from time import time
-import json
+
 import boto3
 from botocore.exceptions import ClientError
-from settings import DD_RETRY_PATH, DD_S3_BUCKET_NAME
+
+from settings import DD_S3_BUCKET_NAME, DD_S3_RETRY_DIRNAME
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
@@ -76,7 +78,7 @@ def _fetch_data_for_key(self, key):
         return None
 
     def _get_key_prefix(self, retry_prefix):
-        return f"{DD_RETRY_PATH}/{self.function_prefix}/{str(retry_prefix)}/"
+        return f"{DD_S3_RETRY_DIRNAME}/{self.function_prefix}/{str(retry_prefix)}/"
 
     def _serialize(self, data):
         return bytes(json.dumps(data).encode("UTF-8"))
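
Only the constant is renamed here; the stored prefix is still failed_events/, so previously stored failed events remain where the forwarder looks for them. A sketch of how such a prefix is typically consumed when scanning for events to retry (bucket name, function prefix, and retry prefix are hypothetical):

# Listing stored failed events under the unchanged failed_events/ prefix.
# The bucket, "myforwarder", and "retry" values are hypothetical placeholders.
import boto3

DD_S3_RETRY_DIRNAME = "failed_events"  # was DD_RETRY_PATH; same value

s3 = boto3.client("s3")
prefix = f"{DD_S3_RETRY_DIRNAME}/myforwarder/retry/"  # shape of _get_key_prefix
response = s3.list_objects_v2(Bucket="my-forwarder-bucket", Prefix=prefix)
for obj in response.get("Contents", []):
    print(obj["Key"])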
21 changes: 13 additions & 8 deletions aws/logs_monitoring/settings.py
@@ -4,11 +4,11 @@
 # Copyright 2021 Datadog, Inc.
 
 import base64
+import logging
 import os
 
 import boto3
 import botocore.config
-import logging
 
 logger = logging.getLogger()
 logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
@@ -255,15 +255,20 @@ def __init__(self, name, pattern, placeholder):
 DD_ADDITIONAL_TARGET_LAMBDAS = get_env_var("DD_ADDITIONAL_TARGET_LAMBDAS", default=None)
 
 DD_S3_BUCKET_NAME = get_env_var("DD_S3_BUCKET_NAME", default=None)
+
 # These default cache names remain unchanged so we can get existing cache data for these
-DD_S3_CACHE_FILENAME = "cache.json"
-DD_S3_CACHE_LOCK_FILENAME = "cache.lock"
+DD_S3_CACHE_DIRNAME = "cache"
+
+DD_S3_LAMBDA_CACHE_FILENAME = "lambda.json"
+DD_S3_LAMBDA_CACHE_LOCK_FILENAME = "lambda.lock"
+
 DD_S3_STEP_FUNCTIONS_CACHE_FILENAME = "step-functions-cache.json"
 DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME = "step-functions-cache.lock"
-DD_S3_TAGS_CACHE_FILENAME = "s3-cache.json"
-DD_S3_TAGS_CACHE_LOCK_FILENAME = "s3-cache.lock"
+
+DD_S3_TAGS_CACHE_FILENAME = "s3.json"
+DD_S3_TAGS_CACHE_LOCK_FILENAME = "s3.lock"
 
-DD_S3_LOG_GROUP_CACHE_DIRNAME = "log-group-cache"
+DD_S3_LOG_GROUP_CACHE_DIRNAME = "log-group"
 
 DD_TAGS_CACHE_TTL_SECONDS = int(get_env_var("DD_TAGS_CACHE_TTL_SECONDS", default=300))
 DD_S3_CACHE_LOCK_TTL_SECONDS = 60
@@ -272,7 +277,7 @@ def __init__(self, name, pattern, placeholder):
 GET_RESOURCES_S3_FILTER = "s3:bucket"
 
 
-# Retyer
-DD_RETRY_PATH = "failed_events"
+# Retryer
+DD_S3_RETRY_DIRNAME = "failed_events"
 DD_RETRY_KEYWORD = "retry"
 DD_STORE_FAILED_EVENTS = get_env_var("DD_STORE_FAILED_EVENTS", "false", boolean=True)
18 changes: 7 additions & 11 deletions aws/logs_monitoring/template.yaml
@@ -327,6 +327,9 @@ Conditions:
     - !Equals [!Ref ReservedConcurrency, ""]
   ShouldUseAccessLogBucket: !Not
     - !Equals [!Ref DdForwarderBucketsAccessLogsTarget, ""]
+  ShouldDdFetchTags: !Or
+    - !Equals [!Ref DdFetchLambdaTags, true]
+    - !Equals [!Ref DdFetchStepFunctionsTags, true]
   SetForwarderBucket: !Or
     - !Condition CreateS3Bucket
     - !Not
@@ -583,8 +586,8 @@ Resources:
                  Condition:
                    StringLike:
                      s3:prefix:
-                       - "retry/*"
-                       - "log-group-cache/*"
+                       - "failed_events/*"
+                       - "cache/*"
                  Effect: Allow
              - !Ref AWS::NoValue
            - Action:
@@ -606,8 +609,9 @@ Resources:
                - !Sub "${DdApiKeySecretArn}*"
              Effect: Allow
            # Fetch Lambda resource tags for data enrichment
+           # Fetch Step Functions resource tags for data enrichment
            - !If
-             - SetDdFetchLambdaTags
+             - ShouldDdFetchTags
              - Action:
                  - tag:GetResources
                Resource: "*"
@@ -621,14 +625,6 @@ Resources:
               Resource: "*"
               Effect: Allow
             - !Ref AWS::NoValue
-           # Fetch Step Functions resource tags for data enrichment
-           - !If
-             - SetDdFetchStepFunctionsTags
-             - Action:
-                 - tag:GetResources
-               Resource: "*"
-               Effect: Allow
-             - !Ref AWS::NoValue
            # Required for Lambda deployed in VPC
            - !If
              - UseVPC
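
Two policy changes are bundled here: the s3:prefix condition now lists the directories the code actually writes (failed_events/ and cache/ instead of the stale retry/ and log-group-cache/), and the two duplicate tag:GetResources statements collapse into one guarded by the new ShouldDdFetchTags !Or condition. A quick sanity check that the new StringLike patterns cover the new key layout (fnmatch approximates IAM wildcard matching here; the keys are hypothetical):

# Rough check of the new s3:prefix patterns against the new key layout.
# fnmatch's "*" behaves like the IAM StringLike wildcard for these cases;
# the example keys are hypothetical placeholders.
from fnmatch import fnmatch

allowed = ["failed_events/*", "cache/*"]  # was: "retry/*", "log-group-cache/*"
keys = [
    "cache/myforwarder_lambda.json",
    "cache/log-group/myforwarder/some-log-group",
    "failed_events/myforwarder/retry/1724112000",
]
for key in keys:
    assert any(fnmatch(key, pattern) for pattern in allowed), key
print("all keys covered")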
2 changes: 1 addition & 1 deletion aws/logs_monitoring/tools/build_bundle.sh
@@ -34,7 +34,7 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
 cd $DIR
 
 # Read the desired version
-if [ -z "$1" ]; then
+if [[ -z ${1:-} ]]; then
   log_error "Must specify a desired version number"
 elif [[ ! $1 =~ [0-9]+\.[0-9]+\.[0-9]+ ]]; then
   log_error "Must use a semantic version, e.g., 3.1.4"
