diff --git a/.github/DISABLED.pre-commit-config.yaml b/.github/.pre-commit-config.yaml
similarity index 100%
rename from .github/DISABLED.pre-commit-config.yaml
rename to .github/.pre-commit-config.yaml
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 3c44d7d..0bca217 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -15,7 +15,7 @@ Please note we have a code of conduct, please follow it in all your interactions
- [ ] Add [semantics prefix](#semantic-pull-requests) to your PR or Commits (at least one of your commit groups)
- [ ] CI tests are passing
-- [ ] README.md has been updated after any changes to variables and outputs. See https://github.com/cloudandthings/terraform-aws-clickops-notifer/#doc-generation
+- [ ] README.md has been updated after any changes to variables and outputs. See https://github.com/cloudandthings/terraform-aws-clickops-notifier/#doc-generation
- [ ] Run pre-commit hooks `pre-commit run -a`
## Semantic Pull Requests
diff --git a/.github/workflows/check-deployment.yml b/.github/workflows/check-deployment.yml
index d3daa36..22fe5c6 100644
--- a/.github/workflows/check-deployment.yml
+++ b/.github/workflows/check-deployment.yml
@@ -18,6 +18,9 @@ jobs:
# CHECK DEPLOYMENT
#--------------------------------------------------------------
check-deployment:
+ env:
+ FUNCTION: costnotifier
+ RUNTIME: python3.9
# For public repos use runs-on: ubuntu-latest
# For private repos use runs-on: self-hosted
runs-on: ${{ vars.RUNS_ON }}
@@ -27,16 +30,16 @@ jobs:
# To test whether the deployment package is up to date, we only check the application code (excluding dependencies).
- name: 🛠️ build deployment
run: |
- cp myfunction-python3.9.zip myfunction-python3.9.zip_original
- cd myfunction
- zip -r ../myfunction-python3.9.zip .
+ cp $FUNCTION-$RUNTIME.zip $FUNCTION-$RUNTIME.zip_original
+ cd $FUNCTION
+ zip -r ../$FUNCTION-$RUNTIME.zip .
- name: 🔍 check deployment
run: |
# Compare length and names of files in Zip.
# Building in Docker doesn't work, some files are still different.
- [[ -f myfunction-python3.9.zip ]] || { echo "Deployment file not found."; exit 1; }
+ [[ -f $FUNCTION-$RUNTIME.zip ]] || { echo "Deployment file not found."; exit 1; }
diff \
- <(unzip -vqq myfunction-python3.9.zip | awk '{$2=""; $3=""; $4=""; $5=""; $6=""; print}' | sort -k3 -f) \
- <(unzip -vqq myfunction-python3.9.zip_original | awk '{$2=""; $3=""; $4=""; $5=""; $6=""; print}' | sort -k3 -f)
+ <(unzip -vqq $FUNCTION-$RUNTIME.zip | awk '{$2=""; $3=""; $4=""; $5=""; $6=""; print}' | sort -k3 -f) \
+ <(unzip -vqq $FUNCTION-$RUNTIME.zip_original | awk '{$2=""; $3=""; $4=""; $5=""; $6=""; print}' | sort -k3 -f)
FILES_CHANGED=$?
echo FILES_CHANGED=$FILES_CHANGED
diff --git a/README.md b/README.md
index 98dd06f..3d20efb 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,18 @@ This terraform module sends a daily cost report and the cost delta in a 7 day ro
The module also reports on the top 5 services attributing to the daily cost. Its a very rudimentary means of managing the cost of your AWS bill, but it does provide a 1000 ft view of the current expenses for the previous day. More on this module can be found on our [blog](https://medium.com/cloudandthings/aws-cost-notifier-e437bd311c54) on Medium.
+### Lambda deployment package
+
+The Lambda function is deployed using a `.zip` deployment package. The package is contained within this module and also attached to the GitHub release.
+
+You may choose one of the following options for the deployment:
+
+ 1. **default:** If no `s3_*` variables are specified, then the package is used by the Lambda function directly.
+ 2. **recommended:** `s3_bucket` can be specified to upload the package to the S3 bucket, for the Lambda function to use.
+ - `s3_prefix` can be specified to override the default location.
+   - `upload_deployment_to_s3=false` can be specified to disable the upload, in which case the package must be placed on S3 manually.
+
+
### Examples
@@ -31,13 +43,13 @@ See `examples` dropdown on Terraform Cloud, or [browse here](/examples/).
| [notification\_schedule](#input\_notification\_schedule) | CRON expression to schedule notification | `string` | `"cron(0 20 ? * MON-SUN *)"` | no |
| [permissions\_boundary](#input\_permissions\_boundary) | ARN of the policy that is used to set the permissions boundary for the role. | `string` | `null` | no |
| [red\_threshold](#input\_red\_threshold) | Percentage exceeded threshold to send a red alert and notify the slack channel | `string` | `"50"` | no |
-| [runtime](#input\_runtime) | The python runtime for the lambda. Currently only `python3.8` is supported. | `string` | `"python3.8"` | no |
+| [runtime](#input\_runtime) | The python runtime for the lambda. Currently only `python3.9` is supported. | `string` | `"python3.9"` | no |
| [s3\_bucket](#input\_s3\_bucket) | S3 bucket for deployment package. | `string` | `null` | no |
| [s3\_key](#input\_s3\_key) | S3 object key for deployment package. Otherwise, defaults to `var.naming_prefix/local.deployment_filename`. | `string` | `null` | no |
| [security\_group\_ids](#input\_security\_group\_ids) | List of VPC security group IDs associated with the Lambda function. | `list(string)` | `[]` | no |
| [subnet\_ids](#input\_subnet\_ids) | List of VPC subnet IDs associated with the Lambda function. | `list(string)` | `[]` | no |
| [tags](#input\_tags) | A mapping of tags to assign to the resources. | `map(string)` | `{}` | no |
-| [upload\_deployment\_to\_s3](#input\_upload\_deployment\_to\_s3) | If `true`, the deployment package within this module repo will be copied to S3. If `false` then the S3 object must be uploaded separately. | `bool` | `false` | no |
+| [upload\_deployment\_to\_s3](#input\_upload\_deployment\_to\_s3) | If `true`, the deployment package within this module repo will be copied to S3. If `false` then the S3 object must be uploaded separately. Ignored if `s3_bucket` is null. | `bool` | `true` | no |
| [webhook\_type](#input\_webhook\_type) | Either "slack" or "teams". | `string` | `"slack"` | no |
| [webhook\_urls](#input\_webhook\_urls) | Webhook URLs to receive daily cost notifications on either Slack or Teams | `list(string)` | n/a | yes |
diff --git a/costnotifier-python3.9.zip b/costnotifier-python3.9.zip
index 4d93d29..ab156c2 100644
Binary files a/costnotifier-python3.9.zip and b/costnotifier-python3.9.zip differ
diff --git a/costnotifier/app.py b/costnotifier/app.py
index c3ef05c..0d4939c 100644
--- a/costnotifier/app.py
+++ b/costnotifier/app.py
@@ -14,7 +14,7 @@
AMBER_THRESHOLD = float(os.environ.get("AMBER_THRESHOLD", 20))
RED_THRESHOLD = float(os.environ.get("RED_THRESHOLD", 50))
-WEBHOOK_URLS = json.loads(os.environ.get("WEBHOOK_URLS", []))
+WEBHOOK_URLS = json.loads(os.environ.get("WEBHOOK_URLS", "[]"))
WEBHOOK_TYPE = os.environ.get("WEBHOOK_TYPE", "slack")
TOPIC_ARN = os.environ.get("SNS_ARN", "DISABLED")
diff --git a/examples/email/README.md b/examples/email/README.md
index 038d0a9..2ee9c61 100644
--- a/examples/email/README.md
+++ b/examples/email/README.md
@@ -16,10 +16,8 @@ module "example" {
emails_for_notifications = ["velisa@cat.io", "adan@cat.io"] # Optional
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
```
----
diff --git a/examples/email/main.tf b/examples/email/main.tf
index c026e03..a1dced3 100644
--- a/examples/email/main.tf
+++ b/examples/email/main.tf
@@ -12,8 +12,6 @@ module "example" {
emails_for_notifications = ["velisa@cat.io", "adan@cat.io"] # Optional
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
diff --git a/examples/slack/README.md b/examples/slack/README.md
index 5d4759b..7a9af16 100644
--- a/examples/slack/README.md
+++ b/examples/slack/README.md
@@ -15,10 +15,8 @@ module "example" {
notification_schedule = "cron(0 7 ? * MON-FRI *)"
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
```
----
diff --git a/examples/slack/main.tf b/examples/slack/main.tf
index 6968dcb..8d59912 100644
--- a/examples/slack/main.tf
+++ b/examples/slack/main.tf
@@ -11,8 +11,6 @@ module "example" {
notification_schedule = "cron(0 7 ? * MON-FRI *)"
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
diff --git a/examples/teams/README.md b/examples/teams/README.md
index e2b30bc..08cd5c5 100644
--- a/examples/teams/README.md
+++ b/examples/teams/README.md
@@ -15,10 +15,8 @@ module "billing_notifier_root_account_teams" {
notification_schedule = "cron(0 20 * * ? *)"
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
```
----
diff --git a/examples/teams/main.tf b/examples/teams/main.tf
index 063b6a9..7d6a335 100644
--- a/examples/teams/main.tf
+++ b/examples/teams/main.tf
@@ -11,8 +11,6 @@ module "billing_notifier_root_account_teams" {
notification_schedule = "cron(0 20 * * ? *)"
- ### Uncomment to fetch the deployment package from S3
+ ### Uncomment to upload and fetch the deployment package from S3
# s3_bucket = "my_s3_bucket"
- ### Uncomment to upload the local deployment package to S3
- # upload_deployment_to_s3 = true
}
diff --git a/main.tf b/main.tf
index ebcd60e..4b25f43 100644
--- a/main.tf
+++ b/main.tf
@@ -34,16 +34,14 @@ locals {
deployment_path = "${path.module}/${local.deployment_filename}"
use_s3 = (
- var.upload_deployment_to_s3 ||
- var.s3_bucket != null ||
- var.s3_key != null
+ var.upload_deployment_to_s3 && var.s3_bucket != null
)
s3_key = coalesce(var.s3_key, join("/", [var.naming_prefix, local.deployment_filename]))
}
resource "aws_s3_object" "deployment" {
- count = var.upload_deployment_to_s3 ? 1 : 0
+ count = local.use_s3 && var.upload_deployment_to_s3 ? 1 : 0
bucket = var.s3_bucket
key = local.s3_key
source = local.deployment_path
diff --git a/tests/requirements.txt b/tests/requirements.txt
index aa619f1..877379b 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -6,5 +6,6 @@
-r ../costnotifier/requirements.txt
# Additional packages to install when doing testing
+boto3
pytest
tftest
diff --git a/tests/test_lambda_handler.py b/tests/test_lambda_handler.py
index b901487..61d2027 100644
--- a/tests/test_lambda_handler.py
+++ b/tests/test_lambda_handler.py
@@ -1,6 +1,4 @@
-import sys
-
-from costnotifer import app
+from costnotifier import app
def test_lambda_handler():
@@ -8,8 +6,4 @@ def test_lambda_handler():
This test ensures that the lambda handler function is callable.
"""
test_event = None
- result = app.lambda_handler(test_event)
-
- expected_result = "Hello from AWS Lambda using Python" + sys.version + "! 200"
-
- assert result == expected_result
+ app.lambda_handler(test_event, None)
diff --git a/variables.tf b/variables.tf
index 87c2d7f..bfd15ca 100644
--- a/variables.tf
+++ b/variables.tf
@@ -26,13 +26,13 @@ variable "cloudwatch_logs_retention_in_days" {
}
variable "runtime" {
- description = "The python runtime for the lambda. Currently only `python3.8` is supported."
+ description = "The python runtime for the lambda. Currently only `python3.9` is supported."
type = string
- default = "python3.8"
+ default = "python3.9"
validation {
- condition = contains(["python3.8"], lower(var.runtime))
- error_message = "Must be one of: \"python3.8\"."
+ condition = contains(["python3.9"], lower(var.runtime))
+ error_message = "Must be one of: \"python3.9\"."
}
}
@@ -155,7 +155,7 @@ variable "s3_key" {
}
variable "upload_deployment_to_s3" {
- description = "If `true`, the deployment package within this module repo will be copied to S3. If `false` then the S3 object must be uploaded separately."
+ description = "If `true`, the deployment package within this module repo will be copied to S3. If `false` then the S3 object must be uploaded separately. Ignored if `s3_bucket` is null."
type = bool
- default = false
+ default = true
}