Commit

Automatic commit before release [release=1.3.5] | [skip actions]
eytannnaim authored and github-actions[bot] committed Jan 26, 2023
1 parent 97d4140 commit 9f7f947
Showing 9 changed files with 50 additions and 50 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -281,7 +281,7 @@ The first thing to do in this deployment mode is to [download Terraform ](https:
 **NOTE:** Update the values for the required parameters to complete the installation: example_name, aws_access_key_id, aws_secret_access_key and region
-1. Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the <a href="https://github.com/imperva/dsfkit/tree/1.3.5">DSFKit GitHub Repository</a>, e.g., if you choose the "basic_deployment" example, you should download <a href="https://github.com/imperva/dsfkit/tree/1.3.4/examples/poc/basic_deployment/basic_deployment.zip">basic_deployment.zip</a>.
+1. Download the zip file of the example you've chosen (See the [Choosing the Example/Recipe that Fits Your Use Case](#choosing-the-examplerecipe-that-fits-your-use-case) section) from the <a href="https://github.com/imperva/dsfkit/tree/1.3.5">DSFKit GitHub Repository</a>, e.g., if you choose the "basic_deployment" example, you should download <a href="https://github.com/imperva/dsfkit/tree/1.3.5/examples/poc/basic_deployment/basic_deployment.zip">basic_deployment.zip</a>.
 2. Unzip the zip file in CLI or using your operating system's UI.
 For example, in CLI:
@@ -614,7 +614,7 @@ Complete the following instructions to automate the creation of an installer mac
 DSFKit provides a number of out-of-the-box examples which are already configured to deploy common DSF environments.
-These examples can be found in the <a href="https://github.com/imperva/dsfkit/tree/1.3.5">DSFKit GitHub Repository</a> under the <a href="https://github.com/imperva/dsfkit/tree/1.3.4/examples">examples</a> directory.
+These examples can be found in the <a href="https://github.com/imperva/dsfkit/tree/1.3.5">DSFKit GitHub Repository</a> under the <a href="https://github.com/imperva/dsfkit/tree/1.3.5/examples">examples</a> directory.
 Some examples are intended for Lab or POC and others for actual DSF deployments by Professional Services and customers.
 For more details about each example, click on the example name.
Binary file not shown.
Binary file modified examples/poc/basic_deployment/basic_deployment.zip
Binary file not shown.
2 changes: 1 addition & 1 deletion examples/poc/basic_deployment/variables.tf
@@ -100,7 +100,7 @@ variable "db_types_to_onboard" {
   description = "DB types to onboard, available types are - 'RDS MySQL', 'RDS MsSQL' with data"
   validation {
     condition = alltrue([
-      for db_type in var.db_types_to_onboard : contains(["RDS MySQL", "RDS MsSQL"], db_type)
+      for db_type in var.db_types_to_onboard : contains(["RDS MySQL", "RDS MsSQL"], db_type)
     ])
     error_message = "Valid values should contain at least one of the following: 'RDS MySQL', 'RDS MsSQL'."
   }
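For orientation, a minimal terraform.tfvars sketch that satisfies this validation might look like the following; the variable name and the allowed values come from this example, while the chosen combination is purely illustrative:

```hcl
# terraform.tfvars (illustrative values only)
# Each entry must be one of the two allowed types, otherwise the alltrue/contains
# validation fails and the error_message defined above is shown.
db_types_to_onboard = ["RDS MySQL", "RDS MsSQL"]
```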
Binary file modified examples/poc/hadr_deployment/hadr_deployment.zip
Binary file not shown.
2 changes: 1 addition & 1 deletion examples/poc/hadr_deployment/variables.tf
@@ -100,7 +100,7 @@ variable "db_types_to_onboard" {
   description = "DB types to onboard, available types are - 'RDS MySQL', 'RDS MsSQL' with data"
   validation {
     condition = alltrue([
-      for db_type in var.db_types_to_onboard : contains(["RDS MySQL", "RDS MsSQL"], db_type)
+      for db_type in var.db_types_to_onboard : contains(["RDS MySQL", "RDS MsSQL"], db_type)
     ])
     error_message = "Valid values should contain at least one of the following: 'RDS MySQL', 'RDS MsSQL'."
   }
4 changes: 2 additions & 2 deletions installer_machine/variables.tf
@@ -28,8 +28,8 @@ variable "example_name" {
 }

 variable "example_type" {
-  type = string
-  default = "poc"
+  type = string
+  default = "poc"
   description = "poc or installation, according to where your example is located in the DSFKit GitHub repository under the 'examples' directory"
 }
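To make these installer-machine inputs concrete, a hypothetical terraform.tfvars sketch could look as follows; the example_name value is illustrative, and example_type simply mirrors the directory under 'examples' where that example lives:

```hcl
# terraform.tfvars (illustrative values only)
example_name = "basic_deployment" # name of the chosen example directory
example_type = "poc"              # "poc" or "installation", matching the repository layout
```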

32 changes: 16 additions & 16 deletions modules/aws/poc-db-onboarder/iam_role.tf
@@ -3,9 +3,9 @@ locals {
   Version = "2012-10-17"
   Statement = [
     {
-      "Sid": "VisualEditor0",
-      "Effect": "Allow",
-      "Action": [
+      "Sid" : "VisualEditor0",
+      "Effect" : "Allow",
+      "Action" : [
         "logs:Describe*",
         "logs:List*",
         "rds:DescribeDBInstances",
@@ -20,7 +20,7 @@ locals {
         "rds:DescribeDBClusters",
         "rds:DescribeOptionGroups"
       ],
-      "Resource": "*"
+      "Resource" : "*"
     }
   ]
 })
@@ -29,31 +29,31 @@ locals {
   Version = "2012-10-17"
   Statement = [
     {
-      "Sid": "VisualEditor0",
-      "Effect": "Allow",
-      "Action": [
+      "Sid" : "VisualEditor0",
+      "Effect" : "Allow",
+      "Action" : [
        "rds:DescribeDBInstances",
        "rds:DescribeDBClusters",
        "rds:DescribeOptionGroups"
      ],
-      "Resource": "*"
+      "Resource" : "*"
     },
     {
-      "Sid": "VisualEditor1",
-      "Effect": "Allow",
-      "Action": [
+      "Sid" : "VisualEditor1",
+      "Effect" : "Allow",
+      "Action" : [
        "s3:ListAllMyBuckets"
      ],
-      "Resource": "*"
+      "Resource" : "*"
     },
     {
-      "Sid": "VisualEditor2",
-      "Effect": "Allow",
-      "Action": [
+      "Sid" : "VisualEditor2",
+      "Effect" : "Allow",
+      "Action" : [
        "s3:ListBucket",
        "s3:GetObject"
      ],
-      "Resource": "arn:aws:s3:::${var.database_details.db_identifier}-*" # bucket name starts with db_identifier prefix
+      "Resource" : "arn:aws:s3:::${var.database_details.db_identifier}-*" # bucket name starts with db_identifier prefix
     }
   ]
 })
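As an aside for readers unfamiliar with this pattern, jsonencode()-built documents like the locals above are typically handed to an IAM role via an inline policy resource. A minimal sketch under assumed names (onboarder_role, onboarder_policy, and the local reference are hypothetical, not taken from this module) might be:

```hcl
# Hypothetical wiring of a jsonencode()-built policy document to an IAM role.
resource "aws_iam_role" "onboarder_role" {
  name = "poc-db-onboarder-example" # illustrative name
  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [{
      Effect    = "Allow"
      Principal = { Service = "ec2.amazonaws.com" } # assumed principal
      Action    = "sts:AssumeRole"
    }]
  })
}

resource "aws_iam_role_policy" "onboarder_policy" {
  name   = "poc-db-onboarder-inline-policy" # illustrative name
  role   = aws_iam_role.onboarder_role.id
  policy = local.rds_policy # e.g. one of the policy documents defined in the locals above
}
```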
56 changes: 28 additions & 28 deletions modules/aws/rds-mssql-db/main.tf
@@ -13,14 +13,14 @@ resource "random_id" "salt" {
 data "aws_region" "current" {}

 locals {
-  db_username = var.username
-  db_password = length(var.password) > 0 ? var.password : random_password.db_password.result
-  db_identifier = length(var.identifier) > 0 ? var.identifier : "edsf-db-demo-${random_pet.db_id.id}"
-  db_name = length(var.name) > 0 ? var.name : replace("edsf-db-demo-${random_pet.db_id.id}", "-", "_")
-  mssql_connect_db_name = "rdsadmin"
-  lambda_salt = random_id.salt.hex
-  lambda_package = "${path.module}/installation_resources/mssqlLambdaPackage.zip"
-  db_audit_bucket_name = "${local.db_identifier}-audit-bucket"
+  db_username = var.username
+  db_password = length(var.password) > 0 ? var.password : random_password.db_password.result
+  db_identifier = length(var.identifier) > 0 ? var.identifier : "edsf-db-demo-${random_pet.db_id.id}"
+  db_name = length(var.name) > 0 ? var.name : replace("edsf-db-demo-${random_pet.db_id.id}", "-", "_")
+  mssql_connect_db_name = "rdsadmin"
+  lambda_salt = random_id.salt.hex
+  lambda_package = "${path.module}/installation_resources/mssqlLambdaPackage.zip"
+  db_audit_bucket_name = "${local.db_identifier}-audit-bucket"
 }

 resource "aws_db_subnet_group" "rds_db_sg" {
@@ -129,12 +129,12 @@ data "aws_iam_role" "lambda_mssql_assignee_role" {
 }

 resource "aws_lambda_function" "lambda_mssql_infra" {
-  function_name = join("-", ["dsf-mssql-infra", local.lambda_salt])
-  filename = local.lambda_package
-  role = data.aws_iam_role.lambda_mssql_assignee_role.arn
-  handler = "createDBsAndEnableAudit.lambda_handler"
-  runtime = "python3.9"
-  timeout = 900
+  function_name = join("-", ["dsf-mssql-infra", local.lambda_salt])
+  filename = local.lambda_package
+  role = data.aws_iam_role.lambda_mssql_assignee_role.arn
+  handler = "createDBsAndEnableAudit.lambda_handler"
+  runtime = "python3.9"
+  timeout = 900

   vpc_config {
     security_group_ids = [aws_security_group.rds_mssql_access.id]
@@ -147,7 +147,7 @@ resource "aws_lambda_function" "lambda_mssql_infra" {
       DB_PORT = aws_db_instance.rds_db.port
       DB_NAME = local.mssql_connect_db_name
       DB_USER = aws_db_instance.rds_db.username
-      DB_PWD = nonsensitive(aws_db_instance.rds_db.password)
+      DB_PWD = nonsensitive(aws_db_instance.rds_db.password)
     }
   }

@@ -185,10 +185,10 @@ data "aws_route_tables" "vpc_route_tables" {
 }

 resource "aws_vpc_endpoint" "s3_vpc_endpoint" {
-  service_name = "com.amazonaws.${data.aws_region.current.name}.s3"
-  vpc_id = data.aws_subnet.subnet.vpc_id
+  service_name = "com.amazonaws.${data.aws_region.current.name}.s3"
+  vpc_id = data.aws_subnet.subnet.vpc_id
   vpc_endpoint_type = "Gateway"
-  route_table_ids = data.aws_route_tables.vpc_route_tables.ids
+  route_table_ids = data.aws_route_tables.vpc_route_tables.ids
 }

 resource "aws_s3_object" "mssql_lambda_objects" {
@@ -254,11 +254,11 @@ resource "aws_cloudwatch_event_target" "trafficEachMinuteTarget" {
 }

 resource "aws_lambda_permission" "allow_cloudwatchTraffic" {
-  statement_id = "AllowTrafficExecutionFromCloudWatch"
-  action = "lambda:InvokeFunction"
+  statement_id = "AllowTrafficExecutionFromCloudWatch"
+  action = "lambda:InvokeFunction"
   function_name = aws_lambda_function.lambda_mssql_scheduled.function_name
-  principal = "events.amazonaws.com"
-  source_arn = aws_cloudwatch_event_rule.trafficEachMinute.arn
+  principal = "events.amazonaws.com"
+  source_arn = aws_cloudwatch_event_rule.trafficEachMinute.arn
 }

 # add scheduled events each 10 minutes for the suspicious activity queries
@@ -269,15 +269,15 @@ resource "aws_cloudwatch_event_rule" "suspiciousActivityEach10Minutes" {
 }

 resource "aws_cloudwatch_event_target" "suspiciousActivityEach10MinutesTarget" {
-  arn = aws_lambda_function.lambda_mssql_scheduled.arn
-  rule = aws_cloudwatch_event_rule.suspiciousActivityEach10Minutes.name
+  arn = aws_lambda_function.lambda_mssql_scheduled.arn
+  rule = aws_cloudwatch_event_rule.suspiciousActivityEach10Minutes.name
   input = "{\"S3_FILE_PREFIX\":\"mssql_suspicious_activity\",\"SHOULD_RUN_FAILED_LOGINS\":\"true\",\"DBS_FAILED_LOGINS\":\"financedb;HealthCaredb;Insurancedb;telecomdb\",\"DB_USER2\":\"finance:Teller;health:public_health_nurse;insurance:Broker;telecom:Technician\"}"
 }

 resource "aws_lambda_permission" "allow_cloudwatchSuspicious" {
-  statement_id = "AllowSuspiciousExecutionFromCloudWatch"
-  action = "lambda:InvokeFunction"
+  statement_id = "AllowSuspiciousExecutionFromCloudWatch"
+  action = "lambda:InvokeFunction"
   function_name = aws_lambda_function.lambda_mssql_scheduled.function_name
-  principal = "events.amazonaws.com"
-  source_arn = aws_cloudwatch_event_rule.suspiciousActivityEach10Minutes.arn
+  principal = "events.amazonaws.com"
+  source_arn = aws_cloudwatch_event_rule.suspiciousActivityEach10Minutes.arn
 }
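The rule resources themselves are collapsed in this diff (only their closing braces appear as context). For reference, a minimal sketch of the 10-minute EventBridge rule that the target and permission above point at could look like the following; the body shown here is an assumption based on the resource name and the comment in main.tf, not lines from this commit:

```hcl
# Hypothetical sketch of the collapsed rule: fires the scheduled Lambda every 10 minutes.
resource "aws_cloudwatch_event_rule" "suspiciousActivityEach10Minutes" {
  name                = "suspicious-activity-each-10-minutes" # illustrative name
  schedule_expression = "rate(10 minutes)"                    # assumed cadence, per the comment above
}
```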
