Merge branch 'develop' into SQS-fixes
aloftus23 committed Mar 14, 2024
2 parents c0af290 + fe796f8 commit 76cc3d2
Showing 27 changed files with 62 additions and 55 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/backend.yml
@@ -114,7 +114,7 @@ jobs:
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: pip-${{ hashFiles(**/requirements.txt) }}
key: pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: pip-
- run: pip install -r worker/requirements.txt
- run: pytest
4 changes: 2 additions & 2 deletions backend/db-init/create-test-db.sh
@@ -3,7 +3,7 @@
set -e
set -u

psql -v ON_ERROR_STOP=1 --username "$DB_USERNAME" <<-EOSQL
psql -v ON_ERROR_STOP=1 --username "$DB_USERNAME" <<- EOSQL
CREATE DATABASE crossfeed_test;
GRANT ALL PRIVILEGES ON DATABASE crossfeed_test TO $DB_USERNAME;
EOSQL
EOSQL
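
Note on the heredoc form used above: the '<<- EOSQL' variant tells bash to strip leading tab characters from the heredoc body and the closing delimiter, and '-v ON_ERROR_STOP=1' makes psql exit non-zero on the first failing statement. A minimal standalone sketch of the same pattern (the database name below is a placeholder, not from this commit):

#!/bin/bash
set -eu

# ON_ERROR_STOP=1 aborts on the first SQL error instead of continuing;
# the <<- form would also strip leading tabs if the body were indented.
psql -v ON_ERROR_STOP=1 --username "$DB_USERNAME" <<- EOSQL
CREATE DATABASE example_test;
GRANT ALL PRIVILEGES ON DATABASE example_test TO $DB_USERNAME;
EOSQL
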
Empty file modified backend/src/api/search/buildRequest.ts
100755 → 100644
Empty file.
Empty file modified backend/src/api/search/buildRequestFilter.ts
100755 → 100644
Empty file.
2 changes: 1 addition & 1 deletion backend/tools/build-worker.sh
@@ -8,4 +8,4 @@ set -e

docker build -t crossfeed-worker -f Dockerfile.worker .

docker build -t pe-worker -f Dockerfile.pe .
docker build -t pe-worker -f Dockerfile.pe .
8 changes: 4 additions & 4 deletions backend/tools/deploy-worker.sh
@@ -13,8 +13,8 @@ PE_WORKER_TAG=${1:-pe-staging-worker}

./tools/build-worker.sh
aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $AWS_ECR_DOMAIN
docker tag crossfeed-worker:latest $AWS_ECR_DOMAIN/$WORKER_TAG:latest
docker push $AWS_ECR_DOMAIN/$WORKER_TAG:latest
docker tag crossfeed-worker:latest $AWS_ECR_DOMAIN/"$WORKER_TAG":latest
docker push $AWS_ECR_DOMAIN/"$WORKER_TAG":latest

docker tag pe-worker:latest $AWS_ECR_DOMAIN/$PE_WORKER_TAG:latest
docker push $AWS_ECR_DOMAIN/$PE_WORKER_TAG:latest
docker tag pe-worker:latest $AWS_ECR_DOMAIN/"$PE_WORKER_TAG":latest
docker push $AWS_ECR_DOMAIN/"$PE_WORKER_TAG":latest
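
The quoting added above is the usual fix for shellcheck SC2086: an unquoted $WORKER_TAG or $PE_WORKER_TAG is subject to word splitting and globbing before docker ever sees it. A minimal sketch of the difference (quoting the whole image reference here purely for illustration):

# Unquoted: a tag value containing whitespace or glob characters is split or
# expanded into separate arguments before docker runs.
docker tag crossfeed-worker:latest $AWS_ECR_DOMAIN/$WORKER_TAG:latest

# Quoted: the expansion is passed through as a single, literal argument.
docker tag crossfeed-worker:latest "$AWS_ECR_DOMAIN/$WORKER_TAG:latest"
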
4 changes: 2 additions & 2 deletions backend/worker/generate_config.sh
@@ -1,7 +1,7 @@
#!/bin/bash

# Generate database.ini
cat <<EOF > pe-reports/src/pe_reports/data/database.ini
cat << EOF > pe-reports/src/pe_reports/data/database.ini
[postgres]
host=${DB_HOST}
database=${PE_DB_NAME}
@@ -54,4 +54,4 @@ pe_reports_path="${pe_reports_path%/pe-reports}/pe_reports"
# Copy database.ini to the module's installation directory
cp /app/pe-reports/src/pe_reports/data/database.ini "${pe_reports_path}/data/"

exec "$@"
exec "$@"
37 changes: 18 additions & 19 deletions backend/worker/pe-worker-entry.sh
@@ -16,22 +16,22 @@ fi
# Function to retrieve a message from RabbitMQ queue
get_rabbitmq_message() {
curl -s -u "guest:guest" \
-H "content-type:application/json" \
-X POST "http://rabbitmq:15672/api/queues/%2F/$SERVICE_QUEUE_URL/get" \
--data '{"count": 1, "requeue": false, "encoding": "auto", "ackmode": "ack_requeue_false"}'
-H "content-type:application/json" \
-X POST "http://rabbitmq:15672/api/queues/%2F/$SERVICE_QUEUE_URL/get" \
--data '{"count": 1, "requeue": false, "encoding": "auto", "ackmode": "ack_requeue_false"}'
}


while true; do
# Receive message from the Scan specific queue
if [ "$IS_LOCAL" = true ]; then
echo "Running local RabbitMQ logic..."
# Call the function and capture the response
RESPONSE=$(get_rabbitmq_message) &&
echo "Response from get_rabbitmq_message: $RESPONSE" &&
# Extract the JSON payload from the response body
MESSAGE=$(echo "$RESPONSE" | jq -r '.[0].payload')
MESSAGE=$(echo "$MESSAGE" | sed 's/\\"/"/g')
RESPONSE=$(get_rabbitmq_message) \
&& echo "Response from get_rabbitmq_message: $RESPONSE" \
&&
# Extract the JSON payload from the response body
MESSAGE=$(echo "$RESPONSE" | jq -r '.[0].payload')
MESSAGE=${MESSAGE//\\\"/\"}
echo "MESSAGE: $MESSAGE"

else
@@ -41,7 +41,7 @@ while true; do
fi

# Check if there are no more messages. If no more, then exit Fargate container
if [ -z "$MESSAGE" ] || [ "$MESSAGE" == "null" ]; then
if [ -z "$MESSAGE" ] || [ "$MESSAGE" == "null" ]; then
echo "No more messages in the queue. Exiting."
break
fi
@@ -53,15 +53,15 @@ while true; do
ORG=$(echo "$MESSAGE" | jq -r '.Messages[0].Body | fromjson | .org')
fi

if [[ "$SERVICE_TYPE" = *"shodan"* ]]; then
if [[ "$SERVICE_TYPE" = *"shodan"* ]]; then
COMMAND="pe-source shodan --soc_med_included --org=$ORG"
elif [[ "$SERVICE_TYPE" = *"dnstwist"* ]]; then
elif [[ "$SERVICE_TYPE" = *"dnstwist"* ]]; then
COMMAND="pe-source dnstwist --org=$ORG"
elif [[ "$SERVICE_TYPE" = *"hibp"* ]]; then
elif [[ "$SERVICE_TYPE" = *"hibp"* ]]; then
COMMAND="pe-source hibp --org=$ORG"
elif [[ "$SERVICE_TYPE" = *"intelx"* ]]; then
elif [[ "$SERVICE_TYPE" = *"intelx"* ]]; then
COMMAND="pe-source intelx --org=$ORG --soc_med_included"
elif [[ "$SERVICE_TYPE" = *"cybersixgill"* ]]; then
elif [[ "$SERVICE_TYPE" = *"cybersixgill"* ]]; then
COMMAND="pe-source cybersixgill --org=$ORG --soc_med_included"
else
echo "Unsupported SERVICE_TYPE: $SERVICE_TYPE"
@@ -71,9 +71,8 @@ while true; do
echo "Running $COMMAND"

# Run the pe-source command
eval "$COMMAND" &&

cat /app/pe_reports_logging.log
eval "$COMMAND" \
&& cat /app/pe_reports_logging.log

# Delete the processed message from the queue
if [ "$IS_LOCAL" = true ]; then
@@ -84,4 +83,4 @@ while true; do
aws sqs delete-message --queue-url "$SERVICE_QUEUE_URL" --receipt-handle "$RECEIPT_HANDLE"
echo "Done with $ORG"
fi
done
done
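
For context on the local branch of this loop: when IS_LOCAL is true the entrypoint pulls one message at a time from the RabbitMQ management HTTP API instead of SQS, extracts the payload with jq, and exits once the queue is empty; the sed call was also replaced with a bash substitution that unescapes embedded quotes. A minimal sketch of that polling step (the queue name is a placeholder; guest/guest and the rabbitmq hostname are the local docker-compose defaults the script already uses):

#!/bin/bash
QUEUE="dnstwist-queue"   # placeholder queue name, not from the commit

# POST to the management API's /get endpoint pulls (and acks) one message.
RESPONSE=$(curl -s -u "guest:guest" \
  -H "content-type:application/json" \
  -X POST "http://rabbitmq:15672/api/queues/%2F/$QUEUE/get" \
  --data '{"count": 1, "requeue": false, "encoding": "auto", "ackmode": "ack_requeue_false"}')

# .[0].payload is the message body; an empty queue yields "null",
# which is what the loop above checks for before exiting.
MESSAGE=$(echo "$RESPONSE" | jq -r '.[0].payload')
MESSAGE=${MESSAGE//\\\"/\"}   # same substitution the script now uses to unescape quotes
echo "MESSAGE: $MESSAGE"
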
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeAlerts.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source cybersixgill --cybersix-methods=alerts --soc_med_included
pe-source cybersixgill --cybersix-methods=alerts --soc_med_included
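
The '|| return' guard added here (and repeated in the sibling wrapper scripts that follow) addresses shellcheck SC2164: if the cd fails, the script should not keep running pe-source from whatever directory it happens to be in. In a standalone, non-sourced script the more common guard is an exit; a minimal sketch of that variant:

#!/bin/bash
# Abort if the working directory cannot be entered, rather than running
# the data source from the wrong location.
cd /app/pe-reports || exit 1

pe-source cybersixgill --cybersix-methods=alerts --soc_med_included
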
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeCredentials.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source cybersixgill --cybersix-methods=credentials --soc_med_included
pe-source cybersixgill --cybersix-methods=credentials --soc_med_included
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeDnsMonitor.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source dnsmonitor
pe-source dnsmonitor
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeDnstwist.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports/src/adhoc
cd /app/pe-reports/src/adhoc || return

python3 run_dnstwist.py
python3 run_dnstwist.py
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeHibp.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports/src/adhoc
cd /app/pe-reports/src/adhoc || return

python3 hibp_latest.py
python3 hibp_latest.py
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeIntelx.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source intelx
pe-source intelx
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeMentions.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source cybersixgill --cybersix-methods=mentions --soc_med_included
pe-source cybersixgill --cybersix-methods=mentions --soc_med_included
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeShodan.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source shodan --soc_med_included
pe-source shodan --soc_med_included
4 changes: 2 additions & 2 deletions backend/worker/pe_scripts/runPeTopCVEs.sh
@@ -1,5 +1,5 @@
#!/bin/bash

cd /app/pe-reports
cd /app/pe-reports || return

pe-source cybersixgill --cybersix-methods=topCVEs --soc_med_included
pe-source cybersixgill --cybersix-methods=topCVEs --soc_med_included
2 changes: 1 addition & 1 deletion backend/worker/shodan.sh
@@ -8,4 +8,4 @@ echo "Starting Shodan"

pe-source shodan --orgs=DHS --soc_med_included

echo "Done"
echo "Done"
3 changes: 2 additions & 1 deletion backend/worker/worker-entry.sh
@@ -1,3 +1,4 @@
#!/bin/bash
# Sets up an explicit proxy using mitmproxy.

set -e
@@ -30,4 +31,4 @@ echo "Printing pm2 error logs (if available):"

cat ~/pm2-error.log

echo "Done"
echo "Done"
4 changes: 3 additions & 1 deletion build.sh
@@ -1,3 +1,5 @@
#!/bin/bash

docker-compose down --volumes --rmi all
cd backend && npm run build-worker && cd .. && npm start
cd backend && npm run syncdb && npm run syncdb -- -d populate
cd backend && npm run syncdb && npm run syncdb -- -d populate
2 changes: 1 addition & 1 deletion frontend/serverless.yml
@@ -31,7 +31,7 @@ provider:
resourcePolicy:
- Effect: Allow
Principal: '*'
Action: execute-api:Invoke'
Action: execute-api:Invoke
Resource: 'execute-api:/${self:provider.stage}/*/*'
logs:
restApi: true
Empty file modified frontend/src/context/SearchProvider/applyDisjunctiveFaceting.js
100755 → 100644
Empty file.
Empty file modified frontend/src/context/SearchProvider/buildState.js
100755 → 100644
Empty file.
Empty file modified frontend/src/context/SearchProvider/buildStateFacets.js
100755 → 100644
Empty file.
5 changes: 3 additions & 2 deletions infrastructure/ssm-agent-install.sh
@@ -1,7 +1,8 @@
#!/bin/bash

sudo mkdir /tmp/ssm
cd /tmp/ssm
cd /tmp/ssm || return
wget https://s3.amazonaws.com/ec2-downloads-windows/SSMAgent/latest/debian_amd64/amazon-ssm-agent.deb
sudo dpkg -i amazon-ssm-agent.deb
sudo systemctl enable amazon-ssm-agent
rm amazon-ssm-agent.deb
rm amazon-ssm-agent.deb
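
A quick way to confirm the agent actually came up after this script runs (standard systemd commands, not part of the commit):

# Verify the SSM agent is enabled and running.
sudo systemctl is-enabled amazon-ssm-agent
sudo systemctl status amazon-ssm-agent --no-pager
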
4 changes: 3 additions & 1 deletion rebuild.sh
@@ -1 +1,3 @@
docker-compose up -d --build
#!/bin/bash

docker-compose up -d --build
4 changes: 3 additions & 1 deletion setup-matomo.sh
@@ -1,4 +1,6 @@
#!/bin/bash

# Fixes the trusted_hosts issue to allow Matomo to run on a custom port locally, as
# a workaround for https://github.com/matomo-org/matomo/issues/9549.
# Run this after initially setting up Matomo through the UI.
docker-compose exec matomo sed -i 's/"localhost"/"localhost:3000"/g' /var/www/html/config/config.ini.php
docker-compose exec matomo sed -i 's/"localhost"/"localhost:3000"/g' /var/www/html/config/config.ini.php
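
A simple way to confirm the substitution took effect (the grep here is illustrative, not part of the setup script):

# The trusted_hosts entry in Matomo's config should now reference localhost:3000.
docker-compose exec matomo grep -n 'localhost:3000' /var/www/html/config/config.ini.php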
