diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index ae7be5a6..34b515da 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -1,3 +1,4 @@
+---
 name: Backend Pipeline
 
 on:
@@ -145,7 +146,7 @@ jobs:
     needs: [build_worker, lint, test, test_worker, test_python]
     runs-on: ubuntu-latest
     environment: staging
-    concurrency: 1
+    concurrency: '1'
     if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
     steps:
       - uses: actions/checkout@v3
@@ -194,7 +195,7 @@ jobs:
     needs: [build_worker, lint, test, test_python]
     runs-on: ubuntu-latest
     environment: production
-    concurrency: 1
+    concurrency: '1'
     if: github.event_name == 'push' && github.ref == 'refs/heads/production'
     steps:
       - uses: actions/checkout@v3
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 009d1244..58670985 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,3 +1,4 @@
+---
 name: "CodeQL"
 
 on:
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 858f2d8b..0207bc93 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -1,3 +1,4 @@
+---
 name: Docs
 on:
   push:
diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml
index 4b740bdf..c0765c83 100644
--- a/.github/workflows/frontend.yml
+++ b/.github/workflows/frontend.yml
@@ -1,3 +1,4 @@
+---
 name: Frontend Pipeline
 
 on:
@@ -64,7 +65,7 @@ jobs:
     needs: [lint, test]
     runs-on: ubuntu-latest
     environment: staging
-    concurrency: 1
+    concurrency: '1'
     if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
     steps:
       - uses: actions/checkout@v3
@@ -94,7 +95,7 @@ jobs:
     needs: [lint, test]
     runs-on: ubuntu-latest
     environment: production
-    concurrency: 1
+    concurrency: '1'
     if: github.event_name == 'push' && github.ref == 'refs/heads/production'
     steps:
       - uses: actions/checkout@v3
diff --git a/.github/workflows/infrastructure.yml b/.github/workflows/infrastructure.yml
index f156a464..b502ebab 100644
--- a/.github/workflows/infrastructure.yml
+++ b/.github/workflows/infrastructure.yml
@@ -1,3 +1,4 @@
+---
 name: Infrastructure Pipeline
 
 on:
@@ -39,7 +40,7 @@ jobs:
     timeout-minutes: 4320
     runs-on: ubuntu-latest
     environment: staging
-    concurrency: 1
+    concurrency: '1'
     steps:
       - uses: actions/checkout@v3
 
@@ -78,7 +79,7 @@ jobs:
     timeout-minutes: 4320
     runs-on: ubuntu-latest
     environment: production
-    concurrency: 1
+    concurrency: '1'
     steps:
       - uses: actions/checkout@v3
 
diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml
index 1ae792b3..13167c4d 100644
--- a/.github/workflows/security.yml
+++ b/.github/workflows/security.yml
@@ -1,8 +1,9 @@
+---
 name: Check for Vulnerabilities
 
 on:
   schedule:
-    - cron: '0 1 * * *' # every day at 1 AM
+    - cron: '0 1 * * *'  # every day at 1 AM
   workflow_dispatch:
   push:
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e7cce058..24361b71 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,4 @@
-# Welcome #
+# Welcome
 
 We're so glad you're thinking about contributing to this open source
 project!  If you're unsure or afraid of anything, just ask or submit
@@ -11,7 +11,7 @@ Before contributing, we encourage you to read our CONTRIBUTING policy
 (you are here), our [LICENSE](LICENSE), and our [README](README.md),
 all of which should be in this repository.
 
-## Issues ##
+## Issues
 
 If you want to report a bug or request a new feature, the most direct
 method is to [create an
@@ -22,7 +22,7 @@ already been reported.  If it has then you might want to add a comment
 to the existing issue.  If it hasn't then feel free to create a new
 one.
 
-## Pull requests ##
+## Pull requests
 
 If you choose to [submit a pull
 request](https://github.com/cisagov/ASM-Dashboard/pulls), you will
@@ -38,7 +38,7 @@ regular contributor, then you will want to set up
 do that, the CI checks will run locally before you even write your
 commit message.  This speeds up your development cycle considerably.
 
-### Setting up pre-commit ###
+### Setting up pre-commit
 
 There are a few ways to do this, but we prefer to use
 [`pyenv`](https://github.com/pyenv/pyenv) and
@@ -57,7 +57,7 @@ entire environment configuration process.
 Otherwise, follow the steps below to manually configure your
 environment.
 
-#### Installing and using `pyenv` and `pyenv-virtualenv` ####
+#### Installing and using `pyenv` and `pyenv-virtualenv`
 
 On the Mac, we recommend installing [brew](https://brew.sh/).  Then
 installation is as simple as `brew install pyenv pyenv-virtualenv` and
@@ -128,7 +128,7 @@ the many things that `pyenv` can do.  See
 additional capabilities that pyenv-virtualenv adds to the `pyenv`
 command.
 
-#### Creating the Python virtual environment ####
+#### Creating the Python virtual environment
 
 Once `pyenv` and `pyenv-virtualenv` are installed on your system, you
 can create and configure the Python virtual environment with these
@@ -141,7 +141,7 @@ pyenv local ASM-Dashboard
 pip install --requirement requirements-dev.txt
 ```
 
-#### Installing the pre-commit hook ####
+#### Installing the pre-commit hook
 
 Now setting up pre-commit is as simple as:
 
@@ -153,7 +153,7 @@ At this point the pre-commit checks will run against any files that
 you attempt to commit.  If you want to run the checks against the
 entire repo, just execute `pre-commit run --all-files`.
 
-## Public domain ##
+## Public domain
 
 This project is in the public domain within the United States, and
 copyright and related rights in the work worldwide are waived through
diff --git a/README.md b/README.md
index 0202f3a4..2f7dcd2d 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
-# XFD #
+# XFD
 
 [![GitHub Build Status](https://github.com/cisagov/ASM-Dashboard/workflows/build/badge.svg)](https://github.com/cisagov/ASM-Dashboard/actions)
 
-## Contributing ##
+## Contributing
 
 We welcome contributions!  Please see [`CONTRIBUTING.md`](CONTRIBUTING.md) for
 details.
diff --git a/backend/.eslintrc.yml b/backend/.eslintrc.yml
index 28a7af26..b3bbd73e 100644
--- a/backend/.eslintrc.yml
+++ b/backend/.eslintrc.yml
@@ -1,3 +1,4 @@
+---
 {
   "env": { "es6": true, "node": true },
   "parser": "@typescript-eslint/parser",
diff --git a/backend/serverless.yml b/backend/serverless.yml
index bcc6005c..16775047 100644
--- a/backend/serverless.yml
+++ b/backend/serverless.yml
@@ -1,3 +1,4 @@
+---
 service: crossfeed
 
 frameworkVersion: '3'
@@ -111,44 +112,44 @@ resources:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-worker-control-queue
-        VisibilityTimeout: 300 # Should match or exceed function timeout
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        VisibilityTimeout: 300  # Should match or exceed function timeout
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
     ShodanQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-shodan-queue
         VisibilityTimeout: 300
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
     DnstwistQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-dnstwist-queue
         VisibilityTimeout: 300
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
     HibpQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-hibp-queue
         VisibilityTimeout: 300
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
     IntelxQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-intelx-queue
         VisibilityTimeout: 300
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
     CybersixgillQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-cybersixgill-queue
         VisibilityTimeout: 300
-        MaximumMessageSize: 262144 # 256 KB
-        MessageRetentionPeriod: 604800 # 7 days
+        MaximumMessageSize: 262144  # 256 KB
+        MessageRetentionPeriod: 604800  # 7 days
 
 functions:
   - ${file(./src/tasks/functions.yml)}
diff --git a/backend/src/api/functions.yml b/backend/src/api/functions.yml
index 32a73fed..751db85d 100644
--- a/backend/src/api/functions.yml
+++ b/backend/src/api/functions.yml
@@ -1,12 +1,13 @@
+---
 api:
   handler: src/api.handler
   events:
     - http:
-        path: / # this matches the base path
+        path: /  # this matches the base path
         method: ANY
         cors: true
     - http:
-        path: /{any+} # this matches any path, the token 'any' doesn't mean anything special
+        path: /{any+}  # this matches any path, the token 'any' doesn't mean anything special
         method: ANY
         cors: true
   # provisionedConcurrency: 1
diff --git a/backend/src/tasks/functions.yml b/backend/src/tasks/functions.yml
index a2344f5b..c821a00c 100644
--- a/backend/src/tasks/functions.yml
+++ b/backend/src/tasks/functions.yml
@@ -1,3 +1,4 @@
+---
 cloudwatchToS3:
   handler: src/tasks/cloudwatchToS3.handler
   timeout: 900
@@ -33,10 +34,10 @@ checkUserExpiration:
   timeout: 300
   handler: src/tasks/checkUserExpiration.handler
   events:
-    - schedule: cron(0 0 * * ? *) # Runs every day at midnight
+    - schedule: cron(0 0 * * ? *)  # Runs every day at midnight
 scanExecution:
   handler: src/tasks/scanExecution.handler
-  timeout: 300 # 5 minutes
+  timeout: 300  # 5 minutes
   environment:
     SQS_QUEUE_NAME: ${self:provider.stage}-worker-control-queue
   events:
diff --git a/backend/worker/.safety-policy.yml b/backend/worker/.safety-policy.yml
index 9db97d6f..08666deb 100644
--- a/backend/worker/.safety-policy.yml
+++ b/backend/worker/.safety-policy.yml
@@ -1,14 +1,15 @@
+---
 # Safety Security and License Configuration file
 # We recommend checking this file into your source control in the root of your Python project
 # If this file is named .safety-policy.yml and is in the same directory where you run `safety check` it will be used by default.
 # Otherwise, you can use the flag `safety check --policy-file <path-to-this-file>` to specify a custom location and name for the file.
 # To validate and review your policy file, run the validate command: `safety validate policy_file --path <path-to-this-file>`
-security: # configuration for the `safety check` command
-  ignore-cvss-severity-below: 0 # A severity number between 0 and 10. Some helpful reference points: 9=ignore all vulnerabilities except CRITICAL severity. 7=ignore all vulnerabilities except CRITICAL & HIGH severity. 4=ignore all vulnerabilities except CRITICAL, HIGH & MEDIUM severity.
-  ignore-cvss-unknown-severity: False # True or False. We recommend you set this to False.
-  ignore-vulnerabilities: # Here you can list multiple specific vulnerabilities you want to ignore (optionally for a time period)
+security:  # configuration for the `safety check` command
+  ignore-cvss-severity-below: 0  # A severity number between 0 and 10. Some helpful reference points: 9=ignore all vulnerabilities except CRITICAL severity. 7=ignore all vulnerabilities except CRITICAL & HIGH severity. 4=ignore all vulnerabilities except CRITICAL, HIGH & MEDIUM severity.
+  ignore-cvss-unknown-severity: false  # true or false. We recommend you set this to false.
+  ignore-vulnerabilities:  # Here you can list multiple specific vulnerabilities you want to ignore (optionally for a time period)
     # We recommend making use of the optional `reason` and `expires` keys for each vulnerability that you ignore.
-    54672: # Vulnerability found in scrapy version >= 0.7
-      reason: No fix currently available # optional, for internal note purposes to communicate with your team. This reason will be reported in the Safety reports
-      expires: '2024-06-01' # We will revisit for a fix in 6 months.
+  continue-on-vulnerability-error: false  # Suppress non-zero exit codes when vulnerabilities are found. Enable this in pipelines and CI/CD processes if you want to pass builds that have vulnerabilities. We recommend you set this to false.
+    54672:  # Vulnerability found in scrapy version >= 0.7
+      reason: No fix currently available  # optional, for internal note purposes to communicate with your team. This reason will be reported in the Safety reports
+      expires: '2024-06-01'  # We will revisit for a fix in 6 months.
+  continue-on-vulnerability-error: False  # Suppress non-zero exit codes when vulnerabilities are found. Enable this in pipelines and CI/CD processes if you want to pass builds that have vulnerabilities. We recommend you set this to False.
diff --git a/docker-compose.yml b/docker-compose.yml
index b64e0279..9851c415 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -135,8 +135,8 @@ services:
   rabbitmq:
     image: 'rabbitmq:3.8-management'
     ports:
-      - '5672:5672' # RabbitMQ default port
-      - '15672:15672' # RabbitMQ management plugin
+      - '5672:5672'  # RabbitMQ default port
+      - '15672:15672'  # RabbitMQ management plugin
     networks:
       - backend
     environment:
diff --git a/docs/src/documentation-pages/dev/pe.md b/docs/src/documentation-pages/dev/pe.md
index 5a5e13e3..05314358 100644
--- a/docs/src/documentation-pages/dev/pe.md
+++ b/docs/src/documentation-pages/dev/pe.md
@@ -69,7 +69,9 @@ Then enter pe database password.
 Follow [this guide](https://www.postgresqltutorial.com/psql-commands/) for psql command basics.
 
 ## Populate the database with pg dump file
+
 Locate the latest postgres dump file and run:
+
 ```bash
 pg_restore -U pe -d pe "[path to sql dump file]"
 ```
diff --git a/docs/src/documentation-pages/dev/quickstart.md b/docs/src/documentation-pages/dev/quickstart.md
index b2e05f5d..c6744d18 100644
--- a/docs/src/documentation-pages/dev/quickstart.md
+++ b/docs/src/documentation-pages/dev/quickstart.md
@@ -10,18 +10,25 @@ This quickstart describes the initial setup required to run an instance of Cross
 1. Mac Users - before starting the initial setup, ensure you have already completed the following: [development environment for mac-based computers](https://github.com/cisagov/development-guide/blob/develop/dev_envs/mac-env-setup.md).
 2. Install [Node.js](https://nodejs.org/en/download/) 18 and [Docker Compose](https://docs.docker.com/compose/install/).
 3. Copy root `dev.env.example` file to a `.env` file.
+
    ```bash
    cp dev.env.example .env
    ```
+
 4. Build the crossfeed-worker Docker image:
+
    ```bash
    cd backend && npm run build-worker
    ```
+
 5. Start the entire environment from the root directory:
-      ```bash
-      npm start
-      ```
+
+   ```bash
+   npm start
+   ```
+
 6. Generate the initial DB schema and populate it with sample data:
+
    ```bash
    cd backend
    # Generate schema
@@ -29,7 +36,9 @@ This quickstart describes the initial setup required to run an instance of Cross
    # Populate sample data
    npm run syncdb -- -d populate
    ```
+
    If you ever need to drop and recreate the database, you can run `npm run syncdb -- -d dangerouslyforce`.
+
 7. Navigate to [http://localhost](http://localhost) in a browser. The first time please navigate to [http://localhost/signup](http://localhost/signup) to create account. Local accounts can be set to Global Admin to aide in development.
 8. Hot reloading for source files is enabled, but after changes to non-source code files stopping and starting Docker Compose is required. The following are examples of changes that will require restarting the environment:
    - Frontend or backend dependency changes
@@ -41,22 +50,29 @@ This quickstart describes the initial setup required to run an instance of Cross
 
 1. Make sure to complete the Initial Setup above
 2. Fill dev.env.example with necessary credentials and rerun:
+
    ```bash
       cp dev.env.example .env
    ```
+
 3. Generate the P&E DB Schema
+
    ```bash
    cd backend
    npm run pesyncdb
    ```
+
 4. Start the RabbitMQ listener. This will listen for any messages sent to the queue and
    trigger the scanExecution.ts function. This will stay running with this message: "Waiting for messages from ControlQueue..."
+
    ```bash
    cd backend
    npm run control-queue
    ```
+
 5. Run sendMessage.js to send a sample message to the queue. Feel free to edit this file
    while testing.
+
    ```bash
    cd backend
    node sendMessage.js
@@ -65,6 +81,7 @@ This quickstart describes the initial setup required to run an instance of Cross
 ### Running tests
 
 To run tests, first make sure you have already started Crossfeed with `npm start` (or, at bare minimum, that the database container is running). Then run:
+
 ```bash
 cd backend
 npm test
@@ -73,6 +90,7 @@ npm test
 If snapshot tests fail, update snapshots by running `npm test -- -u`.
 
 To run tests for the subset of worker code that is written in Python, you need to run:
+
 ```bash
 pip install -r worker/requirements.txt
 pytest
@@ -83,6 +101,7 @@ To view a code coverage report (a minimum code coverage threshold is checked in
 ### Monitoring Docker containers
 
 To see which Docker containers are running, you can run:
+
 ```bash{outputLines: 2-10}
 docker ps
 CONTAINER ID        IMAGE                                                 COMMAND                  CREATED             STATUS              PORTS                                            NAMES
@@ -97,6 +116,7 @@ c3ed457a71d2        postgres:latest                                       "docke
 ```
 
 You can then check the logs of a particular container by specifying a container's name with the `docker logs` command. For example:
+
 ```bash
 docker logs crossfeed_backend_1 --follow
 ```
@@ -105,6 +125,7 @@ docker logs crossfeed_backend_1 --follow
 
 To see more information about the design and development of each component of Crossfeed,
 see the following links:
+
 - [Frontend](frontend.md) for the React frontend.
 - [REST API](rest-api.md) for the REST API.
 - [Database](database.md) for the database models stored in Postgres.
@@ -121,6 +142,7 @@ The docs are based on the [federalist-uswds-gatsby](https://github.com/18F/feder
 ### Common Issues
 
 - Node Error issue occurs due to "npm install"
+
 ```bash
     npm ERR! code EBADENGINE
 	npm ERR! engine Unsupported engine
@@ -137,6 +159,7 @@ In this case install nvm for nodes 16.0.0 to 17.0.0.
 for example `nvm install 16.19.0` then check it by `node -- version` and `npm -- version`
 
 - Sometimes you may get an error in package-lock.json. This error is due to the package downloading the docker build. Remove the package-lock.json file and reinstall it using `npm install`.
+
 ```bash
    rm package-lock.json
    npm install
@@ -145,6 +168,7 @@ for example `nvm install 16.19.0` then check it by `node -- version` and `npm --
 If successful then continue to step 3.
 
 - Permission Issue / Permissions not permitted / Operation not permitted / Module build Failed
+
 ```bash
    Failed to compile.
 	crossfeed-frontend-1  |
diff --git a/frontend/serverless.yml b/frontend/serverless.yml
index 5e1b2590..2a266e71 100644
--- a/frontend/serverless.yml
+++ b/frontend/serverless.yml
@@ -42,10 +42,10 @@ functions:
     handler: scripts/api.handler
     events:
       - http:
-          path: / # this matches the base path
+          path: /  # this matches the base path
           method: GET
       - http:
-          path: /{any+} # this matches any path, the token 'any' doesn't mean anything special
+          path: /{any+}  # this matches any path, the token 'any' doesn't mean anything special
           method: GET
     # provisionedConcurrency: 1
   docs: