diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index 90cb935..0000000 Binary files a/.DS_Store and /dev/null differ diff --git a/.ansible-lint b/.ansible-lint new file mode 100755 index 0000000..057c65e --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,24 @@ +--- + +parseable: true +quiet: true +skip_list: + - 'schema' + - 'no-changed-when' + - 'var-spacing' + - 'fqcn-builtins' + - 'experimental' + - 'name[play]' + - 'name[casing]' + - 'name[template]' + - 'fqcn[action]' + - 'key-order[task]' + - '204' + - '305' + - '303' + - '403' + - '306' + - '602' + - '208' +use_default_rules: true +verbosity: 0 diff --git a/.config/.gitleaks-report.json b/.config/.gitleaks-report.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/.config/.gitleaks-report.json @@ -0,0 +1 @@ +[] diff --git a/.config/.secrets.baseline b/.config/.secrets.baseline new file mode 100644 index 0000000..dbeb8bc --- /dev/null +++ b/.config/.secrets.baseline @@ -0,0 +1,119 @@ +{ + "version": "1.4.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": 
"detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + }, + { + "path": "detect_secrets.filters.regex.should_exclude_file", + "pattern": [ + ".config/.gitleaks-report.json", + "tasks/parse_etc_password.yml" + ] + } + ], + "results": {}, + "generated_at": "2023-09-20T15:36:36Z" +} diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..b2daffb --- /dev/null +++ b/.gitattributes @@ -0,0 +1,6 @@ +# adding github settings to show correct language +*.sh linguist-detectable=true +*.yml linguist-detectable=true +*.ps1 linguist-detectable=true +*.j2 linguist-detectable=true +*.md linguist-documentation diff --git a/.github/.DS_Store b/.github/.DS_Store deleted file mode 100644 index 2d0bb70..0000000 Binary files a/.github/.DS_Store and /dev/null differ diff --git a/.github/workflows/communitytodevel.yml b/.github/workflows/communitytodevel.yml deleted file mode 100644 index fa5b18d..0000000 --- a/.github/workflows/communitytodevel.yml +++ /dev/null @@ -1,38 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: CommunityToDevel - -# Controls when the action will run. 
Triggers the workflow on push or pull request -# events but only for the devel branch -on: - pull_request: - branches: [ devel ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - # Refactr pipeline for devel pull request/merge - - name: Refactr - Run Pipeline (to devel) - # You may pin to the exact commit or the version. - # uses: refactr/action-run-pipeline@be91e2796aa225268e4685c0e01a26d5f800cd53 - uses: refactr/action-run-pipeline@v0.1.2 - with: - # API token - api_token: '${{ secrets.REFACTR_KEY }}' - # Project ID - project_id: 5f47f0c4a13c7b18373e5556 - # Job ID - job_id: 5f933cbcf9c74e86b1609c00 - # Variables - variables: '{ "gitrepo": "https://github.com/ansible-lockdown/UBUNTU18-CIS.git", "image": "ami-0608f6bd6e0eec7cc", "githubBranch": "${{ github.head_ref }}", "username": "ubuntu" }' - # Refactr API base URL - api_url: # optional diff --git a/.github/workflows/devel_pipeline_validation.yml b/.github/workflows/devel_pipeline_validation.yml new file mode 100644 index 0000000..dba39dc --- /dev/null +++ b/.github/workflows/devel_pipeline_validation.yml @@ -0,0 +1,138 @@ +--- + + name: Devel pipeline + + on: # yamllint disable-line rule:truthy + pull_request_target: + types: [opened, reopened, synchronize] + branches: + - devel + paths: + - '**.yml' + - '**.sh' + - '**.j2' + - '**.ps1' + - '**.cfg' + + # A workflow run is made up of one or more jobs + # that can run sequentially or in parallel + jobs: + # This will create messages for first time contributers and direct them to the Discord server + welcome: + runs-on: ubuntu-latest + + steps: + - uses: 
actions/first-interaction@main + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + pr-message: |- + Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown! + Please join in the conversation happening on the [Discord Server](https://www.lockdownenterprise.com/discord) as well. + + # This workflow contains a single job which tests the playbook + playbook-test: + # The type of runner that the job will run on + runs-on: ubuntu-latest + env: + ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }} + # Imported as a variable by terraform + TF_VAR_repository: ${{ github.event.repository.name }} + defaults: + run: + shell: bash + working-directory: .github/workflows/github_linux_IaC + + steps: + - name: Clone ${{ github.event.repository.name }} + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + # Pull in terraform code for linux servers + - name: Clone github IaC plan + uses: actions/checkout@v3 + with: + repository: ansible-lockdown/github_linux_IaC + path: .github/workflows/github_linux_IaC + + - name: Add_ssh_key + working-directory: .github/workflows + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}" + run: | + mkdir .ssh + chmod 700 .ssh + echo $PRIVATE_KEY > .ssh/github_actions.pem + chmod 600 .ssh/github_actions.pem + + - name: DEBUG - Show IaC files + if: env.ENABLE_DEBUG == 'true' + run: | + echo "OSVAR = $OSVAR" + echo "benchmark_type = $benchmark_type" + pwd + ls + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Init + id: init + run: terraform init + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Validate + id: validate + run: terraform validate + env: + # Imported from github 
variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Apply + id: apply + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false + + ## Debug Section + - name: DEBUG - Show Ansible hostfile + if: env.ENABLE_DEBUG == 'true' + run: cat hosts.yml + + # Aws deployments taking a while to come up insert sleep or playbook fails + + - name: Sleep for 60 seconds + run: sleep 60s + + # Run the ansible playbook + - name: Run_Ansible_Playbook + uses: arillso/action.playbook@master + with: + playbook: site.yml + inventory: .github/workflows/github_linux_IaC/hosts.yml + galaxy_file: collections/requirements.yml + private_key: ${{ secrets.SSH_PRV_KEY }} + # verbose: 3 + env: + ANSIBLE_HOST_KEY_CHECKING: "false" + ANSIBLE_DEPRECATION_WARNINGS: "false" + + # Remove test system - User secrets to keep if necessary + + - name: Terraform_Destroy + if: always() && env.ENABLE_DEBUG == 'false' + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false diff --git a/.github/workflows/develtomaster.yml b/.github/workflows/develtomaster.yml deleted file mode 100644 index d01ba6a..0000000 --- a/.github/workflows/develtomaster.yml +++ /dev/null @@ -1,38 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: DevelToMaster - -# Controls when the action will run. 
Triggers the workflow on push or pull request -# events but only for the devel branch -on: - pull_request: - branches: [ main ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - # Refactr pipeline for devel pull request/merge - - name: Refactr - Run Pipeline (to master) - # You may pin to the exact commit or the version. - # uses: refactr/action-run-pipeline@be91e2796aa225268e4685c0e01a26d5f800cd53 - uses: refactr/action-run-pipeline@v0.1.2 - with: - # API token - api_token: '${{ secrets.REFACTR_KEY }}' - # Project ID - project_id: 5f47f0c4a13c7b18373e5556 - # Job ID - job_id: 5f90ad90f9c74e6d1e606e33 - # Variables - variables: '{ "gitrepo": "https://github.com/ansible-lockdown/UBUNTU18-CIS.git", "image": "ami-0608f6bd6e0eec7cc", "githubBranch": "${{ github.head_ref }}", "username": "ubuntu" }' - # Refactr API base URL - api_url: # optional diff --git a/.github/workflows/main_pipeline_validation.yml b/.github/workflows/main_pipeline_validation.yml new file mode 100644 index 0000000..0b149fb --- /dev/null +++ b/.github/workflows/main_pipeline_validation.yml @@ -0,0 +1,127 @@ +--- + + name: Main pipeline + + on: # yamllint disable-line rule:truthy + pull_request_target: + types: [opened, reopened, synchronize] + branches: + - main + paths: + - '**.yml' + - '**.sh' + - '**.j2' + - '**.ps1' + - '**.cfg' + + # A workflow run is made up of one or more jobs + # that can run sequentially or in parallel + jobs: + + # This workflow contains a single job which tests the playbook + playbook-test: + # The type of runner that the job will run on + runs-on: ubuntu-latest + env: + ENABLE_DEBUG: 
${{ vars.ENABLE_DEBUG }} + # Imported as a variable by terraform + TF_VAR_repository: ${{ github.event.repository.name }} + defaults: + run: + shell: bash + working-directory: .github/workflows/github_linux_IaC + + steps: + - name: Clone ${{ github.event.repository.name }} + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + # Pull in terraform code for linux servers + - name: Clone github IaC plan + uses: actions/checkout@v3 + with: + repository: ansible-lockdown/github_linux_IaC + path: .github/workflows/github_linux_IaC + + - name: Add_ssh_key + working-directory: .github/workflows + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}" + run: | + mkdir .ssh + chmod 700 .ssh + echo $PRIVATE_KEY > .ssh/github_actions.pem + chmod 600 .ssh/github_actions.pem + + - name: DEBUG - Show IaC files + if: env.ENABLE_DEBUG == 'true' + run: | + echo "OSVAR = $OSVAR" + echo "benchmark_type = $benchmark_type" + pwd + ls + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Init + id: init + run: terraform init + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Validate + id: validate + run: terraform validate + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Apply + id: apply + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false + + ## Debug Section + - 
name: DEBUG - Show Ansible hostfile + if: env.ENABLE_DEBUG == 'true' + run: cat hosts.yml + + # Aws deployments taking a while to come up insert sleep or playbook fails + + - name: Sleep for 60 seconds + run: sleep 60s + + # Run the ansible playbook + - name: Run_Ansible_Playbook + uses: arillso/action.playbook@master + with: + playbook: site.yml + inventory: .github/workflows/github_linux_IaC/hosts.yml + galaxy_file: collections/requirements.yml + private_key: ${{ secrets.SSH_PRV_KEY }} + # verbose: 3 + env: + ANSIBLE_HOST_KEY_CHECKING: "false" + ANSIBLE_DEPRECATION_WARNINGS: "false" + + # Remove test system - User secrets to keep if necessary + + - name: Terraform_Destroy + if: always() && env.ENABLE_DEBUG == 'false' + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false diff --git a/.github/workflows/update_galaxy.yml b/.github/workflows/update_galaxy.yml new file mode 100644 index 0000000..951a53c --- /dev/null +++ b/.github/workflows/update_galaxy.yml @@ -0,0 +1,21 @@ +--- + +# This is a basic workflow to help you get started with Actions + +name: update galaxy + +# Controls when the action will run. 
+# Triggers the workflow on merge request events to the main branch +on: + push: + branches: + - main +jobs: + update_role: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: robertdebock/galaxy-action@master + with: + galaxy_api_key: ${{ secrets.GALAXY_API_KEY }} + git_branch: main diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7f0e241 --- /dev/null +++ b/.gitignore @@ -0,0 +1,44 @@ +.env +*.log +*.retry +.vagrant +tests/*redhat-subscription +tests/Dockerfile +*.iso +*.box +packer_cache +delete* +ignore* +# VSCode +.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# DS_Store +.DS_Store +._* + +# Linux Editors +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +.elc +auto-save-list +tramp +.\#* +*.swp +*.swo +rh-creds.env +travis.env + +# Lockdown-specific +benchparse/ +*xccdf.xml +*.retry + +# GitHub Action/Workflow files +.github/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..97c7943 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,67 @@ +--- +##### CI for use by github no need for action to be added +##### Inherited +ci: + autofix_prs: false + skip: [detect-aws-credentials, ansible-lint ] + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + # Safety + - id: detect-aws-credentials + - id: detect-private-key + + # git checks + - id: check-merge-conflict + - id: check-added-large-files + - id: check-case-conflict + + # General checks + - id: trailing-whitespace + name: Trim Trailing Whitespace + description: This hook trims trailing whitespace. 
+ entry: trailing-whitespace-fixer + language: python + types: [text] + args: [--markdown-linebreak-ext=md] + - id: end-of-file-fixer + +# Scan for passwords +- repo: https://github.com/Yelp/detect-secrets + rev: v1.4.0 + hooks: + - id: detect-secrets + args: [ '--baseline', '.config/.secrets.baseline' ] + exclude: .config/.gitleaks-report.json + +- repo: https://github.com/gitleaks/gitleaks + rev: v8.17.0 + hooks: + - id: gitleaks + args: ['--baseline-path', '.config/.gitleaks-report.json'] + +- repo: https://github.com/ansible-community/ansible-lint + rev: v6.17.2 + hooks: + - id: ansible-lint + name: Ansible-lint + description: This hook runs ansible-lint. + entry: python3 -m ansiblelint --force-color site.yml -c .ansible-lint + language: python + # do not pass files to ansible-lint, see: + # https://github.com/ansible/ansible-lint/issues/611 + pass_filenames: false + always_run: true + additional_dependencies: + # https://github.com/pre-commit/pre-commit/issues/1526 + # If you want to use specific version of ansible-core or ansible, feel + # free to override `additional_dependencies` in your own hook config + # file. 
+ - ansible-core>=2.10.1 + +- repo: https://github.com/adrienverge/yamllint.git + rev: v1.32.0 # or higher tag + hooks: + - id: yamllint diff --git a/.travis.yml b/.travis.yml index 36bbf62..246299f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,23 +7,23 @@ sudo: false # Install ansible addons: - apt: - packages: - - python-pip + apt: + packages: + - python-pip install: - # Install ansible - - pip install ansible + # Install ansible + - pip install ansible - # Check ansible version - - ansible --version + # Check ansible version + - ansible --version - # Create ansible.cfg with correct roles_path - - printf '[defaults]\nroles_path=../' >ansible.cfg + # Create ansible.cfg with correct roles_path + - printf '[defaults]\nroles_path=../' >ansible.cfg script: - # Basic role syntax check - - ansible-playbook tests/test.yml -i tests/inventory --syntax-check + # Basic role syntax check + - ansible-playbook tests/test.yml -i tests/inventory --syntax-check notifications: - webhooks: https://galaxy.ansible.com/api/v1/notifications/ \ No newline at end of file + webhooks: https://galaxy.ansible.com/api/v1/notifications/ diff --git a/.yamllint b/.yamllint new file mode 100755 index 0000000..ec46929 --- /dev/null +++ b/.yamllint @@ -0,0 +1,33 @@ +--- +extends: default + +ignore: | + tests/ + molecule/ + .github/ + .gitlab-ci.yml + *molecule.yml + +rules: + indentation: + # Requiring 4 space indentation + spaces: 4 + # Requiring consistent indentation within a file, either indented or not + indent-sequences: consistent + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + empty-lines: + max: 1 + line-length: disable + key-duplicates: enable + new-line-at-end-of-file: enable + new-lines: + type: unix + trailing-spaces: enable + truthy: + allowed-values: ['true', 'false'] + check-keys: false diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..9030d8a --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,69 
@@ +Contributing to MindPoint Group Projects +======================================== + +Rules +----- +1) All commits must be GPG signed (details in Signing section) +2) All commits must have Signed-off-by (Signed-off-by: Joan Doe ) in the commit message (details in Signing section) +3) All work is done in your own branch or own fork +4) Pull requests + a) From within the repo: All pull requests go into the devel branch. There are automated checks for signed commits, signoff in commit message, and functional testing + b) From a forked repo: All pull requests will go into a staging branch within the repo. There are automated checks for signed commits, signoff in commit message, and functional testing when going from staging to devel +5) Be open and nice to each other + +Workflow +-------- +- Your work is done in your own individual branch. Make sure to to Signed-off and GPG sign all commits you intend to merge +- All community Pull Requests are into the devel branch (from forked repos they go to staging before devel). There are automated checks for GPG signed, Signed-off in commits, and functional tests before being approved. If your pull request comes in from outside of our repo, the pull request will go into a staging branch. There is info needed from our repo for our CI/CD testing. +- Once your changes are merged and a more detailed review is complete, an authorized member will merge your changes into the main branch for a new release + +Signing your contribution +------------------------- + +We've chosen to use the Developer's Certificate of Origin (DCO) method +that is employed by the Linux Kernel Project, which provides a simple +way to contribute to MindPoint Group projects. 
+ +The process is to certify the below DCO 1.1 text +:: + + Developer's Certificate of Origin 1.1 + + By making a contribution to this project, I certify that: + + (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + + (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + + (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + + (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. +:: + +Then, when it comes time to submit a contribution, include the +following text in your contribution commit message: + +:: + + Signed-off-by: Joan Doe + +:: + + +This message can be entered manually, or if you have configured git +with the correct `user.name` and `user.email`, you can use the `-s` +option to `git commit` to automatically include the signoff message. 
diff --git a/ChangeLog.md b/ChangeLog.md new file mode 100644 index 0000000..314012c --- /dev/null +++ b/ChangeLog.md @@ -0,0 +1,39 @@ +# Changelog + +## 1.4.0 + +- new workflow +- linting +- readme updates +- wifi control update +- import_tasks - set file + +## 1.3.1 + +- issue 84 from ubuntu20 fixed vartmp +- fixed other mount points in 1.1.x.yml and options +- updated the audit process list check + +## 1.3 + +- Audit integration +- fqcn +- linting +- idempotent improvements +- warning method +- naming tidy up +- minimum Ansible version +- updated workflow to use audit benchmark version branch +- readme update + +## Release 1.2.1 + +- Updated Readme +- Added Changelog.md and updated +- Added Warning Count To Entire Role +- Updated Controls + - 1.4.2 + - 6.1.1 + - Updated True / False + - Fixed Linting + - Update Deprecated Module Names diff --git a/LICENSE b/LICENSE index afad05e..39810af 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2020 Ansible Lockdown +Copyright (c) 2023 Mindpoint Group / Lockdown Enterprise Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index ee120cc..e5d51a4 100644 --- a/README.md +++ b/README.md @@ -1,123 +1,191 @@ -# ubuntu-18 -Ubuntu 18 CIS -========= +# UBUNTU18 CIS -Configure Ubuntu 18 machine to be [CIS](https://www.cisecurity.org/cis-benchmarks/) compliant. There are some intrusive tasks that have a toggle in defaults main.yml to disable to automated fix +## Configure a Ubuntu18 machine to be [CIS](https://www.cisecurity.org/cis-benchmarks/) compliant -This role **will make changes to the system** that could break things. This is not an auditing tool but rather a remediation tool to be used after an audit has been conducted. 
+### Based on [ CIS Ubuntu1804 Benchmark v2.1.0 ](https://www.cisecurity.org/cis-benchmarks/) -Based on [CIS Ubuntu Linux 18.04 LTS Benchmark ](https://community.cisecurity.org/collab/public/index.php). +--- +![Org Stars](https://img.shields.io/github/stars/ansible-lockdown?label=Org%20Stars&style=social) +![Stars](https://img.shields.io/github/stars/ansible-lockdown/UBUNTU18-CIS?label=Repo%20Stars&style=social) +![Forks](https://img.shields.io/github/forks/ansible-lockdown/UBUNTU18-CIS?style=social) +![followers](https://img.shields.io/github/followers/ansible-lockdown?style=social) +[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/AnsibleLockdown.svg?style=social&label=Follow%20%40AnsibleLockdown)](https://twitter.com/AnsibleLockdown) -Requirements ------------- +![Ansible Galaxy Quality](https://img.shields.io/ansible/quality/54776?label=Quality&&logo=ansible) +![Discord Badge](https://img.shields.io/discord/925818806838919229?logo=discord) -You should carefully read through the tasks to make sure these changes will not break your systems before running this playbook. -If you want to do a dry run without changing anything, set the below sections (rhel8cis_section1-6) to false. +![Release Branch](https://img.shields.io/badge/Release%20Branch-Main-brightgreen) +![Release Tag](https://img.shields.io/github/v/release/ansible-lockdown/UBUNTU18-CIS) +![Release Date](https://img.shields.io/github/release-date/ansible-lockdown/UBUNTU18-CIS) -Role Variables --------------- +[![Main Pipeline Status](https://github.com/ansible-lockdown/UBUNTU18-CIS/actions/workflows/main_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/UBUNTU18-CIS/actions/workflows/main_pipeline_validation.yml) -There are many role variables defined in defaults/main.yml. This list shows the most important. 
+[![Devel Pipeline Status](https://github.com/ansible-lockdown/UBUNTU18-CIS/actions/workflows/devel_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/UBUNTU18-CIS/actions/workflows/devel_pipeline_validation.yml) +![Devel Commits](https://img.shields.io/github/commit-activity/m/ansible-lockdown/UBUNTU18-CIS/devel?color=dark%20green&label=Devel%20Branch%20Commits) -**ubtu18cis_disruption_high**: Run CIS checks that we typically do NOT want to automate due to the high probability of breaking the system (Default: false) +![Issues Open](https://img.shields.io/github/issues-raw/ansible-lockdown/UBUNTU18-CIS?label=Open%20Issues) +![Issues Closed](https://img.shields.io/github/issues-closed-raw/ansible-lockdown/UBUNTU18-CIS?label=Closed%20Issues&&color=success) +![Pull Requests](https://img.shields.io/github/issues-pr/ansible-lockdown/UBUNTU18-CIS?label=Pull%20Requests) -**ubtu18cis_section1_patch**: CIS - Iniitial setup (Section 1) (Default: true) +![License](https://img.shields.io/github/license/ansible-lockdown/UBUNTU18-CIS?label=License) +--- -**ubtu18cis_section2_patch**: CIS - Services settings (Section 2) (Default: true) +## Looking for support? -**ubtu18cis_section3_patch**: CIS - Network Configuration (Section 3) (Default: true) +[Lockdown Enterprise](https://www.lockdownenterprise.com#GH_AL_UB18_cis) -**ubtu18cis_section4_patch**: CIS - Logging and Auditing settings (Section 4) (Default: true) +[Ansible support](https://www.mindpointgroup.com/cybersecurity-products/ansible-counselor#GH_AL_UB18_cis) -**ubtu18cis_section5_patch**: CIS - Access, Authentication and Authorization settings (Section 5) (Default: true) +### Community -**ubtu18cis_section6_patch**: CIS - System Maintenance settings (Section 6) (Default: true) +Join us on our [Discord Server](https://www.lockdownenterprise.com/discord) to ask questions, discuss features, or just chat with other Ansible-Lockdown users. 
+--- -##### Service variables: -###### These control whether a server should or should not be allowed to continue to run these services +## Caution(s) -``` -ubtu18cis_allow_autofs: false -ubtu18cis_allow_usb_storage: false -ubtu18cis_avahi_server: false -ubtu18cis_cups_server: false -ubtu18cis_dhcp_server: false -ubtu18cis_ldap_server: false -ubtu18cis_nfs_rpc_server: false -ubtu18cis_dns_server: false -ubtu18cis_vsftpd_server: false -ubtu18cis_httpd_server: false -ubtu18cis_dovecot_server: false -ubtu18cis_smb_server: false -ubtu18cis_squid_server: false -ubtu18cis_snmp_server: false -ubtu18cis_rsync_server: false -ubtu18cis_nis_server: false -``` +This role **will make changes to the system** which may have unintended consequences. This is not an auditing tool but rather a remediation tool to be used after an audit has been conducted. -##### Client variables: -###### These control whether a client should or should not be allowed to continue to run these services -``` -ubtu18cis_nis_required: false -ubtu18cis_rsh_required: false -ubtu18cis_talk_required: false -ubtu18cis_telnet_required: false -ubtu18cis_ldap_clients_required: false -ubtu18cis_is_router: false -``` +Check Mode is not supported! The role will complete in check mode without errors, but it is not supported and should be used with caution. The UBUNTU18-CIS-Audit role or a compliance scanner should be used for compliance checking over check mode. + +This role was developed against a clean install of the Operating System. If you are implementing to an existing system please review this role for any site specific changes that are needed. + +To use release version please point to main branch and relevant release for the cis benchmark you wish to work with. + +--- + +## Matching a security Level for CIS + +It is possible to to only run level 1 or level 2 controls for CIS. 
+This is managed using tags: -##### IPv6 required -`ubtu18cis_ipv6_required` +- level1_server +- level1_workstation +- level2_server +- level2_workstation -##### X-Windows required -`ubtu18cis_xwindows_required` +The control found in defaults main also need to reflect this as this control the testing thet takes place if you are using the audit component. -##### Is system a container -`ubtu18cis_system_is_container` +## Coming from a previous release -Dependencies ------------- +CIS release always contains changes, it is highly recommended to review the new references and available variables. This have changed significantly since ansible-lockdown initial release. +This is now compatible with python3 if it is found to be the default interpreter. This does come with pre-requisites which it configures the system accordingly. -Ansible > 2.6.5 +Further details can be seen in the [Changelog](./ChangeLog.md) -Example Playbook ----------------- +## Auditing (new) -This sample playbook should be run in a folder that is above the main UBUNTU-18-CIS folder. +This can be turned on or off within the defaults/main.yml file with the variable run_audit. The value is false by default, please refer to the wiki for more details. The defaults file also populates the goss checks to check only the controls that have been enabled in the ansible role. +This is a much quicker, very lightweight, checking (where possible) config compliance and live/running settings. + +A new form of auditing has been developed, by using a small (12MB) go binary called [goss](https://github.com/goss-org/goss) along with the relevant configurations to check. Without the need for infrastructure or other tooling. +This audit will not only check the config has the correct setting but aims to capture if it is running with that configuration also trying to remove [false positives](https://www.mindpointgroup.com/blog/is-compliance-scanning-still-relevant/) in the process. 
+ +Refer to [UBUNTU18-CIS-Audit](https://github.com/ansible-lockdown/UBUNTU18-CIS-Audit). + +## Example Audit Summary + +This is based on a vagrant image with selections enabled. e.g. No Gui or firewall. +Note: More tests are run during audit as we check config and running state. + +```txt + +ok: [default] => { + "msg": [ + "The pre remediation results are: ['Total Duration: 5.454s', 'Count: 338, Failed: 47, Skipped: 5'].", + "The post remediation results are: ['Total Duration: 5.007s', 'Count: 338, Failed: 46, Skipped: 5'].", + "Full breakdown can be found in /var/tmp", + "" + ] +} + +PLAY RECAP ******************************************************************************************************************************************* +default : ok=270 changed=23 unreachable=0 failed=0 skipped=140 rescued=0 ignored=0 ``` -- hosts: all - become: true - vars: - is_container: false - roles: +## Documentation + +- [Read The Docs](https://ansible-lockdown.readthedocs.io/en/latest/) +- [Getting Started](https://www.lockdownenterprise.com/docs/getting-started-with-lockdown#GH_AL_UB18_cis) +- [Customizing Roles](https://www.lockdownenterprise.com/docs/customizing-lockdown-enterprise#GH_AL_UB18_cis) +- [Per-Host Configuration](https://www.lockdownenterprise.com/docs/per-host-lockdown-enterprise-configuration#GH_AL_UB18_cis) +- [Getting the Most Out of the Role](https://www.lockdownenterprise.com/docs/get-the-most-out-of-lockdown-enterprise#GH_AL_UB18_cis) - - role: "{{ playbook_dir }}" - ubtu18cis_system_is_container: "{{ is_container | default(false) }}" - ubtu18cis_skip_for_travis: false - ubtu18cis_oscap_scan: yes +## Requirements -License -------- +**General:** -BSD +- Basic knowledge of Ansible, below are some links to the Ansible documentation to help get started if you are unfamiliar with Ansible -Author Information ------------------- + - [Main Ansible documentation page](https://docs.ansible.com) + - [Ansible Getting 
Started](https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html) + - [Tower User Guide](https://docs.ansible.com/ansible-tower/latest/html/userguide/index.html) + - [Ansible Community Info](https://docs.ansible.com/ansible/latest/community/index.html) +- Functioning Ansible and/or Tower Installed, configured, and running. This includes all of the base Ansible/Tower configurations, needed packages installed, and infrastructure setup. +- Please read through the tasks in this role to gain an understanding of what each control is doing. Some of the tasks are disruptive and can have unintended consiquences in a live production system. Also familiarize yourself with the variables in the defaults/main.yml file. -An optional section for the role authors to include contact information, or a website (HTML is not allowed). +**Technical Dependencies:** + +- Access to download or add the goss binary and content to the system if using auditing +(other options are available on how to get the content to the system.) +- Python3 +- Ansible 2.9+ +- python-def +- libselinux-python +- jmespath + +## Role Variables + +This role is designed that the end user should not have to edit the tasks themselves. All customizing should be done via the defaults/main.yml file or with extra vars within the project, job, workflow, etc. + +## Tags + +There are many tags available for added control precision. Each control has it's own set of tags noting what level, if it's scored/notscored, what OS element it relates to, if it's a patch or audit, and the rule number. + +Below is an example of the tag section from a control within this role. Using this example if you set your run to skip all controls with the tag services, this task will be skipped. The opposite can also happen where you run only controls tagged with services. 
+ +```sh + tags: + - level1-server + - level1-workstation + - scored + - avahi + - services + - patch + - rule_2.2.4 ``` -Tags ----- -Many tags are available for precise control of what is and is not changed. +## Community Contribution + +We encourage you (the community) to contribute to this role. Please read the rules below. + +- Your work is done in your own individual branch. Make sure to Signed-off and GPG sign all commits you intend to merge. +- All community Pull Requests are pulled into the devel branch +- Pull Requests into devel will confirm your commits have a GPG signature, Signed-off, and a functional test before being approved +- Once your changes are merged and a more detailed review is complete, an authorized member will merge your changes into the main branch for a new release + +## Known Issues + +cloud0init - due to a bug this will stop working if noexec is added to /var. +ubtu18cis_rule_1_1_3_3 + +[bug 1839899](https://bugs.launchpad.net/cloud-init/+bug/1839899) + +## Pipeline Testing + +uses: + +- ansible-core 2.12 +- ansible collections - pulls in the latest version based on requirements file +- runs the audit using the devel branch +- This is an automated test that occurs on pull requests into devel + +## Added Extras -Some examples of using tags: +- [pre-commit](https://pre-commit.com) can be tested and can be run from within the directory +```sh +pre-commit run ``` - # Audit and patch the site - ansible-playbook site.yml --tags="patch" -``` \ No newline at end of file diff --git a/collections/requirements.yml b/collections/requirements.yml new file mode 100644 index 0000000..23596ec --- /dev/null +++ b/collections/requirements.yml @@ -0,0 +1,8 @@ +--- + +collections: + - name: community.general + + - name: community.crypto + + - name: ansible.posix diff --git a/defaults/main.yml b/defaults/main.yml index 89a810a..61e0ece 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -1,9 +1,4 @@ --- -# If you would like a report at the end 
accordin to OpenSCAP as to the report results -# then you should set ubtu18cis_oscap_scan to true/yes. -# NOTE: This requires the python_xmltojson package on the control host. -ubtu18cis_oscap_scan: false -ubtu18cis_report_dir: /tmp ubtu18cis_section1_patch: true ubtu18cis_section2_patch: true @@ -12,6 +7,37 @@ ubtu18cis_section4_patch: true ubtu18cis_section5_patch: true ubtu18cis_section6_patch: true +# System will reboot if false, can give better audit results +ubtu18_skip_reboot: true + +## Benchmark name used by auditing control role +# The audit variable found at the base +benchmark: UBUNTU18-CIS +benchmark_version: '2.1.0' + +### Audit Binary is required on the remote host +### Goss is required on the remote host +setup_audit: false +# How to retrive goss +# Options are copy or download - detailed settings at the bottom of this file +# you will need to access to either github or the file already dowmloaded +get_goss_file: download + +# how to get audit files onto host options +# options are git/copy/get_url - use local if already available to to the host (adjust paths accordingly) +audit_content: git + +# enable audits to run - this runs the audit and get the latest content +run_audit: false + +# Timeout for those cmds that take longer to run where timeout set +audit_cmd_timeout: 60000 + +# Run heavy tests - some tests can have more impact on a system enabling these can have greater impact on a system +audit_run_heavy_tests: true + +### End Audit enablements #### + # We've defined complexity-high to mean that we cannot automatically remediate # the rule in question. In the future this might mean that the remediation # may fail in some cases. 
@@ -47,16 +73,14 @@ ubtu18cis_system_is_container: false system_is_ec2: false # Section 1 Fixes -# Section 1 is Iniitial setup (FileSystem Configuration, Configure Software Updates, Configure sudo, Filesystem Integrity Checking, Secure Boot Settings, -# Additional Process Hardening, Mandatory Access Control, and Warning Banners) +# Section 1 is Initial setup (FileSystem Configuration, Configure Software Updates, Filesystem Integrity Checking, Secure Boot Settings, +# Additional Process Hardening, Mandatory Access Control, Warning Banners, and GNOME Display Manager) ubtu18cis_rule_1_1_1_1: true ubtu18cis_rule_1_1_1_2: true ubtu18cis_rule_1_1_1_3: true ubtu18cis_rule_1_1_1_4: true ubtu18cis_rule_1_1_1_5: true ubtu18cis_rule_1_1_1_6: true -ubtu18cis_rule_1_1_1_7: true -ubtu18cis_rule_1_1_1_8: true ubtu18cis_rule_1_1_1: true ubtu18cis_rule_1_1_2: true ubtu18cis_rule_1_1_3: true @@ -80,113 +104,115 @@ ubtu18cis_rule_1_1_20: true ubtu18cis_rule_1_1_21: true ubtu18cis_rule_1_1_22: true ubtu18cis_rule_1_1_23: true +ubtu18cis_rule_1_1_24: true ubtu18cis_rule_1_2_1: true ubtu18cis_rule_1_2_2: true ubtu18cis_rule_1_3_1: true ubtu18cis_rule_1_3_2: true -ubtu18cis_rule_1_3_3: true ubtu18cis_rule_1_4_1: true ubtu18cis_rule_1_4_2: true +ubtu18cis_rule_1_4_3: true +ubtu18cis_rule_1_4_4: true ubtu18cis_rule_1_5_1: true ubtu18cis_rule_1_5_2: true ubtu18cis_rule_1_5_3: true ubtu18cis_rule_1_5_4: true -ubtu18cis_rule_1_6_1: true -ubtu18cis_rule_1_6_2: true -ubtu18cis_rule_1_6_3: true -ubtu18cis_rule_1_6_4: true -ubtu18cis_rule_1_7_1_1: true -ubtu18cis_rule_1_7_1_2: true -ubtu18cis_rule_1_7_1_3: true -ubtu18cis_rule_1_7_1_4: true -ubtu18cis_rule_1_8_1_1: true -ubtu18cis_rule_1_8_1_2: true -ubtu18cis_rule_1_8_1_3: true -ubtu18cis_rule_1_8_1_4: true -ubtu18cis_rule_1_8_1_5: true -ubtu18cis_rule_1_8_1_6: true +ubtu18cis_rule_1_6_1_1: true +ubtu18cis_rule_1_6_1_2: true +ubtu18cis_rule_1_6_1_3: true +ubtu18cis_rule_1_6_1_4: true +ubtu18cis_rule_1_7_1: true +ubtu18cis_rule_1_7_2: true 
+ubtu18cis_rule_1_7_3: true +ubtu18cis_rule_1_7_4: true +ubtu18cis_rule_1_7_5: true +ubtu18cis_rule_1_7_6: true +ubtu18cis_rule_1_8_1: true ubtu18cis_rule_1_8_2: true +ubtu18cis_rule_1_8_3: true +ubtu18cis_rule_1_8_4: true ubtu18cis_rule_1_9: true # Section 2 Fixes -# Section 2 is Services (inetd, special purpose, and service clients) -ubtu18cis_rule_2_1_1: true +# Section 2 is Services (Special Purpose, and Service Clients) +ubtu18cis_rule_2_1_1_1: true +ubtu18cis_rule_2_1_1_2: true +ubtu18cis_rule_2_1_1_3: true +ubtu18cis_rule_2_1_1_4: true ubtu18cis_rule_2_1_2: true -ubtu18cis_rule_2_2_1_1: true -ubtu18cis_rule_2_2_1_2: true -ubtu18cis_rule_2_2_1_3: true -ubtu18cis_rule_2_2_1_4: true +ubtu18cis_rule_2_1_3: true +ubtu18cis_rule_2_1_4: true +ubtu18cis_rule_2_1_5: true +ubtu18cis_rule_2_1_6: true +ubtu18cis_rule_2_1_7: true +ubtu18cis_rule_2_1_8: true +ubtu18cis_rule_2_1_9: true +ubtu18cis_rule_2_1_10: true +ubtu18cis_rule_2_1_11: true +ubtu18cis_rule_2_1_12: true +ubtu18cis_rule_2_1_13: true +ubtu18cis_rule_2_1_14: true +ubtu18cis_rule_2_1_15: true +ubtu18cis_rule_2_1_16: true +ubtu18cis_rule_2_1_17: true +ubtu18cis_rule_2_2_1: true ubtu18cis_rule_2_2_2: true ubtu18cis_rule_2_2_3: true ubtu18cis_rule_2_2_4: true ubtu18cis_rule_2_2_5: true ubtu18cis_rule_2_2_6: true -ubtu18cis_rule_2_2_7: true -ubtu18cis_rule_2_2_8: true -ubtu18cis_rule_2_2_9: true -ubtu18cis_rule_2_2_10: true -ubtu18cis_rule_2_2_11: true -ubtu18cis_rule_2_2_12: true -ubtu18cis_rule_2_2_13: true -ubtu18cis_rule_2_2_14: true -ubtu18cis_rule_2_2_15: true -ubtu18cis_rule_2_2_16: true -ubtu18cis_rule_2_2_17: true -ubtu18cis_rule_2_3_1: true -ubtu18cis_rule_2_3_2: true -ubtu18cis_rule_2_3_3: true -ubtu18cis_rule_2_3_4: true -ubtu18cis_rule_2_3_5: true +ubtu18cis_rule_2_3: true # Section 3 Fixes -# Section 3 is Network Configuration (network parameters(host), network parameters (host and router), tcp rappers, uncommon network protocols, and firewall configuration) +# Section 3 is Network Configuration 
(Disable unused network protocols and devices, Network Parameters(host), Network Parameters (host and router), Uncommon Network Protocols, and Firewall Configuration) ubtu18cis_rule_3_1_1: true ubtu18cis_rule_3_1_2: true ubtu18cis_rule_3_2_1: true ubtu18cis_rule_3_2_2: true -ubtu18cis_rule_3_2_3: true -ubtu18cis_rule_3_2_4: true -ubtu18cis_rule_3_2_5: true -ubtu18cis_rule_3_2_6: true -ubtu18cis_rule_3_2_7: true -ubtu18cis_rule_3_2_8: true -ubtu18cis_rule_3_2_9: true ubtu18cis_rule_3_3_1: true ubtu18cis_rule_3_3_2: true ubtu18cis_rule_3_3_3: true ubtu18cis_rule_3_3_4: true ubtu18cis_rule_3_3_5: true +ubtu18cis_rule_3_3_6: true +ubtu18cis_rule_3_3_7: true +ubtu18cis_rule_3_3_8: true +ubtu18cis_rule_3_3_9: true ubtu18cis_rule_3_4_1: true ubtu18cis_rule_3_4_2: true ubtu18cis_rule_3_4_3: true ubtu18cis_rule_3_4_4: true ubtu18cis_rule_3_5_1_1: true +ubtu18cis_rule_3_5_1_2: true +ubtu18cis_rule_3_5_1_3: true +ubtu18cis_rule_3_5_1_4: true +ubtu18cis_rule_3_5_1_5: true +ubtu18cis_rule_3_5_1_6: true +ubtu18cis_rule_3_5_1_7: true ubtu18cis_rule_3_5_2_1: true ubtu18cis_rule_3_5_2_2: true ubtu18cis_rule_3_5_2_3: true ubtu18cis_rule_3_5_2_4: true ubtu18cis_rule_3_5_2_5: true -ubtu18cis_rule_3_5_3_1: true -ubtu18cis_rule_3_5_3_2: true -ubtu18cis_rule_3_5_3_3: true -ubtu18cis_rule_3_5_3_4: true -ubtu18cis_rule_3_5_3_5: true -ubtu18cis_rule_3_5_3_6: true -ubtu18cis_rule_3_5_3_7: true -ubtu18cis_rule_3_5_3_8: true -ubtu18cis_rule_3_5_4_1_1: true -ubtu18cis_rule_3_5_4_1_2: true -ubtu18cis_rule_3_5_4_1_3: true -ubtu18cis_rule_3_5_4_1_4: true -ubtu18cis_rule_3_5_4_2_1: true -ubtu18cis_rule_3_5_4_2_2: true -ubtu18cis_rule_3_5_4_2_3: true -ubtu18cis_rule_3_5_4_2_4: true -ubtu18cis_rule_3_6: true -ubtu18cis_rule_3_7: true +ubtu18cis_rule_3_5_2_6: true +ubtu18cis_rule_3_5_2_7: true +ubtu18cis_rule_3_5_2_8: true +ubtu18cis_rule_3_5_2_9: true +ubtu18cis_rule_3_5_2_10: true +ubtu18cis_rule_3_5_3_1_1: true +ubtu18cis_rule_3_5_3_1_2: true +ubtu18cis_rule_3_5_3_1_3: true 
+ubtu18cis_rule_3_5_3_2_1: true +ubtu18cis_rule_3_5_3_2_2: true +ubtu18cis_rule_3_5_3_2_3: true +ubtu18cis_rule_3_5_3_2_4: true +ubtu18cis_rule_3_5_3_3_1: true +ubtu18cis_rule_3_5_3_3_2: true +ubtu18cis_rule_3_5_3_3_3: true +ubtu18cis_rule_3_5_3_3_4: true # Section 4 Fixes -# Section 4 is Logging and Auditing (configure system accounting and configure logging) +# Section 4 is Logging and Auditing (Configure System Accounting and Configure Logging) ubtu18cis_rule_4_1_1_1: true ubtu18cis_rule_4_1_1_2: true ubtu18cis_rule_4_1_1_3: true @@ -220,10 +246,11 @@ ubtu18cis_rule_4_2_2_2: true ubtu18cis_rule_4_2_2_3: true ubtu18cis_rule_4_2_3: true ubtu18cis_rule_4_3: true +ubtu18cis_rule_4_4: true # Section 5 Fixes -# Section 5 is Acces, Authentication, and Authorization (configure cron, SSH server config, configure PAM -# and user accounts and environment) +# Section 5 is Access, Authentication, and Authorization (Configure Time-Based Job Schedulers, Configure sudo, Configure SSH Server, Configure PAM +# and User Accounts and Environment) ubtu18cis_rule_5_1_1: true ubtu18cis_rule_5_1_2: true ubtu18cis_rule_5_1_3: true @@ -232,48 +259,50 @@ ubtu18cis_rule_5_1_5: true ubtu18cis_rule_5_1_6: true ubtu18cis_rule_5_1_7: true ubtu18cis_rule_5_1_8: true +ubtu18cis_rule_5_1_9: true ubtu18cis_rule_5_2_1: true ubtu18cis_rule_5_2_2: true ubtu18cis_rule_5_2_3: true -ubtu18cis_rule_5_2_4: true -ubtu18cis_rule_5_2_5: true -ubtu18cis_rule_5_2_6: true -ubtu18cis_rule_5_2_7: true -ubtu18cis_rule_5_2_8: true -ubtu18cis_rule_5_2_9: true -ubtu18cis_rule_5_2_10: true -ubtu18cis_rule_5_2_11: true -ubtu18cis_rule_5_2_12: true -ubtu18cis_rule_5_2_13: true -ubtu18cis_rule_5_2_14: true -ubtu18cis_rule_5_2_15: true -ubtu18cis_rule_5_2_16: true -ubtu18cis_rule_5_2_17: true -ubtu18cis_rule_5_2_18: true -ubtu18cis_rule_5_2_19: true -ubtu18cis_rule_5_2_20: true -ubtu18cis_rule_5_2_21: true -ubtu18cis_rule_5_2_22: true -ubtu18cis_rule_5_2_23: true ubtu18cis_rule_5_3_1: true ubtu18cis_rule_5_3_2: true 
ubtu18cis_rule_5_3_3: true ubtu18cis_rule_5_3_4: true -ubtu18cis_rule_5_4_1_1: true -ubtu18cis_rule_5_4_1_2: true -ubtu18cis_rule_5_4_1_3: true -ubtu18cis_rule_5_4_1_4: true -ubtu18cis_rule_5_4_1_5: true +ubtu18cis_rule_5_3_5: true +ubtu18cis_rule_5_3_6: true +ubtu18cis_rule_5_3_7: true +ubtu18cis_rule_5_3_8: true +ubtu18cis_rule_5_3_9: true +ubtu18cis_rule_5_3_10: true +ubtu18cis_rule_5_3_11: true +ubtu18cis_rule_5_3_12: true +ubtu18cis_rule_5_3_13: true +ubtu18cis_rule_5_3_14: true +ubtu18cis_rule_5_3_15: true +ubtu18cis_rule_5_3_16: true +ubtu18cis_rule_5_3_17: true +ubtu18cis_rule_5_3_18: true +ubtu18cis_rule_5_3_19: true +ubtu18cis_rule_5_3_20: true +ubtu18cis_rule_5_3_21: true +ubtu18cis_rule_5_3_22: true ubtu18cis_rule_5_4_1: true ubtu18cis_rule_5_4_2: true ubtu18cis_rule_5_4_3: true ubtu18cis_rule_5_4_4: true -ubtu18cis_rule_5_4_5: true -ubtu18cis_rule_5_5: true +ubtu18cis_rule_5_5_1_1: true +ubtu18cis_rule_5_5_1_2: true +ubtu18cis_rule_5_5_1_3: true +ubtu18cis_rule_5_5_1_4: true +ubtu18cis_rule_5_5_1_5: true +ubtu18cis_rule_5_5_2: true +ubtu18cis_rule_5_5_3: true +ubtu18cis_rule_5_5_4: true +ubtu18cis_rule_5_5_5: true ubtu18cis_rule_5_6: true +ubtu18cis_rule_5_7: true # Section 6 Fixes -# Section 6 is System Maintenance (system file permissions and user and group settings) +# Section 6 is System Maintenance (System File Permissions and User and Group Settings) ubtu18cis_rule_6_1_1: true ubtu18cis_rule_6_1_2: true ubtu18cis_rule_6_1_3: true @@ -305,9 +334,6 @@ ubtu18cis_rule_6_2_14: true ubtu18cis_rule_6_2_15: true ubtu18cis_rule_6_2_16: true ubtu18cis_rule_6_2_17: true -ubtu18cis_rule_6_2_18: true -ubtu18cis_rule_6_2_19: true -ubtu18cis_rule_6_2_20: true # Service configuration variables, set to true to keep service ubtu18cis_allow_autofs: false @@ -316,7 +342,7 @@ ubtu18cis_avahi_server: false ubtu18cis_cups_server: false ubtu18cis_dhcp_server: false ubtu18cis_ldap_server: false -ubtu18cis_nfs_rpc_server: false +ubtu18cis_nfs_server: false 
ubtu18cis_dns_server: false ubtu18cis_vsftpd_server: false ubtu18cis_httpd_server: false @@ -324,8 +350,10 @@ ubtu18cis_dovecot_server: false ubtu18cis_smb_server: false ubtu18cis_squid_server: false ubtu18cis_snmp_server: false +ubtu18cis_mail_server: false ubtu18cis_rsync_server: false ubtu18cis_nis_server: false +ubtu18cis_rpc_server: false # Clients in use variables ubtu18cis_nis_required: false @@ -339,44 +367,18 @@ ubtu18cis_is_router: false ubtu18cis_ipv6_required: true # Other system wide variables -# ubtu18cis_xwindows_required is the toggle for requiring x windows. True means you use X Windoes (not recommented for servers) -# false means you do not require X Windows enabled -ubtu18cis_xwindows_required: false - -# Section 1 Control Variables -# Control 1.1.2/1.1.3/1.1.4/1.1.5 -# ubtu18cis_tmp_fstab_options are the file system options for the fstabs configuration -# To conform to CIS cotnrol 1.1.2 could use any settings -# To conform to CIS control 1.1.3 nodev needs to be present -# To conform to CIS control 1.1.4 nosuid needs to be present -# To conform to CIS control 1.1.5 noexec needs to present -ubtu18cis_tmp_fstab_options: "defaults,rw,nosuid,nodev,noexec,relatime" - -# Control 1.1.8/1.1.9/1.1.10 -# These are the settings for the /var/tmp mount -# To conform to CIS control 1.1.8 nodev needs to be present in opts -# To conform to CIS control 1.1.9 nosuid needs to be present in opts -# To conform to CIS control 1.1.10 noexec needs to be present in opts -ubtu18cis_vartmp: - source: /tmp - fstype: none - opts: "defaults,nodev,nosuid,noexec,bind" - enabled: false - -# Control 1.3.1 -# ubtu18cis_sudo_package is the name of the sudo package to install -# The possible values are "sudo" or "sudo-ldap" -ubtu18cis_sudo_package: "sudo" - -# Control 1.3.3 -# ubtu18cis_sudo_logfile is the path and file name of the sudo log file -ubtu18cis_sudo_logfile: "/var/log/sudo.log" - -# Control 1.4.2 +# ubtu18cis_desktop_required is the toggle for requiring desktop 
environments. True means you use a desktop and will not disable/remove needed items to run a desktop (not recommented for servers) +# false means you do not require a desktop +ubtu18cis_desktop_required: false +# Toggle to have automation install gdm3. +# The gdm related handlers won't run if you have this set to true but gdm3 is not installed. +ubtu18cis_install_gdm3: true + +# Control 1.3.2 # These are the crontab settings for file system integrity enforcement ubtu18cis_aide_cron: cron_user: root - cron_file: /etc/crontab + cron_file: /etc/cron.d/cis_aide aide_job: '/usr/bin/aide.wrapper --config /etc/aide/aide.conf --check' aide_minute: 0 aide_hour: 5 @@ -384,29 +386,25 @@ ubtu18cis_aide_cron: aide_month: '*' aide_weekday: '*' -# Control 1.5.2 -ubtu18cis_bootloader_password: random -rhel7cis_set_boot_pass: false - -# Control 1.5.3 -# THIS VARAIBLE SHOULD BE CHANGED AND INCORPROATED INTO VAULT +# Control 1.4.4 +# THIS VARIABLE SHOULD BE CHANGED AND INCORPORATED INTO VAULT # THIS VALUE IS WHAT THE ROOT PW WILL BECOME!!!!!!!! # HAVING THAT PW EXPOSED IN RAW TEXT IS NOT SECURE!!!! ubtu18cis_root_pw: "Password1" -# Control 1.8.1.1 +# Control 1.8.2 # This will be the motd banner must not contain the below items in order to be compliant with Ubuntu 18 CIS # \m, \r, \s, \v or references to the OS platform ubtu18cis_warning_banner: | - Authorized uses only. All activity may be monitored and reported. + 'Authorized uses only. All activity may be monitored and reported.' 
# Section 2 Control Variables -# Control 2.2.1.1 +# Control 2.1.1.1 # ubtu18cis_time_sync_tool is the tool in which to synchronize time # The two options are chrony or ntp ubtu18cis_time_sync_tool: "ntp" -# Control 2.2.1.3 +# Control 2.1.1.4 # ubtu18cis_chrony_server_options is the server options for chrony ubtu18cis_chrony_server_options: "minpoll 8" # ubtu18cis_ntp_server_options is the server options for ntp @@ -417,6 +415,7 @@ ubtu18cis_time_synchronization_servers: - 1.pool.ntp.org - 2.pool.ntp.org - 3.pool.ntp.org +ubtu18cis_ntp_fallback_server_list: "2.debian.pool.ntp.org 3.debian.pool.ntp.org" # Section 3 Control Variables # Control 3.3.2 @@ -426,7 +425,7 @@ ubtu18cis_host_allow: - "172.16.0.0/255.240.0.0" - "192.168.0.0/255.255.0.0" -# Control 3.5.1.1 +# All Control 3.5.x.yml # ubtu18cis_firewall_package is the firewall package you will be using. # the options are ufw, nftables, or iptables # you much chose only one firewall package @@ -434,21 +433,26 @@ ubtu18cis_firewall_package: "ufw" # ubtu18cis_iptables_v6 toggles iptables vs ip6tables CIS firewall rules and is used with # variable ubtu18cis_firewall_package set to iptables ubtu18cis_iptables_v6: true +# ubtu18cis_ufw_system_sysctlconf changes the /etc/default/ufw IPT_SYSCTL to use the main /etc/sysctl.conf file +# By default UFW will use it's own sysctl.conf file located in /etc/ufw which could/will override /etc/sysctl.conf +# Setting this value to true will change the UFW configuration to use the /etc/sysctl.conf file +ubtu18cis_ufw_system_sysctlconf: true -# Controls 3.5.4.1.1 through 3.5.4.1.4 -# The iptables module only writes to memory which means a reboot could revert settings -# The below toggle will install iptables-persistent and save the rules in memory (/etc/iptables/rules.v4 or rules.v6) -# This makes the CIS role changes permenant -ubtu18cis_save_iptables_cis_rules: true - -# Control 3.5.2.4 +# Control 3.5.1.5 # ubtu18cis_ufw_allow_out_ports are the ports for the firewall to allow -# if 
you want to allow out on all ports set variable to "all", example ubtu18cis_ufw_allow_out_ports: "all" +# if you want to allow out on all ports set variable to "all", example ubtu18cis_ufw_allow_out_ports: all +# ubtu18cis_ufw_allow_out_ports: all ubtu18cis_ufw_allow_out_ports: - 53 - 80 - 443 +# Controls 3.5.4.1.1 through 3.5.4.1.4 +# The iptables module only writes to memory which means a reboot could revert settings +# The below toggle will install iptables-persistent and save the rules in memory (/etc/iptables/rules.v4 or rules.v6) +# This makes the CIS role changes permenant +ubtu18cis_save_iptables_cis_rules: true + # Control 3.5.3.2 # ubtu18cis_nftables_table_name is the name of the table in nftables you want to create # the default nftables table name is inet filter. This variable name will be the one all @@ -490,32 +494,46 @@ ubtu18cis_system_is_log_server: true ubtu18cis_logrotate: "daily" # Section 5 Control Variables +# Control 5.2.1 +# ubtu18cis_sudo_package is the name of the sudo package to install +# The possible values are "sudo" or "sudo-ldap" +ubtu18cis_sudo_package: "sudo" + +# Control 5.2.3 +# ubtu18cis_sudo_logfile is the path and file name of the sudo log file +ubtu18cis_sudo_logfile: "/var/log/sudo.log" + # ubtu18cis_sshd will contain all sshd variables. The task association and variable descriptions for each section are listed below -# Control 5.2.5 +# Control 5.3.4 +# allow_users is the users allowed to ssh into the system +# allow_groups is teh groups allowed to ssh into the system +# deny_users is the users to deny from ssh'ing into the system +# deny_groups is the groups to deny from ssh'ing into the the system +# Control 5.3.5 # log_level is the log level variable. This needs to be set to VERBOSE or INFO to conform to CIS standards -# Control 5.2.7 +# Control 5.3.7 # max_auth_tries is the max number of authentication attampts per connection. 
# This value should be 4 or less to conform to CIS standards -# Control 5.2.13 +# Control 5.3.13 # ciphers is a comma seperated list of site approved ciphers # ONLY USE STRONG CIPHERS. Weak ciphers are listed below # DO NOT USE: 3des-cbc, aes128-cbc, aes192-cbc, aes256-cbc, arcfour, arcfour128, arcfour256, blowfish-cbc, cast128-cbc, rijndael-cbc@lysator.liu.se -# Control 5.2.14 +# Control 5.3.14 # MACs is the comma seperated list of site approved MAC algorithms that SSH can use during communication # ONLY USE STRONG ALGORITHMS. Weak algorithms are listed below # DO NOT USE: hmac-md5, hmac-md5-96, hmac-ripemd160, hmac-sha1, hmac-sha1-96, umac-64@openssh.com, umac-128@openssh.com, hmac-md5-etm@openssh.com, # hmac-md5-96-etm@openssh.com, hmac-ripemd160-etm@openssh.com, hmac-sha1-etm@openssh.com, hmac-sha1-96-etm@openssh.com, umac-64-etm@openssh.com, umac-128-etm@openssh.com -# Control 5.2.15 +# Control 5.3.15 # kex_algorithms is comma seperated list of the algorithms for key exchange methods # ONLY USE STRONG ALGORITHMS. Weak algorithms are listed below # DO NOT USE: diffie-hellman-group1-sha1, diffie-hellman-group14-sha1, diffie-hellman-group-exchange-sha1 -# Control 5.2.16 +# Control 5.3.16 # client_alive_interval is the amount of time idle before ssh session terminated. Set to 300 or less to conform to CIS standards # client_alive_count_max will send client alive messages at the configured interval. Set to 3 or less to conform to CIS standards -# Control 5.2.17 +# Control 5.3.17 # login_grace_time is the time allowed for successful authentication to the SSH server. This needs to be set to 60 seconds or less to conform to CIS standards -# Control 5.2.23 -# max_sessions is the max number of open sessions permitted. Set the value to 4 or less to conform to CIS standards +# Control 5.3.22 +# max_sessions is the max number of open sessions permitted. 
Set the value to 10 or less to conform to CIS standards ubtu18cis_sshd: log_level: "INFO" max_auth_tries: 4 @@ -525,7 +543,7 @@ ubtu18cis_sshd: client_alive_interval: 300 client_alive_count_max: 0 login_grace_time: 60 - max_sessions: 4 + max_sessions: 10 # WARNING: make sure you understand the precedence when working with these values!! # allow_users and allow_groups can be single user/group or multiple users/groups. For multiple list them with a space seperating them allow_users: "vagrant ubuntu" @@ -533,20 +551,19 @@ ubtu18cis_sshd: # deny_users: # deny_groups: -# Control 5.3.3 +# Control 5.4.3 # ubtu18cis_pamd_pwhistory_remember is number of password chnage cycles a user can re-use a password # This needs to be 5 or more to conform to CIS standards ubtu18cis_pamd_pwhistory_remember: 5 - # ubtu18cis_pass will be password based variables -# # Control 5.4.1.1 +# Control 5.5.1.1 +# min_days is the min number of days allowed between changing passwords. Set to 1 or more to conform to CIS standards +# # Control 5.5.1.2 # max_days forces passwords to expire in configured number of days. Set to 365 or less to conform to CIS standards -# Control 5.4.1.2 -# pass_min_days is the min number of days allowed between changing passwords. Set to 1 or more to conform to CIS standards -# Control 5.4.1.3 +# Control 5.5.1.3 # warn_age is how many days before pw expiry the user will be warned. Set to 7 or more to conform to CIS standards -# Control 5.4.1.4 +# Control 5.5.1.4 # inactive the number of days of inactivity before the account will lock. Set to 30 day sor less to conform to CIS standards ubtu18cis_pass: max_days: 365 @@ -554,18 +571,30 @@ ubtu18cis_pass: warn_age: 7 inactive: 30 -# Control 5.4.5 +# Control 5.5.4 +# ubtu18cis_bash_umask is the umask to set in the /etc/bash.bashrc and /etc/profile. 
+# The value needs to be 027 or more restrictive to comply with CIS standards +ubtu18cis_bash_umask: '027' + +# Control 5.5.5 # Session timeout setting file (TMOUT setting can be set in multiple files) # Timeout value is in seconds. Set value to 900 seconds or less ubtu18cis_shell_session_timeout: file: /etc/profile.d/tmout.sh timeout: 900 -# Control 5.6 +# Control 5.7 # ubtu18cis_su_group is the su group to use with pam_wheel ubtu18cis_su_group: "wheel" # Section 6 Control Variables + +# Control 6.1.1 +# ubtu18cis_manual_audit_dpkg is the setting that will allow the control to get a list of installed applications +# and check them using the # dpkg --verify option. This process can be very time consuming on machines. +# ubtu18cis_manual_audit_dpkg: true will result in a quicker audit but will require had checking each application. +ubtu18cis_manual_audit_dpkg: true + # Control 6.1.10 # ubtu18cis_no_world_write_adjust will toggle the automated fix to remove world-writable perms from all files # Setting to true will remove all world-writable permissions, and false will leave as-is @@ -589,5 +618,57 @@ ubtu18cis_no_group_adjust: true # Set to true this role will remove that bit, set to false we will just warn about the files ubtu18cis_suid_adjust: false +# Control 6.2.5 +# ubtu18cis_int_gid is the UID for interactive users to start at +ubtu18cis_int_gid: 1000 + +# Control 6.2.7 +# This control toggles automation to set all users dot files to not group or world writable +# Value of true will change file permissiosn, value of false will skip the automation changing permissions. +ubtu18cis_dotperm_ansiblemanaged: true + # Control 6.2.9 Allow ansible to adjust world-writable files. 
False will just display world-writable files, True will remove world-writable ubtu18cis_passwd_label: "{{ (this_item | default(item)).id }}: {{ (this_item | default(item)).dir }}" + +#### Audit Configuration Settings #### + +### Audit binary settings ### +goss_version: + release: v0.3.21 + checksum: 'sha256:9a9200779603acf0353d2c0e85ae46e083596c10838eaf4ee050c924678e4fe3' +audit_bin_path: /usr/local/bin/ +audit_bin: "{{ audit_bin_path }}goss" +audit_format: json + +# if get_audit_binary_method == download change accordingly +goss_url: "https://github.com/goss-org/goss/releases/download/{{ goss_version.release }}/goss-linux-amd64" + +## if get_goss_file - copy the following needs to be updated for your environment +## it is expected that it will be copied from somewhere accessible to the control node +## e.g copy from ansible control node to remote host +copy_goss_from_path: /some/accessible/path + +### Goss Audit Benchmark file ### +## managed by the control audit_content +# git +audit_file_git: "https://github.com/ansible-lockdown/{{ benchmark }}-Audit.git" +audit_git_version: "benchmark_{{ benchmark_version }}" + +# archive or copy: +audit_conf_copy: "some path to copy from" + +# get_url: +audit_files_url: "some url maybe s3?" + +audit_out_dir: '/opt' +audit_conf_dir: "{{ audit_out_dir }}/{{ benchmark }}-Audit/" +pre_audit_outfile: "{{ audit_out_dir }}/{{ ansible_hostname }}-{{ benchmark }}_pre_scan_{{ ansible_date_time.epoch }}.{{ audit_format }}" +post_audit_outfile: "{{ audit_out_dir }}/{{ ansible_hostname }}-{{ benchmark }}_post_scan_{{ ansible_date_time.epoch }}.{{ audit_format }}" + +## The following should not need changing +audit_control_file: "{{ audit_conf_dir }}goss.yml" +audit_vars_path: "{{ audit_conf_dir }}/vars/{{ ansible_hostname }}.yml" +audit_results: | + The pre remediation results are: {{ pre_audit_summary }}. + The post remediation results are: {{ post_audit_summary }}. 
+ Full breakdown can be found in {{ audit_out_dir }} diff --git a/files/.DS_Store b/files/.DS_Store deleted file mode 100644 index e412cbf..0000000 Binary files a/files/.DS_Store and /dev/null differ diff --git a/files/etc/.DS_Store b/files/etc/.DS_Store deleted file mode 100644 index abaa98f..0000000 Binary files a/files/etc/.DS_Store and /dev/null differ diff --git a/files/etc/apparmor.d/usr.bin.ssh b/files/etc/apparmor.d/usr.bin.ssh deleted file mode 100644 index 380a218..0000000 --- a/files/etc/apparmor.d/usr.bin.ssh +++ /dev/null @@ -1,10 +0,0 @@ -# Last Modified: Mon Aug 24 20:03:44 2020 -#include - -/usr/bin/ssh { - #include - - /lib/x86_64-linux-gnu/ld-*.so mr, - /usr/bin/ssh mr, - -} \ No newline at end of file diff --git a/files/etc/systemd/.DS_Store b/files/etc/systemd/.DS_Store deleted file mode 100644 index c7f175f..0000000 Binary files a/files/etc/systemd/.DS_Store and /dev/null differ diff --git a/files/etc/systemd/system/tmp.mount b/files/etc/systemd/system/tmp.mount deleted file mode 100644 index 47ca662..0000000 --- a/files/etc/systemd/system/tmp.mount +++ /dev/null @@ -1,25 +0,0 @@ -# This file is part of systemd. -# -# systemd is free software; you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation; either version 2.1 of the License, or -# (at your option) any later version. 
- -[Unit] -Description=Temporary Directory -Documentation=man:hier(7) -Documentation=http://www.freedesktop.org/wiki/Software/systemd/APIFileSystems -ConditionPathIsSymbolicLink=!/tmp -DefaultDependencies=no -Conflicts=umount.target -Before=local-fs.target umount.target - -[Mount] -What=tmpfs -Where=/tmp -Type=tmpfs -Options=mode=1777,strictatime,noexec,nodev,nosuid - -# Make 'systemctl enable tmp.mount' work: -[Install] -WantedBy=local-fs.target diff --git a/handlers/main.yml b/handlers/main.yml index 03f44e6..00bfc79 100644 --- a/handlers/main.yml +++ b/handlers/main.yml @@ -1,33 +1,45 @@ --- + +- name: Remount shm + ansible.builtin.shell: mount -o remount /dev/shm + +- name: Remount var_tmp + ansible.builtin.shell: mount -o remount /var/tmp + - name: grub update - command: update-grub + ansible.builtin.shell: update-grub failed_when: false - name: restart postfix - service: + ansible.builtin.service: name: postfix state: restarted +- name: restart exim4 + ansible.builtin.service: + name: exim4 + state: restarted + - name: sysctl flush ipv4 route table - sysctl: + ansible.posix.sysctl: name: net.ipv4.route.flush value: '1' - sysctl_set: yes + sysctl_set: true when: ansible_virtualization_type != "docker" - name: sysctl flush ipv6 route table - sysctl: + ansible.posix.sysctl: name: net.ipv6.route.flush value: '1' - sysctl_set: yes + sysctl_set: true when: ansible_virtualization_type != "docker" - name: reload ufw - ufw: + community.general.ufw: state: reloaded - name: restart auditd - service: + ansible.builtin.service: name: auditd state: restarted when: @@ -36,11 +48,14 @@ - skip_ansible_lint - name: restart rsyslog - service: + ansible.builtin.service: name: rsyslog state: restarted - name: restart sshd - service: + ansible.builtin.service: name: sshd state: restarted + +- name: reload gdm + ansible.builtin.shell: dpkg-reconfigure gdm3 diff --git a/library/grub_crypt.py b/library/grub_crypt.py deleted file mode 100644 index c6737f8..0000000 --- 
a/library/grub_crypt.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/python - -import random, string, crypt - -def gen_pass(size=16, chars=string.ascii_letters + string.digits): - return ''.join(random.choice(chars) for _ in range(size)) - -def gen_salt(salt): - '''Generate a random salt.''' - ret = '' - if not salt: - with open('/dev/urandom', 'rb') as urandom: - while True: - byte = urandom.read(1) - if byte in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' - './0123456789'): - ret += byte - if len(ret) == 16: - break - return '$6$%s' % ret - else: - return '$6$%s' % salt - -def main(): - module = AnsibleModule( - argument_spec = dict( - salt = dict(required=False, default=None), - password = dict(no_log=True, required=False, default='random', type='str'), - ) - - ) - salt = module.params['salt'] - password = module.params['password'] - if password == 'random': - password = gen_pass() - sha512_salt = gen_salt(salt) - salted_pass = crypt.crypt(password, sha512_salt) - module.exit_json(changed=False, passhash=salted_pass) - -from ansible.module_utils.basic import * -if __name__ == '__main__': - main() diff --git a/meta/main.yml b/meta/main.yml index 227ad9c..2735ac7 100644 --- a/meta/main.yml +++ b/meta/main.yml @@ -1,53 +1,29 @@ +--- galaxy_info: - author: your name - description: your role description - company: your company (optional) + author: "George Nalen, Mark Bolwell, DFed, Stephen Williams" + description: "Apply the Ubuntu 18 CIS" + company: "MindPoint Group" + license: MIT + role_name: ubuntu18_cis + namespace: mindpointgroup + min_ansible_version: 2.10.1 + platforms: + - name: Ubuntu + versions: + - bionic + galaxy_tags: + - system + - security + - cis + - hardening + - benchmark + - compliance + - complianceascode + - ubuntu18 - # If the issue tracker for your role is not on github, uncomment the - # next line and provide a value - # issue_tracker_url: http://example.com/issue/tracker - - # Choose a valid license ID from https://spdx.org - some 
suggested licenses: - # - BSD-3-Clause (default) - # - MIT - # - GPL-2.0-or-later - # - GPL-3.0-only - # - Apache-2.0 - # - CC-BY-4.0 - license: license (GPL-2.0-or-later, MIT, etc) - - min_ansible_version: 2.9 - - # If this a Container Enabled role, provide the minimum Ansible Container version. - # min_ansible_container_version: - - # - # Provide a list of supported platforms, and for each platform a list of versions. - # If you don't wish to enumerate all versions for a particular platform, use 'all'. - # To view available platforms and versions (or releases), visit: - # https://galaxy.ansible.com/api/v1/platforms/ - # - # platforms: - # - name: Fedora - # versions: - # - all - # - 25 - # - name: SomePlatform - # versions: - # - all - # - 1.0 - # - 7 - # - 99.99 - - galaxy_tags: [] - # List tags for your role here, one per line. A tag is a keyword that describes - # and categorizes the role. Users find roles by searching for tags. Be sure to - # remove the '[]' above, if you add tags to this list. - # - # NOTE: A tag is limited to a single word comprised of alphanumeric characters. - # Maximum 20 tags per role. +collections: + - community.general + - community.crypto + - ansible.posix dependencies: [] - # List your role dependencies here, one per line. Be sure to remove the '[]' above, - # if you add dependencies to this list. 
- \ No newline at end of file diff --git a/site.yml b/site.yml index 471a7b5..cab52dd 100644 --- a/site.yml +++ b/site.yml @@ -1,12 +1,10 @@ --- -- hosts: all + +- name: Run ubuntu18-cis remediation + hosts: all become: true vars: is_container: false roles: - - role: "{{ playbook_dir }}" - ubtu18cis_system_is_container: "{{ is_container | default(false) }}" - ubtu18cis_skip_for_travis: false - ubtu18cis_oscap_scan: yes diff --git a/tasks/.DS_Store b/tasks/.DS_Store deleted file mode 100644 index 5008ddf..0000000 Binary files a/tasks/.DS_Store and /dev/null differ diff --git a/tasks/LE_audit_setup.yml b/tasks/LE_audit_setup.yml new file mode 100644 index 0000000..bc929ae --- /dev/null +++ b/tasks/LE_audit_setup.yml @@ -0,0 +1,30 @@ +--- + +- name: Download audit binary + ansible.builtin.get_url: + url: "{{ goss_url }}" + dest: "{{ audit_bin }}" + owner: root + group: root + checksum: "{{ goss_version.checksum }}" + mode: 0555 + when: + - get_goss_file == 'download' + +- name: Copy audit binary + ansible.builtin.copy: + src: + dest: "{{ audit_bin }}" + mode: 0555 + owner: root + group: root + when: + - get_goss_file == 'copy' + +- name: Install git if not present + ansible.builtin.package: + name: git + state: present + register: git_installed + when: + - '"git" not in ansible_facts.packages' diff --git a/tasks/main.yml b/tasks/main.yml index 1b161ee..2774c5e 100644 --- a/tasks/main.yml +++ b/tasks/main.yml @@ -1,6 +1,6 @@ --- - name: Gather distribution info - setup: + ansible.builtin.setup: gather_subset: distribution,!all,!min when: - ansible_distribution is not defined @@ -8,7 +8,7 @@ - always - name: Check OS version and family - fail: + ansible.builtin.fail: msg: "This role can only be run against Ubuntu 18. {{ ansible_distribution }} {{ ansible_distribution_major_version }} is not supported." 
when: - ansible_distribution == 'Ubuntu' @@ -17,53 +17,130 @@ - always - name: Check ansible version - fail: - msg: You must use ansible 2.1 or greater - when: not ansible_version.full is version_compare('2.1', '>=') + ansible.builtin.assert: + that: ansible_version.full is version_compare(min_ansible_version, '>=') + fail_msg: "You must use Ansible {{ min_ansible_version }} or greater" + success_msg: "This role is running a supported version of ansible {{ ansible_version.full }} >= {{ min_ansible_version }}" tags: - always -- import_tasks: prelim.yml +- name: Gather the package facts + ansible.builtin.package_facts: + manager: auto tags: - - prelim_tasks + - always + +- name: Prelim Import Tasks + ansible.builtin.import_tasks: + file: prelim.yml + tags: + - always -- include: parse_etc_password.yml +- name: Pre Remediate Audit Task Import + ansible.builtin.import_tasks: + file: pre_remediation_audit.yml + when: + - run_audit + tags: + - run_audit + +- name: Run Password Parsing + ansible.builtin.import_tasks: + file: parse_etc_password.yml when: - ubtu18cis_section5_patch or ubtu18cis_section6_patch +- name: Gather the package facts + ansible.builtin.package_facts: + manager: auto + tags: + - always + - name: Include section 1 patches - import_tasks: section1.yml + ansible.builtin.import_tasks: + file: section_1/main.yml when: ubtu18cis_section1_patch tags: - section1 - name: Include section 2 patches - import_tasks: section2.yml + ansible.builtin.import_tasks: + file: section_2/main.yml when: ubtu18cis_section2_patch tags: - section2 - name: Include section 3 patches - import_tasks: section3.yml + ansible.builtin.import_tasks: + file: section_3/main.yml when: ubtu18cis_section3_patch tags: - section3 - name: Include section 4 patches - import_tasks: section4.yml + ansible.builtin.import_tasks: + file: section_4/main.yml when: ubtu18cis_section4_patch tags: - section4 - name: Include section 5 patches - import_tasks: section5.yml + ansible.builtin.import_tasks: + 
file: section_5/main.yml when: ubtu18cis_section5_patch tags: - section5 - name: Include section 6 patches - import_tasks: section6.yml - when: ubtu18cis_section6_patch | bool + ansible.builtin.import_tasks: + file: section_6/main.yml + when: ubtu18cis_section6_patch tags: - section6 + +- name: flush handlers + ansible.builtin.meta: flush_handlers + +- name: reboot system + block: + - name: reboot system if not skipped + ansible.builtin.reboot: + when: + - not ubtu18_skip_reboot + + - name: Warning a reboot required but skip option set + ansible.builtin.debug: + msg: "Warning!! changes have been made that require a reboot to be implemented but skip reboot was set - Can affect compliance check results" + changed_when: true + when: + - ubtu18_skip_reboot + + - name: Warning for reboot + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: 'Reboot Required' + +- name: Post Remediation Task + ansible.builtin.import_tasks: + file: post_remediation_audit.yml + when: + - run_audit + +- name: Show Audit Summary + ansible.builtin.debug: + msg: "{{ audit_results.split('\n') }}" + when: + - run_audit + tags: + - run_audit + +- name: If Warnings found Output count and control IDs affected + ansible.builtin.debug: + msg: + - "You have {{ warn_count }} Warning(s) that require investigation(s). 
Their ID’s are listed below:" + - "{{ warn_control_list }}" + when: warn_count != 0 + tags: + - always diff --git a/tasks/parse_etc_password.yml b/tasks/parse_etc_password.yml index a9c8764..35d4971 100644 --- a/tasks/parse_etc_password.yml +++ b/tasks/parse_etc_password.yml @@ -2,13 +2,13 @@ - name: "PRELIM | {{ ubtu18cis_passwd_tasks }} | Parse /etc/passwd" block: - name: "PRELIM | {{ ubtu18cis_passwd_tasks }} | Parse /etc/passwd" - command: cat /etc/passwd + ansible.builtin.shell: cat /etc/passwd changed_when: false check_mode: false register: ubtu18cis_passwd_file_audit - name: "PRELIM | {{ ubtu18cis_passwd_tasks }} | Split passwd entries" - set_fact: + ansible.builtin.set_fact: ubtu18cis_passwd: "{{ ubtu18cis_passwd_file_audit.stdout_lines | map('regex_replace', ld_passwd_regex, ld_passwd_yaml) | map('from_yaml') | list }}" with_items: "{{ ubtu18cis_passwd_file_audit.stdout_lines }}" diff --git a/tasks/post_remediation_audit.yml b/tasks/post_remediation_audit.yml new file mode 100644 index 0000000..0eb7608 --- /dev/null +++ b/tasks/post_remediation_audit.yml @@ -0,0 +1,44 @@ +--- + +- name: "Post Audit | Run post_remediation {{ benchmark }} audit" + ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -o {{ post_audit_outfile }} -g {{ group_names }}" + environment: "{{ audit_run_script_environment | default({}) }}" + changed_when: audit_run_post_remediation.rc == 0 + register: audit_run_post_remediation + +- name: Post Audit | ensure audit files readable by users + ansible.builtin.file: + path: "{{ item }}" + mode: 0644 + state: file + loop: + - "{{ post_audit_outfile }}" + - "{{ pre_audit_outfile }}" + +- name: Post Audit | Capture audit data if json format + block: + - name: "Capture data {{ post_audit_outfile }}" + ansible.builtin.shell: "cat {{ post_audit_outfile }}" + register: post_audit + changed_when: false + + - name: Capture post-audit result + ansible.builtin.set_fact: + post_audit_summary: "{{ post_audit.stdout | 
from_json | json_query(summary) }}" + vars: + summary: 'summary."summary-line"' + when: + - audit_format == "json" + +- name: Post Audit | Capture audit data if documentation format + block: + - name: "Post Audit | capture data {{ post_audit_outfile }}" + ansible.builtin.shell: "tail -2 {{ post_audit_outfile }}" + register: post_audit + changed_when: false + + - name: Post Audit | Capture post-audit result + ansible.builtin.set_fact: + post_audit_summary: "{{ post_audit.stdout_lines }}" + when: + - audit_format == "documentation" diff --git a/tasks/pre_remediation_audit.yml b/tasks/pre_remediation_audit.yml new file mode 100644 index 0000000..1ba132b --- /dev/null +++ b/tasks/pre_remediation_audit.yml @@ -0,0 +1,109 @@ +--- + +- name: Pre Audit | Setup the audit + ansible.builtin.include_tasks: LE_audit_setup.yml + when: + - setup_audit + tags: + - setup_audit + +- name: "Pre Audit | Ensure {{ audit_conf_dir }} exists" + ansible.builtin.file: + path: "{{ audit_conf_dir }}" + state: directory + mode: '0755' + +- name: Pre Audit | retrieve audit content files from git + ansible.builtin.git: + repo: "{{ audit_file_git }}" + dest: "{{ audit_conf_dir }}" + version: "{{ audit_git_version }}" + when: + - audit_content == 'git' + +- name: Pre Audit | confirm audit branch vs benchmark version + ansible.builtin.debug: + msg: "Audit will run the branch {{ audit_git_version }} for this Benchmark {{ benchmark_version }}" + +- name: Pre Audit | copy to audit content files to server + ansible.builtin.copy: + src: "{{ audit_local_copy }}" + dest: "{{ audit_conf_dir }}" + mode: 0644 + when: + - audit_content == 'copy' + +- name: Pre Audit | get audit content from url + ansible.builtin.get_url: + url: "{{ audit_files_url }}" + dest: "{{ audit_conf_dir }}" + owner: root + group: root + mode: 0755 + when: + - audit_content == 'get_url' + +- name: Pre Audit | Check Goss is available + block: + - name: Pre Audit | Check for goss file + ansible.builtin.stat: + path: "{{ audit_bin }}" + 
register: goss_available + + - name: Pre Audit | Alert if goss not available + ansible.builtin.assert: + that: goss_available.stat.exists + fail_msg: "Audit binary file {{ audit_bin }} does not exist" + when: + - run_audit + +- name: "Pre Audit | Check whether machine is UEFI-based" + ansible.builtin.stat: + path: /sys/firmware/efi + register: rhel9_efi_boot + tags: + - goss_template + +- name: Pre Audit | Copy ansible default vars values to test audit + ansible.builtin.template: + src: ansible_vars_goss.yml.j2 + dest: "{{ audit_vars_path }}" + mode: 0600 + when: + - run_audit + tags: + - goss_template + +- name: "Pre Audit | Run pre_remediation {{ benchmark }} audit" + ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -o {{ pre_audit_outfile }} -g {{ group_names }}" + environment: "{{ audit_run_script_environment | default({}) }}" + changed_when: audit_run_pre_remediation.rc == 0 + register: audit_run_pre_remediation + +- name: Pre Audit | Capture audit data if json format + block: + - name: "Pre Audit | capture data {{ pre_audit_outfile }}" + ansible.builtin.shell: "cat {{ pre_audit_outfile }}" + register: pre_audit + changed_when: false + + - name: Pre Audit | Capture pre-audit result + ansible.builtin.set_fact: + pre_audit_summary: "{{ pre_audit.stdout | from_json | json_query(summary) }}" + vars: + summary: 'summary."summary-line"' + when: + - audit_format == "json" + +- name: Pre Audit | Capture audit data if documentation format + block: + - name: "Pre Audit | capture data {{ pre_audit_outfile }}" + ansible.builtin.shell: "tail -2 {{ pre_audit_outfile }}" + changed_when: false + register: pre_audit + + - name: Pre Audit | Capture pre-audit result + ansible.builtin.set_fact: + pre_audit_summary: "{{ pre_audit.stdout_lines }}" + when: + - audit_format == "documentation" diff --git a/tasks/prelim.yml b/tasks/prelim.yml index e0264ea..b53f525 100644 --- a/tasks/prelim.yml +++ b/tasks/prelim.yml @@ -1,161 +1,51 @@ --- -# - debug: 
var=ansible_facts -# Preliminary tasks that should always be run # List users in order to look files inside each home directory - name: "PRELIM | List users accounts" - command: "awk -F: '{print $1}' /etc/passwd" + ansible.builtin.shell: "awk -F: '{print $1}' /etc/passwd" changed_when: false register: ubtu18cis_users - when: - - ubtu18cis_rule_6_2_11 or - ubtu18cis_rule_6_2_12 or - ubtu18cis_rule_6_2_13 or - ubtu18cis_rule_6_2_14 + tags: + - always - name: "PRELIM | Check for autofs service" - shell: "systemctl show autofs | grep LoadState | cut -d = -f 2" - register: ubtu18cis_autofs_service_status + ansible.builtin.shell: "systemctl show autofs | grep LoadState | cut -d = -f 2" changed_when: false check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Run apt update" - apt: - update_cache: yes + register: ubtu18cis_autofs_service_status when: - - ubtu18cis_rule_1_4_1 - -- name: "PRELIM | Check for avahi-daemon service" - shell: "systemctl show avahi-daemon | grep LoadState | cut -d = -f 2" - register: avahi_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for cups service" - shell: "systemctl show cups | grep LoadState | cut -d = -f 2" - register: cups_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for dhcpd service" - shell: "systemctl show isc-dhcp-server | grep LoadState | cut -d = -f 2" - register: dhcp_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for dhcp6 service" - shell: "systemctl show isc-dhcp-server6 | grep LoadState | cut -d = -f 2" - register: dhcp6_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for slapd service" - shell: "systemctl show slapd | grep LoadState | cut -d = -f 2" - register: slapd_service_status - changed_when: false - check_mode: false - tags: - - 
skip_ansible_lint - -- name: "PRELIM | Check for nfs service" - shell: "systemctl show nfs | grep LoadState | cut -d = -f 2" - register: nfs_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for rpcbind service" - shell: "systemctl show rpcbind | grep LoadState | cut -d = -f 2" - register: rpcbind_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint + - ubtu18cis_rule_1_1_23 -- name: "PRELIM | Check for named service" - shell: "systemctl show named | grep LoadState | cut -d = -f 2" - register: named_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for vsftpd service" - shell: "systemctl show vsftpd | grep LoadState | cut -d = -f 2" - register: vsftpd_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for httpd service" - shell: "systemctl show httpd | grep LoadState | cut -d = -f 2" - register: httpd_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for dovecot service" - shell: "systemctl show dovecot | grep LoadState | cut -d = -f 2" - register: dovecot_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for smb service" - shell: "systemctl show smb | grep LoadState | cut -d = -f 2" - register: smb_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for squid service" - shell: "systemctl show squid | grep LoadState | cut -d = -f 2" - register: squid_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint - -- name: "PRELIM | Check for snmpd service" - shell: "systemctl show snmpd | grep LoadState | cut -d = -f 2" - register: snmpd_service_status - changed_when: false - check_mode: false +- name: "PRELIM | Run apt update" + 
ansible.builtin.package: + update_cache: true tags: - - skip_ansible_lint + - always -- name: "PRELIM | Check for rsync service" - shell: "systemctl show rsync | grep LoadState | cut -d = -f 2" - register: rsync_service_status +- name: "PRELIM | Check for avahi-daemon service" + ansible.builtin.shell: "systemctl show avahi-daemon | grep LoadState | cut -d = -f 2" changed_when: false check_mode: false + register: avahi_service_status tags: - skip_ansible_lint -- name: "PRELIM | Check for nis service" - shell: "systemctl show nis | grep LoadState | cut -d = -f 2" - register: nis_service_status - changed_when: false - check_mode: false - tags: - - skip_ansible_lint +- name: "PRELIM | Install gdm3" + ansible.builtin.package: + name: gdm3 + state: present + when: + - ubtu18cis_desktop_required + - ubtu18cis_install_gdm3 + - ubtu18cis_rule_1_8_2 or + ubtu18cis_rule_1_8_3 - name: "PRELIM | Install nftables" - apt: + ansible.builtin.package: name: nftables state: present when: ubtu18cis_firewall_package == "nftables" + +- name: "PRELIM | Install acl if not present" + ansible.builtin.package: + name: acl + state: present diff --git a/tasks/section1.yml b/tasks/section1.yml deleted file mode 100644 index 24ab358..0000000 --- a/tasks/section1.yml +++ /dev/null @@ -1,1041 +0,0 @@ ---- -- name: "SCORED | 1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled" - block: - - name: "SCORED | 1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/cramfs.conf - regexp: "^(#)?install cramfs(\\s|$)" - line: install cramfs /bin/true - create: yes - - - name: "SCORED | 1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | Disable cramfs" - modprobe: - name: cramfs - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.1 - - cramfs - -- name: "SCORED | 1.1.1.2 | PATCH 
| Ensure mounting of freevxfs filesystems is disabled" - block: - - name: "SCORED | 1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/freevxfs.conf - regexp: "^(#)?install freevxfs(\\s|$)" - line: install freevxfs /bin/true - create: yes - - - name: "SCORED | 1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled | Disable freevxfs" - modprobe: - name: freevxfs - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.2 - - freevxfs - -- name: "SCORED | 1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled" - block: - - name: "SCORED | 1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/jffs2.conf - regexp: "^(#)?install jffs2(\\s|$)" - line: install jffs2 /bin/true - create: yes - - - name: "SCORED | 1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled | Disable jffs2" - modprobe: - name: jffs2 - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.3 - - jffs2 - -- name: "SCORED | 1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled" - block: - - name: "SCORED | 1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/hfs.conf - regexp: "^(#)?install hfs(\\s|$)" - line: install hfs /bin/true - create: yes - - - name: "SCORED | 1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled | Disable hfs" - modprobe: - name: hfs - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.4 - - hfs - -- name: "SCORED | 1.1.1.5 | PATCH | 
Ensure mounting of hfsplus filesystems is disabled" - block: - - name: "SCORED | 1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/hfsplus.conf - regexp: "^(#)?install hfsplus(\\s|$)" - line: install hfsplus /bin/true - create: yes - - - name: "SCORED | 1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled | Disable hfsplus" - modprobe: - name: hfsplus - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.5 - - hfsplus - -- name: "SCORED | 1.1.1.6 | PATCH | Ensure mounting of squashfs filesystems is disabled" - block: - - name: "SCORED | 1.1.1.6 | PATCH | Ensure mounting of squashfs filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/squashfs.conf - regexp: "^(#)?install squashfs(\\s|$)" - line: install squashfs /bin/true - create: yes - - - name: "SCORED | 1.1.1.6 | PATCH | Ensure mounting of squashfs filesystems is disabled | Disable squashfs" - modprobe: - name: squashfs - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.6 - - squashfs - -- name: "SCORED | 1.1.1.7 | PATCH | Ensure mounting of udf filesystems is disabled" - block: - - name: "SCORED | 1.1.1.7 | PATCH | Ensure mounting of udf filesystems is disabled | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/udf.conf - regexp: "^(#)?install udf(\\s|$)" - line: install udf /bin/true - create: yes - - - name: "SCORED | 1.1.1.7 | PATCH | Ensure mounting of udf filesystems is disabled | Disable udf" - modprobe: - name: udf - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_7 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.1.7 - - udf - -- name: "NOTSCORED | 
1.1.1.8 | PATCH | Ensure mounting of FAT filesystems is limited" - block: - - name: "NOTSCORED | 1.1.1.8 | PATCH | Ensure mounting of FAT filesystems is limited | Edit modprobe config" - lineinfile: - dest: /etc/modprobe.d/vfat.conf - regexp: "^(#)?install vfat(\\s|$)" - line: install vfat /bin/true - create: yes - - - name: "NOTSCORED | 1.1.1.8 | PATCH | Ensure mounting of FAT filesystems is limited | Disable FAT" - modprobe: - name: vfat - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_1_8 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_1.1.1.8 - - vfat - -- name: "SCORED | 1.1.2 | PATCH | Ensure /tmp is configured" - mount: - path: /tmp - src: /tmp - state: mounted - fstype: tmpfs - opts: "{{ ubtu18cis_tmp_fstab_options }}" - when: - - ubtu18cis_rule_1_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.2 - - tmp - -- name: | - "SCORED | 1.1.3 | PATCH | Ensure nodev option set on /tmp partition" - "SCORED | 1.1.4 | PATCH | Ensure nosuid option set on /tmp partition" - "SCORED | 1.1.5 | PATCH | Ensure noexec option set on /tmp partition" - mount: - name: /tmp - src: /tmp - state: remounted - fstype: tmpfs - opts: "{{ ubtu18cis_tmp_fstab_options }}" - when: - - ubtu18cis_rule_1_1_3 or - ubtu18cis_rule_1_1_4 or - ubtu18cis_rule_1_1_5 - - ubtu18cis_vartmp['enabled'] - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.3 - - rule_1.1.4 - - rule_1.1.5 - - tmp - -- name: "SCORED | 1.1.6 | AUDIT | Ensure separate partition exists for /var" - block: - - name: "SCORED | 1.1.6 | AUDIT | Ensure separate partition exists for /var | Gather /var partition" - shell: mount | grep "on /var " - changed_when: false - failed_when: false - args: - warn: false - register: ubtu18cis_1_1_6_var_mounted - - - name: "SCORED | 1.1.6 | AUDIT | Ensure separate partition exists for /var | Alert if /var partition does not exist" - debug: - msg: - - "ALERT!!!! 
There is no separate partition for /var" - - "Please create a separate partition for /var" - when: ubtu18cis_1_1_6_var_mounted.stdout == "" - when: - - ubtu18cis_rule_1_1_6 - tags: - - level2-server - - level2-workstation - - scored - - audit - - rule_1.1.6 - - var - -- name: "SCORED | 1.1.7 | AUDIT | Ensure separate partition exists for /var/tmp" - block: - - name: "SCORED | 1.1.7 | AUDIT | Ensure separate partition exists for /var/tmp | Gather /var/tmp partition" - shell: mount | grep "on /var/tmp " - changed_when: false - failed_when: false - args: - warn: false - register: ubtu18cis_1_1_7_var_tmp_mounted - - - name: "SCORED | 1.1.7 | AUDIT | Ensure separate partition exists for /var/tmp | Alert if /var/tmp partition does not exist" - debug: - msg: - - "ALERT!!!! There is no separate partition for /var/tmp" - - "Please create a separate partition for /var/tmp" - when: ubtu18cis_1_1_7_var_tmp_mounted.stdout == "" - when: - - ubtu18cis_rule_1_1_7 - tags: - - level2-server - - level2-workstation - - scored - - audit - - rule_1.1.7 - - var/tmp - -- name: | - "SCORED | 1.1.8 | PATCH | Ensure nodev option set on /var/tmp partition" - "SCORED | 1.1.9 | PATCH | Ensure nosuid option set on /var/tmp partition" - "SCORED | 1.1.10 | PATCH | Ensure noexec option set on /var/tmp partition" - mount: - name: /var/tmp - src: "{{ ubtu18cis_vartmp['source'] }}" - state: present - fstype: "{{ ubtu18cis_vartmp['fstype'] }}" - opts: "{{ ubtu18cis_vartmp['opts'] }}" - - when: - - ubtu18cis_rule_1_1_8 or - ubtu18cis_rule_1_1_9 or - ubtu18cis_rule_1_1_10 - - ubtu18cis_vartmp['enabled'] - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.8 - - rule_1.1.9 - - rule_1.1.10 - - var/tmp - -- name: "SCORED | 1.1.11 | AUDIT | Ensure separate partition exists for /var/log" - block: - - name: "SCORED | 1.1.11 | AUDIT | Ensure separate partition exists for /var/log | Gather /var/log partition" - shell: mount | grep "on /var/log " - changed_when: false - failed_when: 
false - register: ubtu18cis_1_1_11_var_log_mounted - args: - warn: false - - - name: "SCORED | 1.1.11 | AUDIT | Ensure separate partition exists for /var/log | Alert if /var/log partition does not exist" - debug: - msg: - - "ALERT!!!! There is no separate partition for /var/log" - - "Please create a separate partition for /var/log" - when: ubtu18cis_1_1_11_var_log_mounted.stdout == "" - when: - - ubtu18cis_rule_1_1_11 - tags: - - level2-server - - level2-workstation - - scored - - audit - - rule_1.1.11 - - var/log - -- name: "SCORED | 1.1.12 | AUDIT | Ensure separate partition exists for /var/log/audit" - block: - - name: "SCORED | 1.1.12 | AUDIT | Ensure separate partition exists for /var/log/audit | Gather /var/log/audit" - shell: mount | grep "on /var/log/audit " - changed_when: false - failed_when: false - register: ubtu18cis_1_1_12_var_log_audit_mounted - args: - warn: false - - - name: "SCORED | 1.1.12 | AUDIT | Ensure separate partition exists for /var/log/audit | Alert if /var/log/audit partition does not exist" - debug: - msg: - - "ALERT!!!! There is no separate partition for /var/log/audit" - - "Please create a separate partition for /var/log/audit" - when: ubtu18cis_1_1_12_var_log_audit_mounted.stdout == "" - when: - - ubtu18cis_rule_1_1_12 - tags: - - level2-server - - level2-workstation - - scored - - audit - - var/log/audit - -- name: "SCORED | 1.1.13 | AUDIT | Ensure separate partition exists for /home" - block: - - name: "SCORED | 1.1.13 | AUDIT | Ensure separate partition exists for /home | Gather /home" - shell: mount | grep "on /home" - changed_when: false - failed_when: false - register: ubtu18cis_1_1_13_home_mounted - args: - warn: false - - - name: "SCORED | 1.1.13 | AUDIT | Ensure separate partition exists for /home | Alert if /home partition does not exist" - debug: - msg: - - "ALERT!!!! 
There is no separate partition for /home" - - "Please create a separate partition for /home" - when: ubtu18cis_1_1_13_home_mounted.stdout == "" - when: - - ubtu18cis_rule_1_1_13 - tags: - - level2-server - - level2-workstation - - scored - - audit - - /home - -- name: "SCORED | 1.1.14 | PATCH | Ensure nodev option set on /home partition" - mount: - name: "/home" - src: "{{ item.device }}" - state: mounted - fstype: "{{ item.fstype }}" - opts: "nodev" - with_items: "{{ ansible_mounts }}" - when: - - ubtu18cis_rule_1_1_14 - - item.mount == "/home" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.14 - - /home - -- name: | - "SCORED | 1.1.15 | PATCH | Ensure nodev option set on /dev/shm partition" - "SCORED | 1.1.16 | PATCH | Ensure nosuid option set on /dev/shm partition" - "SCORED | 1.1.17 | PATCH | Ensure noexec option set on /dev/shm partition" - mount: - name: /dev/shm - src: tmpfs - state: mounted - fstype: tmpfs - opts: "defaults,nodev,nosuid,noexec" - when: - - ubtu18cis_rule_1_1_15 or - ubtu18cis_rule_1_1_16 or - ubtu18cis_rule_1_1_17 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.15 - - rule_1.1.16 - - rule_1.1.17 - - /dev/shm - -- name: "NOTSCORED | 1.1.18 | AUDIT | Ensure nodev option set on removable media partitions" - debug: - msg: "Warning!!!! Not relevent control" - when: - - ubtu18cis_rule_1_1_18 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_1.1.18 - - removable_media - -- name: "NOTSCORED | 1.1.19 | AUDIT | Ensure nosuid option set on removable media partitions" - debug: - msg: "Warning!!!! Not relevent control" - when: - - ubtu18cis_rule_1_1_19 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_1.1.19 - - removable_media - -- name: "NOTSCORED | 1.1.20 | AUDIT | Ensure noexec option set on removable media partitions" - debug: - msg: "Warning!!!! 
Not relevent control" - when: - - ubtu18cis_rule_1_1_20 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_1.1.20 - - removable_media - -- name: "SCORED | 1.1.21 | PATCH | Ensure sticky bit is set on all world-writable directories" - shell: df --local -P | awk '{if (NR!=1) print $6}' | xargs -I '{}' find '{}' -xdev -type d \( -perm -0002 -a ! -perm -1000 \) 2>/dev/null | xargs -I '{}' chmod a+t '{}' - failed_when: ubtu18cis_1_1_21_status.rc>0 - register: ubtu18cis_1_1_21_status - when: - - ubtu18cis_rule_1_1_21 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.21 - - sticky_bit - -- name: "SCORED | 1.1.22 | PATCH | Disable Automounting" - service: - name: autofs - state: stopped - enabled: no - when: - - ubtu18cis_rule_1_1_22 - - ubtu18cis_autofs_service_status.stdout == "loaded" - - not ubtu18cis_allow_autofs - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.1.22 - - automounting - -- name: "SCORED | 1.1.23 | PATCH | Disable USB Storage" - block: - - name: "SCORED | 1.1.23 | PATCH | Disable USB Storage | Set modprobe config" - lineinfile: - path: /etc/modprobe.d/usb_storage.conf - regexp: '^install usb-storage' - line: 'install usb-storage /bin/true' - create: yes - - - name: "SCORED | 1.1.23 | PATCH | Disable USB Storage | Remove usb-storage module" - modprobe: - name: usb-storage - state: absent - when: ansible_connection != 'docker' - when: - - ubtu18cis_rule_1_1_23 - tags: - - level1-server - - level2-workstation - - scored - - patch - - rule_1.1.23 - - usb_storage - -- name: "NOTSCORED | 1.2.1 | AUDIT | Ensure package manager repositories are configured" - block: - - name: "NOTSCORED | 1.2.1 | AUDIT | Ensure package manager repositories are configured | Get repositories" - command: apt-cache policy - changed_when: false - failed_when: false - register: ubtu18cis_1_2_1_apt_policy - - - name: "NOTSCORED | 1.2.1 | AUDIT | Ensure package manager repositories are 
configured | Message out repository configs" - debug: - msg: - - "Alert!!!! Below are the apt package repositories" - - "Please review to make sure they conform to your sites policies" - - "{{ ubtu18cis_1_2_1_apt_policy.stdout_lines }}" - when: - - ubtu18cis_rule_1_2_1 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_1.2.1 - - apt - -- name: "NOTSCORED | 1.2.2 | AUDIT | Ensure GPG keys are configured" - block: - - name: "NOTSCORED | 1.2.2 | AUDIT | Ensure GPG keys are configured | Get apt gpg keys" - command: apt-key list - changed_when: false - failed_when: false - register: ubtu18cis_1_2_2_apt_gpgkeys - - - name: "NOTSCORED | 1.2.2 | AUDIT | Ensure GPG keys are configured | Message out apt gpg keys" - debug: - msg: - - "Alert!!!! Below are the apt gpg kyes configured" - - "Please review to make sure they are configured" - - "in accordance with site policy" - - "{{ ubtu18cis_1_2_2_apt_gpgkeys.stdout_lines }}" - when: - - ubtu18cis_rule_1_2_2 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_1.2.2 - - gpg - - keys - -- name: "SCORED | 1.3.1 | PATCH | Ensure sudo is installed" - apt: - name: "{{ ubtu18cis_sudo_package }}" - state: present - when: - - ubtu18cis_rule_1_3_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.3.1 - - sudo - -- name: "SCORED | 1.3.2 | PATCH | Ensure sudo commands use pty" - lineinfile: - path: /etc/sudoers - regexp: '^Defaults use_' - line: 'Defaults use_pty' - insertafter: '^Defaults' - when: - - ubtu18cis_rule_1_3_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.3.2 - - sudo - -- name: "SCORED | 1.3.3 | PATCH | Ensure sudo log file exists" - lineinfile: - path: /etc/sudoers - regexp: '^Defaults logfile' - line: 'Defaults logfile="{{ ubtu18cis_sudo_logfile }}"' - insertafter: '^Defaults' - when: - - ubtu18cis_rule_1_3_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.3.3 - - 
sudo - -- name: "SCORED | 1.4.1 | PATCH | Ensure AIDE is installed" - apt: - name: ['aide', 'aide-common'] - state: present - when: - - ubtu18cis_rule_1_4_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.4.1 - - aide - -- name: "SCORED | 1.4.2 | PATCH | Ensure filesystem integrity is regularly checked" - cron: - name: Run AIDE integrity check - cron_file: "{{ ubtu18cis_aide_cron['cron_file'] }}" - user: "{{ ubtu18cis_aide_cron['cron_user'] }}" - minute: "{{ ubtu18cis_aide_cron['aide_minute'] | default('0') }}" - hour: "{{ ubtu18cis_aide_cron['aide_hour'] | default('5') }}" - day: "{{ ubtu18cis_aide_cron['aide_day'] | default('*') }}" - month: "{{ ubtu18cis_aide_cron['aide_month'] | default('*') }}" - weekday: "{{ ubtu18cis_aide_cron['aide_weekday'] | default('*') }}" - job: "{{ ubtu18cis_aide_cron['aide_job'] }}" - when: - - ubtu18cis_rule_1_4_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.4.2 - - cron - -- name: "SCORED | 1.5.1 | PATCH | Ensure permissions on bootloader config are configured" - block: - - name: "SCORED | 1.5.1 | AUDIT | Ensure permissions on bootloader config are configured | Check for Grub file" - stat: - path: /boot/grub/grub.cfg - register: ubtu18cis_1_5_1_grub_cfg_status - - - name: "SCORED | 1.5.1 | PATCH | Ensure permissions on bootloader config are configured | Set permissions" - file: - path: /boot/grub/grub.cfg - owner: root - group: root - mode: 0600 - when: - - ubtu18cis_1_5_1_grub_cfg_status.stat.exists - when: - - ubtu18cis_rule_1_5_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.5.1 - - grub - -# --------------- -# --------------- -# The RHEL7 based control for this does not use a valid module -# I need to research best way to set grub pw for Ubuntu using the -# grub-mkpasswd-pbkdf2 command and passing the data at the same time. 
-# --------------- -# --------------- -# - name: "SCORED | 1.5.2 | PATCH | Ensure bootloader password is set" -# block: -# - name: "SCORED | 1.5.2 | PATCH | Ensure bootloader password is set" -# grub_crypt: -# password: "{{ ubtu18cis_bootloader_password }}" -# register: grub_pass - -# - debug: var=grub_pass - -# when: -# - ubtu18cis_rule_1_5_2 -# tags: -# - level1-server -# - level1-workstation -# - scored -# - patch -# - rule_1.5.2 -# - grub -# - notimplemented - -- name: "SCORED | 1.5.3 | PATCH | Ensure authentication required for single user mode" - user: - name: root - password: "{{ ubtu18cis_root_pw }}" - when: - - ubtu18cis_rule_1_5_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.5.3 - - passwd - -- name: "SCORED | 1.5.4 | PATCH | Ensure interactive boot is not enabled" - block: - - name: "SCORED | 1.5.4 | PATCH | Ensure interactive boot is not enabled | Check for interactive login setting" - shell: grep "^PROMPT_FOR_CONFIRM=" /etc/sysconfig/boot - changed_when: false - failed_when: false - register: ubtu18cis_1_5_4_interactive_login_status - - - name: "SCORED | 1.5.4 | PATCH | Ensure interactive boot is not enabled | Set interactive login to no" - lineinfile: - path: /etc/sysconfig/boot - regexp: '^PROMPT_FOR_CONFIRM' - line: 'PROMPT_FOR_CONFIRM="no"' - when: "'PROMPT_FOR_CONFIRM' in ubtu18cis_1_5_4_interactive_login_status.stdout" - when: - - ubtu18cis_rule_1_5_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.5.4 - - interactive_boot - -- name: "SCORED | 1.6.1 | AUDIT | Ensure XD/NX support is enabled" - block: - - name: "SCORED | 1.6.1 | AUDIT | Ensure XD/NX support is enabled | Find status of XD/NX" - shell: "journalctl | grep 'protection: active'" - changed_when: false - failed_when: false - register: ubtu18cis_1_6_1_xdnx_status - - - name: "SCORED | 1.6.1 | AUDIT | Ensure XD/NX support is enabled | Alert if XD/NX is not enabled" - debug: - msg: - - "ALERT!!!!You do not have XD/NX 
(Execute Disable/No Execute) enabled" - - "To conform to CIS standards this needs to be enabled" - when: "'active'not in ubtu18cis_1_6_1_xdnx_status.stdout" - when: - - ubtu18cis_rule_1_6_1 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_1.6.1 - - xd/nx - -- name: "SCORED | 1.6.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled" - block: - - name: "SCORED | 1.6.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set ASLR settings" - lineinfile: - path: /etc/sysctl.conf - regexp: '^kernel.randomize_va_space' - line: 'kernel.randomize_va_space = 2' - - - name: "SCORED | 1.6.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set active kernel parameter" - sysctl: - name: kernel.randomize_va_space - value: '2' - when: - - ubtu18cis_rule_1_6_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.6.2 - - aslr - -- name: "SCORED | 1.6.3 | PATCH | Ensure prelink is disabled" - block: - - name: "SCORED | 1.6.3 | PATCH | Ensure prelink is disabled | Restore binaries to normal" - command: prelink -ua - changed_when: false - failed_when: false - - - name: "SCORED | 1.6.3 | PATCH | Ensure prelink is disabled | Remove prelink package" - apt: - name: prelink - state: absent - when: - - ubtu18cis_rule_1_6_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.6.3 - - prelink - -- name: "SCORED | 1.6.4 | PATCH | Ensure core dumps are restricted" - sysctl: - name: fs.suid_dumpable - value: '0' - state: present - reload: yes - sysctl_set: yes - ignoreerrors: yes - when: - - ubtu18cis_rule_1_6_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.6.4 - - coredump - -- name: "SCORED | 1.7.1.1 | PATCH | Ensure AppArmor is installed" - apt: - name: ['apparmor', 'apparmor-utils'] - state: present - when: - - ubtu18cis_rule_1_7_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - 
rule_1.7.1.1 - - apparmor - -- name: "SCORED | 1.7.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration" - block: - - name: "SCORED | 1.7.1.2 | AUDIT | Ensure AppArmor is enabled in the bootloader configuration | Get current settings" - shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' - changed_when: false - failed_when: false - register: ubtu18cis_1_7_1_2_cmdline_settings - - - name: "SCORED | 1.7.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set apparmor settings if none exist" - lineinfile: - path: /etc/default/grub - regexp: '^GRUB_CMDLINE_LINUX' - line: 'GRUB_CMDLINE_LINUX="apparmor=1 security=apparmor {{ ubtu18cis_1_7_1_2_cmdline_settings.stdout }}"' - insertafter: '^GRUB_' - when: - - "'apparmor' not in ubtu18cis_1_7_1_2_cmdline_settings.stdout" - - "'security' not in ubtu18cis_1_7_1_2_cmdline_settings.stdout" - notify: grub update - - - name: "SCORED | 1.7.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set apparmor settings if none exist | Replace apparmor settings when exists" - replace: - path: /etc/default/grub - regexp: "{{ item.regexp }}" - replace: "{{ item.replace }}" - with_items: - - { regexp: 'apparmor=\S+', replace: 'apparmor=1' } - - { regexp: 'security=\S+', replace: 'security=apparmor' } - when: - - "'apparmor' in ubtu18cis_1_7_1_2_cmdline_settings.stdout" - - "'security' in ubtu18cis_1_7_1_2_cmdline_settings.stdout" - notify: grub update - when: - - ubtu18cis_rule_1_7_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.7.1.2 - - apparmor - -- name: "SCORED | 1.7.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode" - command: aa-enforce /etc/apparmor.d/* - failed_when: false - when: - - ubtu18cis_rule_1_7_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.7.1.3 - - apparmor - -- name: "SCORED | 1.7.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing" - 
command: aa-enforce /etc/apparmor.d/* - failed_when: false - when: - - ubtu18cis_rule_1_7_1_4 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_1.7.1.4 - - apparmor - -- name: "SCORED | 1.8.1.1 | PATCH | Ensure message of the day is configured properly" - template: - src: etc/motd.j2 - dest: /etc/motd - when: - - ubtu18cis_rule_1_8_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.8.1.1 - - motd - -- name: "SCORED | 1.8.1.2 | PATCH | Ensure local login warning banner is configured properly" - template: - src: etc/issue.j2 - dest: /etc/issue - when: - - ubtu18cis_rule_1_8_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - banner - -- name: "SCORED | 1.8.1.3 | PATCH | Ensure remote login warning banner is configured properly" - template: - src: etc/issue.net.j2 - dest: /etc/issue.net - when: - - ubtu18cis_rule_1_8_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - banner - -- name: "SCORED | 1.8.1.4 | PATCH | Ensure permissions on /etc/motd are configured" - file: - path: /etc/motd - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_1_8_1_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.8.1.4 - - permissions - - motd - -- name: "SCORED | 1.8.1.5 | PATCH | Ensure permissions on /etc/issue are configured" - file: - path: /etc/issue - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_1_8_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.8.1.5 - - permissions - - banner - -- name: "SCORED | 1.8.1.6 | PATCH | Ensure permissions on /etc/issue.net are configured" - file: - path: /etc/issue.net - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_1_8_1_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.8.1.6 - - permissions - - banner - -- name: "SCORED | 1.8.2 | PATCH | Ensure GDM login banner is configured" - 
lineinfile: - path: /etc/gdm3/greeter.dconf-defaults - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: "{{ item.insertafter }}" - create: yes - owner: root - group: root - mode: 0644 - with_items: - - { regexp: '\[org\/gnome\/login-screen\]', line: '[org/gnome/login-screen]', insertafter: EOF } - - { regexp: 'banner-message-enable', line: 'banner-message-enable=true', insertafter: '\[org\/gnome\/login-screen\]'} - - { regexp: 'banner-message-text', line: 'banner-message-text={{ ubtu18cis_warning_banner }}', insertafter: 'banner-message-enable' } - when: - - ubtu18cis_rule_1_8_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_1.8.2 - - banner - -- name: "NOTSCORED | 1.9 | PATCH | Ensure updates, patches, and additional security software are installed" - apt: - name: "*" - state: latest - when: - - ubtu18cis_rule_1_9 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_1.9 - - patching diff --git a/tasks/section2.yml b/tasks/section2.yml deleted file mode 100644 index 28d50b4..0000000 --- a/tasks/section2.yml +++ /dev/null @@ -1,522 +0,0 @@ ---- -- name: "SCORED | 2.1.1 | PATCH | Ensure xinetd is not installed" - apt: - name: xinetd - state: absent - when: - - ubtu18cis_rule_2_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.1.1 - - xinetd - -- name: "SCORED | 2.1.2 | PATCH | Ensure openbsd-inetd is not installed" - apt: - name: openbsd-inetd - state: absent - when: - - ubtu18cis_rule_2_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.1.2 - - openbsd-inetd - -- name: "SCORED | 2.2.1.1 | PATCH | Ensure time synchronization is in use" - apt: - name: "{{ ubtu18cis_time_sync_tool }}" - state: present - when: - - ubtu18cis_rule_2_2_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.1.1 - - chrony - -- name: "NOTSCORED | 2.2.1.2 | PATCH | Ensure systemd-timesyncd is configured" - block: 
- - name: "NOTSCORED | 2.2.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set configuration for systemd-timesyncd" - lineinfile: - path: /etc/systemd/timesyncd.conf - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: "{{ item.insertafter }}" - with_items: - - { regexp: '^\[Time\]', line: '[Time]', insertafter: EOF } - - { regexp: '^#NTP|^NTP', line: 'NTP=0.ubuntu.pool.ntp.org 1.ubuntu.pool.ntp.org 2.ubuntu.pool.ntp.org', insertafter: '\[Time\]' } - - { regexp: '^#FallbackNTP|^FallbackNTP', line: 'FallbackNTP=ntp.ubuntu.com 3.ubuntu.pool.ntp.org', insertafter: '\[Time\]' } - - { regexp: '^#RootDistanceMaxSec|^RootDistanceMaxSec', line: 'RootDistanceMaxSec=1', insertafter: '\[Time\]'} - - - name: "NOTSCORED | 2.2.1.2 | PATCH | Ensure systemd-timesyncd is configured | Start and enable the systemd-timesyncd service" - service: - name: systemd-timesyncd.service - state: started - enabled: yes - - - name: "NOTSCORED | 2.2.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set timedatectl to ntp" - command: timedatectl set-ntp true - when: - - ubtu18cis_rule_2_2_1_2 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_2.2.1.2 - - systemd-timesyncd - -- name: "SCORED | 2.2.1.3 | PATCH | Ensure chrony is configured" - block: - - name: "SCORED | 2.2.1.3 | AUDIT | Ensure chrony is configured | Check for chrony user" - shell: grep chrony /etc/passwd - changed_when: false - failed_when: false - register: ubtu18cis_2_2_1_3_chrony_user_status - - - name: "SCORED | 2.2.1.3 | PATCH | Ensure chrony is configured | Set chrony.conf file" - template: - src: chrony.conf.j2 - dest: /etc/chrony/chrony.conf - owner: root - group: root - mode: 0644 - - - name: "SCORED | 2.2.1.3 | PATCH | Ensure chrony is configured | Create chrony user" - user: - name: chrony - shell: /usr/sbin/nologin - system: true - when: ubtu18cis_2_2_1_3_chrony_user_status.stdout != "" - - - name: "SCORED | 2.2.1.3 | PATCH | Ensure chrony is configured | Set 
option to use chrony user" - lineinfile: - path: /etc/default/chrony - regexp: '^DAEMON_OPTS' - line: 'DAEMON_OPTS="-u chrony"' - when: - - ubtu18cis_rule_2_2_1_3 - - ubtu18cis_time_sync_tool == "chrony" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.1.3 - - chrony - -- name: "SCORED | 2.2.1.4 | PATCH | Ensure ntp is configured" - block: - - name: "SCORED | 2.2.1.4 | PATCH | Ensure ntp is configured | Set ntp.conf settings" - template: - src: ntp.conf.j2 - dest: /etc/ntp.conf - owner: root - group: root - mode: 0644 - - - name: "SCORED | 2.2.1.4 | PATCH | Ensure ntp is configured | Modify sysconfig/ntpd" - lineinfile: - path: /etc/sysconfig/ntpd - regexp: "{{ item.regexp }}" - line: "{{ item. line }}" - create: yes - with_items: - - { regexp: '^OPTIONS', line: 'OPTIONS="-u ntp:ntp"'} - - { regexp: '^NTPD_OPTIONS', line: 'NTPD_OPTIONS="-u ntp:ntp"' } - - - name: "SCORED | 2.2.1.4 | PATCH | Ensure ntp is configured | Modify /etc/init.d/npt" - lineinfile: - path: /etc/init.d/ntp - regexp: '^RUNAUSER' - line: 'RUNAUSER=npt' - when: - - ubtu18cis_rule_2_2_1_4 - - ubtu18cis_time_sync_tool == "ntp" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.1.4 - - ntp - -- name: "SCORED | 2.2.2 | PATCH | Ensure X Window System is not installed" - apt: - name: xserver-xorg* - state: absent - when: - - ubtu18cis_rule_2_2_2 - - not ubtu18cis_xwindows_required - tags: - - level1-server - - scored - - patch - - rule_2.2.2 - - xwindows - -- name: "SCORED | 2.2.3 | PATCH | Ensure Avahi Server is not enabled" - service: - name: avahi-daemon - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_3 - - not ubtu18cis_avahi_server - - avahi_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.3 - - avahi - - services - -- name: "SCORED | 2.2.4 | PATCH | Ensure CUPS is not enabled" - service: - name: cups - state: stopped - enabled: no - when: - - 
ubtu18cis_rule_2_2_4 - - not ubtu18cis_cups_server - - cups_service_status.stdout == "loaded" - tags: - - level1-server - - level2-workstation - - scored - - patch - - rule_2.2.4 - - cups - - services - -- name: "SCORED | 2.2.5 | PATCH | Ensure DHCP Server is not enabled" - block: - - name: "SCORED | 2.2.5 | PATCH | Ensure DHCP Server is not enabled | Disable isc-dhcp-server service" - service: - name: isc-dhcp-server - state: stopped - enabled: no - when: dhcp_service_status == "loaded" - - - name: "SCORED | 2.2.5 | PATCH | Ensure DHCP Server is not enabled | Disable isc-dhcp-server6 service" - service: - name: isc-dhcp-server6 - state: stopped - enabled: no - when: dhcp6_service_status == "loaded" - when: - - ubtu18cis_rule_2_2_5 - - not ubtu18cis_dhcp_server - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.5 - - dhcp - - services - -- name: "SCORED | 2.2.6 | PATCH | Ensure LDAP server is not enabled" - service: - name: slapd - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_6 - - not ubtu18cis_ldap_server - - slapd_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.6 - - ldap - - services - -- name: "SCORED | 2.2.7 | PATCH | Ensure NFS and RPC are not enabled" - block: - - name: "SCORED | 2.2.7 | PATCH | Ensure NFS and RPC are not enabled | Disable NFS service" - service: - name: nfs-server - state: stopped - enabled: no - when: nfs_service_status.stdout == "loaded" - - - name: "SCORED | 2.2.7 | PATCH | Ensure NFS and RPC are not enabled | Disable RPC service" - service: - name: rpcbind - state: stopped - enabled: no - when: rpcbind_service_status.stdout == "loaded" - when: - - ubtu18cis_rule_2_2_7 - - not ubtu18cis_nfs_rpc_server - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.7 - - nfs - - rpc - - services - -- name: "SCORED | 2.2.8 | PATCH | Ensure DNS Server is not enabled" - service: - name: bind9 - status: 
stopped - enabled: no - when: - - ubtu18cis_rule_2_2_8 - - not ubtu18cis_dns_server - - named_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.8 - - dns - - service - -- name: "SCORED | 2.2.9 | PATCH | Ensure FTP Server is not enabled" - service: - name: vsftpd - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_9 - - not ubtu18cis_vsftpd_server - - vsftpd_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.9 - - ftp - - service - -- name: "SCORED | 2.2.10 | PATCH | Ensure HTTP server is not enabled" - service: - name: apache2 - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_10 - - not ubtu18cis_httpd_server - - httpd_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.10 - - httpd - - service - -- name: "SCORED | 2.2.11 | PATCH | Ensure email services are not enabled" - service: - name: dovecot - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_11 - - not ubtu18cis_dovecot_server - - dovecot_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.11 - - dovecot - - service - -- name: "SCORED | 2.2.12 | PATCH | Ensure Samba is not enabled" - service: - name: smbd - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_12 - - not ubtu18cis_smb_server - - smb_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.12 - - samba - - service - -- name: "SCORED | 2.2.13 | PATCH | Ensure HTTP Proxy Server is not enabled" - service: - name: squid - states: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_13 - - not ubtu18cis_squid_server - - squid_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.13 - - http_proxy - - service - -- name: "SCORED | 
2.2.14 | PATCH | Ensure SNMP Server is not enabled" - service: - name: snmpd - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_14 - - not ubtu18cis_snmp_server - - snmpd_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.14 - - snmp - - service - -- name: "SCORED | 2.2.15 | PATCH | Ensure mail transfer agent is configured for local-only mode" - lineinfile: - path: /etc/postfix/main.cf - regexp: '^(#)?inet_interfaces' - line: 'inet_interfaces = loopback-only' - notify: restart postfix - when: - - ubtu18cis_rule_2_2_15 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.15 - - postfix - -- name: "SCORED | 2.2.16 | PATCH | Ensure rsync service is not enabled" - service: - name: rsync - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_16 - - not ubtu18cis_rsync_server - - rsync_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.2.16 - - rsync - -- name: "SCORED | 2.2.17 | PATCH | Ensure NIS Server is not enabled" - service: - name: nis - state: stopped - enabled: no - when: - - ubtu18cis_rule_2_2_17 - - not ubtu18cis_nis_server - - nis_service_status.stdout == "loaded" - tags: - - level1-server - - level1-workstation - - scored - - rule_2.2.17 - - nis - - service - -- name: "SCORED | 2.3.1 | PATCH | Ensure NIS Client is not installed" - apt: - name: nis - state: absent - when: - - ubtu18cis_rule_2_3_1 - - not ubtu18cis_nis_required - tags: - - level1-server - - level1-workstation - - scored - - rule_2.3.1 - - nis - -- name: "SCORED | 2.3.2 | PATCH | Ensure rsh client is not installed" - apt: - name: rsh-client - state: absent - when: - - ubtu18cis_rule_2_3_2 - - not ubtu18cis_rsh_required - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.3.2 - - rsh - -- name: "SCORED | 2.3.3 | PATCH | Ensure talk client is not installed" - apt: - name: talk - state: absent 
- when: - - ubtu18cis_rule_2_3_3 - - not ubtu18cis_talk_required - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.3.3 - - talk - -- name: "SCORED | 2.3.4 | PATCH | Ensure telnet client is not installed" - apt: - name: telnet - state: absent - when: - - ubtu18cis_rule_2_3_4 - - not ubtu18cis_telnet_required - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.3.4 - - telnet - -- name: "SCORED | 2.3.5 | PATCH | Ensure LDAP client is not installed" - apt: - name: ldap-utils - state: absent - when: - - ubtu18cis_rule_2_3_5 - - not ubtu18cis_ldap_clients_required - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_2.3.5 - - ldap diff --git a/tasks/section3.yml b/tasks/section3.yml deleted file mode 100644 index fb0190a..0000000 --- a/tasks/section3.yml +++ /dev/null @@ -1,1160 +0,0 @@ ---- -- name: "SCORED | 3.1.1 | PATCH | Ensure packet redirect sending is disabled" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv4.conf.all.send_redirects - - net.ipv4.conf.default.send_redirects - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_1_1 - - not ubtu18cis_is_router - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.1.1 - - packet_redirect - - sysctl - -- name: "SCORED | 3.1.2 | PATCH | Ensure IP forwarding is disabled" - block: - - name: "SCORED | 3.1.2 | PATCH | Ensure IP forwarding is disabled | IPv4 settings" - sysctl: - name: net.ipv4.ip_forward - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - notify: - - sysctl flush ipv4 route table - - - name: "SCORED | 3.1.2 | PATCH | Ensure IP forwarding is disabled | IPv6 settings" - sysctl: - name: net.ipv6.conf.all.forwarding - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - notify: - - sysctl flush ipv6 route table - when: - - 
ubtu18cis_rule_3_1_2 - - not ubtu18cis_is_router - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.1.2 - - ip_forwarding - - sysctl - -- name: "SCORED | 3.2.1 | PATCH | Ensure source routed packets are not accepted" - block: - - name: "SCORED | 3.2.1 | PATCH | Ensure source routed packets are not accepted | IPv4 settings" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv4.conf.all.accept_source_route - - net.ipv4.conf.default.accept_source_route - notify: sysctl flush ipv4 route table - - - name: "SCORED | 3.2.1 | PATCH | Ensure source routed packets are not accepted | IPv6 settings" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv6.conf.all.accept_source_route - - net.ipv6.conf.default.accept_source_route - notify: sysctl flush ipv6 route table - when: - - ubtu18cis_rule_3_2_1 - - not ubtu18cis_is_router - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.1 - - routed_packets - - sysctl - -- name: "SCORED | 3.2.2 | PATCH | Ensure ICMP redirects are not accepted" - block: - - name: "SCORED | 3.2.2 | PATCH | Ensure ICMP redirects are not accepted | IPv4 settings" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv4.conf.all.accept_redirects - - net.ipv4.conf.default.accept_redirects - notify: sysctl flush ipv4 route table - - - name: "SCORED | 3.2.2 | PATCH | Ensure ICMP redirects are not accepted | IPv6 settings" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv6.conf.all.accept_redirects - - net.ipv6.conf.default.accept_redirects - notify: sysctl flush ipv6 route table - when: - - ubtu18cis_rule_3_2_2 - tags: - - level1-server - - level1-workstation - - 
scored - - patch - - rule_3.2.2 - - icmp - - sysctl - -- name: "SCORED | 3.2.3 | PATCH | Ensure secure ICMP redirects are not accepted" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv4.conf.all.secure_redirects - - net.ipv4.conf.default.secure_redirects - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.3 - - icmp - - sysctl - -- name: "SCORED | 3.2.4 | PATCH | Ensure suspicious packets are logged" - sysctl: - name: "{{ item }}" - value: '1' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv4.conf.all.log_martians - - net.ipv4.conf.default.log_martians - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.4 - - suspicious_packets - - sysctl - -- name: "SCORED | 3.2.5 | PATCH | Ensure broadcast ICMP requests are ignored" - sysctl: - name: net.ipv4.icmp_echo_ignore_broadcasts - value: '1' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.5 - - icmp - - sysctl - -- name: "SCORED | 3.2.6 | PATCH | Ensure bogus ICMP responses are ignored" - sysctl: - name: net.ipv4.icmp_ignore_bogus_error_responses - value: '1' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.6 - - icmp - - sysctl - -- name: "SCORED | 3.2.7 | PATCH | Ensure Reverse Path Filtering is enabled" - sysctl: - name: "{{ item }}" - value: '1' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - 
net.ipv4.conf.all.rp_filter - - net.ipv4.conf.default.rp_filter - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_7 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.7 - - reverse_path_filtering - - sysctl - -- name: "SCORED | 3.2.8 | PATCH | Ensure TCP SYN Cookies is enabled" - sysctl: - name: net.ipv4.tcp_syncookies - value: '1' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - notify: sysctl flush ipv4 route table - when: - - ubtu18cis_rule_3_2_8 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.8 - - tcp_syn_cookies - - sysctl - -- name: "SCORED | 3.2.9 | PATCH | Ensure IPv6 router advertisements are not accepted" - sysctl: - name: "{{ item }}" - value: '0' - sysctl_set: yes - state: present - reload: yes - ignoreerrors: yes - with_items: - - net.ipv6.conf.all.accept_ra - - net.ipv6.conf.default.accept_ra - notify: sysctl flush ipv6 route table - when: - - ubtu18cis_rule_3_2_9 - - ubtu18cis_ipv6_required - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.2.9 - - ipv6 - - router_advertisements - - sysctl - -- name: "NOTSCORED | 3.3.1 | PATCH | Ensure TCP Wrappers is installed" - apt: - name: tcpd - state: present - when: - - ubtu18cis_rule_3_3_1 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_3.3.1 - - tcp_wrapper - - apt - -- name: "NOTSCORED | 3.3.2 | PATCH | Ensure /etc/hosts.allow is configured" - template: - src: hosts.allow.j2 - dest: /etc/hosts.allow - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_3_3_2 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_3.3.2 - -- name: "NOTSCORED | 3.3.3 | PATCH | Ensure /etc/hosts.deny is configured" - lineinfile: - path: /etc/hosts.deny - regexp: '^(#)?ALL' - line: 'ALL: ALL' - when: - - ubtu18cis_rule_3_3_3 - - not system_is_ec2 - tags: - - level1-server - - level1-workstation - - notscored - - 
patch - - rule_3.3.3 - -- name: "SCORED | 3.3.4 | PATCH | Ensure permissions on /etc/hosts.allow are configured" - file: - path: /etc/hosts.allow - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_3_3_4 - tags: - - level1-server - - level1-workstation - - scored - - rule_3.3.4 - - permissions - -- name: "SCORED | 3.3.5 | PATCH | Ensure permissions on /etc/hosts.deny are configured" - file: - path: /etc/hosts.deny - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_3_3_5 - tags: - - level1-server - - level1-workstation - - scored - - rule_3.3.5 - - permissions - -- name: "SCORED | 3.4.1 | PATCH | Ensure DCCP is disabled" - lineinfile: - path: /etc/modprobe.d/dccp.conf - regexp: '^(#)?install dccp(\\s|$)' - line: 'install dccp /bin/true' - create: yes - when: - - ubtu18cis_rule_3_4_1 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_3.4.1 - - DCCP - -- name: "SCORED | 3.4.2 | PATCH | Ensure SCTP is disabled" - lineinfile: - path: /etc/modprobe.d/sctp.conf - regexp: "^(#)?install sctp(\\s|$)" - line: 'install sctp /bin/true' - create: yes - when: - - ubtu18cis_rule_3_4_2 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_3.4.2 - - sctp - -- name: "SCORED | 3.4.3 | PATCH | Ensure RDS is disabled" - lineinfile: - path: /etc/modprobe.d/rds.conf - regexp: '^(#)?install rds(\\s|$)' - line: 'install rds /bin/true' - create: yes - when: - - ubtu18cis_rule_3_4_3 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_3.4.3 - - rds - -- name: "SCORED | 3.4.4 | PATCH | Ensure TIPC is disabled" - lineinfile: - path: /etc/modprobe.d/tipc.conf - regexp: '^(#)?install tipc(\\s|$)' - line: install tipc /bin/true - create: yes - when: - - ubtu18cis_rule_3_4_4 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_3.4.4 - - tipc - -- name: "SCORED | 3.5.1.1 | PATCH | Ensure a Firewall package is installed" - apt: - name: "{{ 
ubtu18cis_firewall_package }}" - state: present - when: - - ubtu18cis_rule_3_5_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.1.1 - - apt - - firewall - -# Adding the allow OpenSSH rule while enabling ufw to allow ansible to run after enabling -- name: "SCORED | 3.5.2.1 | PATCH | Ensure ufw service is enabled" - ufw: - rule: allow - name: OpenSSH - state: enabled - when: - - ubtu18cis_rule_3_5_2_1 - - ubtu18cis_firewall_package == "ufw" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.2.1 - - ufw - -- name: "SCORED | 3.5.2.2 | PATCH | Ensure default deny firewall policy" - ufw: - default: deny - direction: "{{ item }}" - notify: reload ufw - with_items: - - incoming - - outgoing - - routed - when: - - ubtu18cis_rule_3_5_2_2 - - ubtu18cis_firewall_package == "ufw" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.2.2 - - ufw - -- name: "SCORED | 3.5.2.3 | PATCH | Ensure loopback traffic is configured" - block: - - name: "SCORED | 3.5.2.3 | PATCH | Ensure loopback traffic is configured | Set allow ufw rules" - ufw: - rule: allow - direction: in - interface: lo - notify: reload ufw - - - name: "SCORED | 3.5.2.3 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv4" - ufw: - rule: deny - direction: in - from_ip: 127.0.0.0/8 - notify: reload ufw - - - name: "SCORED | 3.5.2.3 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv6" - ufw: - rule: deny - direction: in - from_ip: "::1" - notify: reload ufw - when: ubtu18cis_ipv6_required - when: - - ubtu18cis_rule_3_5_2_3 - - ubtu18cis_firewall_package == "ufw" - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.2.3 - - ufw - -- name: "NOTSCORED | 3.5.2.4 | PATCH | Ensure outbound connections are configured" - block: - - name: "NOTSCORED | 3.5.2.4 | PATCH | Ensure outbound connections are configured | Custom ports" - ufw: - rule: allow - direction: 
out - to_port: '{{ item }}' - with_items: - - "{{ ubtu18cis_ufw_allow_out_ports }}" - notify: reload ufw - when: ubtu18cis_ufw_allow_out_ports != "all" - - - name: "NOTSCORED | 3.5.2.4 | PATCH | Ensure outbound connections are configured | Allow all" - ufw: - rule: allow - direction: out - to_port: all - notify: reload ufw - when: "'all' in ubtu18cis_ufw_allow_out_ports" - when: - - ubtu18cis_rule_3_5_2_4 - - ubtu18cis_firewall_package == "ufw" - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_3.5.2.4 - - ufw - -- name: "NOTSCORED | 3.5.2.5 | AUDIT | Ensure firewall rules exist for all open ports" - block: - - name: "NOTSCORED | 3.5.2.5 | AUDIT | Ensure firewall rules exist for all open ports | Get list of open ports" - command: ss -4tuln - changed_when: false - failed_when: false - register: ubtu18cis_3_5_2_5_open_listen_ports - - - name: "NOTSCORED | 3.5.2.5 | AUDIT | Ensure firewall rules exist for all open ports | Get list of firewall rules" - command: ufw status - changed_when: false - failed_when: false - register: ubtu18cis_3_5_2_5_firewall_rules - - - name: "NOTSCORED | 3.5.2.5 | AUDIT | Ensure firewall rules exist for all open ports | Message out settings" - debug: - msg: - - "ALERT!!!!Below are the listening ports and firewall rules" - - "Please create firewall rule for any open ports if not already done" - - "*****---Open Listen Ports---*****" - - "{{ ubtu18cis_3_5_2_5_open_listen_ports.stdout_lines }}" - - "*****---Firewall Rules---*****" - - "{{ ubtu18cis_3_5_2_5_firewall_rules.stdout_lines }}" - when: - - ubtu18cis_rule_3_5_2_5 - - ubtu18cis_firewall_package == "ufw" - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_3.5.2.5 - - ufw - -# --------------- -# --------------- -# NFTables is unsupported with this role. 
However I have the actions commented out as a guide -# --------------- -# --------------- -- name: "NOTSCORED | 3.5.3.1 | PATCH | Ensure iptables are flushed" - debug: - msg: "Warning: NFTables is not supported in this role. Please us UFW or iptables" - # iptables: - # flush: yes - when: - - ubtu18cis_rule_3_5_3_1 - - ubtu18cis_firewall_package == "nfptables" - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_3.5.3.1 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.2 | PATCH | Ensure a table exists" - debug: - msg: "Warning: NFTables is not supported in this role. Please us UFW or iptables" - # command: "nft create table {{ ubtu18cis_nftables_table_name }}" - # changed_when: ubtu18cis_3_5_3_2_new_table.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_2_new_table - when: - - ubtu18cis_rule_3_5_3_2 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.2 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.3 | PATCH | Ensure base chains exist" - debug: - msg: "Warning: NFTables is not supported in this role. 
Please us UFW or iptables" - # block: - # - name: "SCORED | 3.5.3.3 | PATCH | Ensure base chains exist | Input entry" - # shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} input { type filter hook input priority 0 \; }' - # changed_when: ubtu18cis_3_5_3_3_base_chains_input.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_3_base_chains_input - - # - name: "SCORED | 3.5.3.3 | PATCH | Ensure base chains exist | Forward entry" - # shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} forward { type filter hook forward priority 0 \; }' - # changed_when: ubtu18cis_3_5_3_3_base_chains_forward.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_3_base_chains_forward - - # - name: "SCORED | 3.5.3.3 | PATCH | Ensure base chains exist | Output entry" - # shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} output { type filter hook output priority 0 \; }' - # changed_when: ubtu18cis_3_5_3_3_base_chains_output.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_3_base_chains_output - when: - - ubtu18cis_rule_3_5_3_3 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.3 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.4 | PATCH | Ensure loopback traffic is configured" - debug: - msg: "Warning: NFTables is not supported in this role. 
Please us UFW or iptables" - # block: - # - name: "SCORED | 3.5.3.4 | AUDIT | Ensure loopback traffic is configured | Get input iif lo accept status" - # shell: nft list ruleset | awk '/hook input/,/}/' | grep 'iif "lo" accept' - # changed_when: false - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_iif_status - - # - name: "SCORED | 3.5.3.4 | AUDIT | Ensure loopback traffic is configured | Get input iif lo accept status" - # shell: nft list ruleset | awk '/hook input/,/}/' | grep 'ip saddr' - # changed_when: false - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_input_drop_status - - # - name: "SCORED | 3.5.3.4 | AUDIT | Ensure loopback traffic is configured | Get input iif lo accept status" - # shell: nft list ruleset | awk '/hook input/,/}/' | grep 'ip6 saddr' - # changed_when: false - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_ipv6_drop_status - - # - name: "SCORED | 3.5.3.4 | PATCH | Ensure loopback traffic is configured | Loopback iif lo accept" - # command: 'nft add rule inet {{ ubtu18cis_nftables_table_name }} input iif lo accept' - # changed_when: ubtu18cis_3_5_3_4_loopback_iif.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_iif - # when: "'iif \"lo\" accept' not in ubtu18cis_3_5_3_4_loopback_iif_status.stdout" - - # - name: "SCORED | 3.5.3.4 | PATCH | Ensure loopback traffic is configured | Loopback input drop" - # command: 'nft add rule inet {{ ubtu18cis_nftables_table_name }} input ip saddr 127\.0\.0\.0\/8 counter drop' - # changed_when: ubtu18cis_3_5_3_4_loopback_input_drop.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_input_drop - # when: - # - "'ip saddr 127.0.0.0/8' not in ubtu18cis_3_5_3_4_loopback_input_drop_status.stdout" - # - "'drop' not in ubtu18cis_3_5_3_4_loopback_input_drop_status.stdout" - - # - name: "SCORED | 3.5.3.4 | PATCH | Ensure loopback traffic is configured | Loopback ipv6 drop" - # command: 'nft add rule inet {{ 
ubtu18cis_nftables_table_name }} input ip6 saddr ::1 counter drop' - # changed_when: ubtu18cis_3_5_3_4_loopback_ipv6_drop.rc == 0 - # failed_when: false - # register: ubtu18cis_3_5_3_4_loopback_ipv6_drop - # when: - # - "'ip6 saddr' not in ubtu18cis_3_5_3_4_loopback_ipv6_drop_status.stdout" - # - "'drop' not in ubtu18cis_3_5_3_4_loopback_ipv6_drop_status.stdout" - when: - - ubtu18cis_rule_3_5_3_4 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.4 - - nftables - - notimplemented - -- name: "NOTSCORED | 3.5.3.5 | PATCH | Ensure outbound and established connections are configured" - debug: - msg: "Warning: NFTables is not supported in this role. Please us UFW or iptables" - when: - - ubtu18cis_rule_3_5_3_5 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_3.5.3.5 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.6 | PATCH | Ensure default deny firewall policy" - debug: - msg: "Warning: NFTables is not supported in this role. Please us UFW or iptables" - when: - - ubtu18cis_rule_3_5_3_6 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.6 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.7 | PATCH | Ensure nftables service is enabled" - debug: - msg: "Warning: NFTables is not supported in this role. Please us UFW or iptables" - # service: - # name: nftables - # state: started - # enabled: yes - when: - - ubtu18cis_rule_3_5_3_7 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.7 - - nftables - - notimplemented - -- name: "SCORED | 3.5.3.8 | PATCH | Ensure nftables rules are permanent" - debug: - msg: "Warning: NFTables is not supported in this role. 
Please us UFW or iptables" - when: - - ubtu18cis_rule_3_5_3_8 - - ubtu18cis_firewall_package == "nftables" - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.3.8 - - nftables - - notimplemented - -- name: "SCORED | 3.5.4.1.1 | PATCH | Ensure default deny firewall policy" - block: - - name: "SCORED | 3.5.4.1.1 | PATCH | Ensure default deny firewall policy | Configure SSH to be allowed in" - iptables: - chain: INPUT - protocol: tcp - destination_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - - - name: "SCORED | 3.5.4.1.1 | PATCH | Ensure default deny firewall policy | Configure SSH to be allowed out" - iptables: - chain: OUTPUT - protocol: tcp - source_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - - - name: "SCORED | 3.5.4.1.1 | PATCH | Ensure default deny firewall policy | Enable apt traffic" - iptables: - chain: INPUT - ctstate: 'ESTABLISHED' - jump: ACCEPT - - - name: "SCORED | 3.5.4.1.1 | PATCH | Ensure default deny firewall policy | Set drop items" - iptables: - policy: DROP - chain: "{{ item }}" - with_items: - - INPUT - - FORWARD - - OUTPUT - when: - - ubtu18cis_rule_3_5_4_1_1 - - ubtu18cis_firewall_package == "iptables" - - not ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.4.1.1 - - iptables - -- name: "SCORED | 3.5.4.1.2 | PATCH | Ensure loopback traffic is configured" - block: - - name: "SCORED | 3.5.4.1.2 | PATCH | Ensure loopback traffic is configured | INPUT loopback ACCEPT" - iptables: - action: append - chain: INPUT - in_interface: lo - jump: ACCEPT - - - name: "SCORED | 3.5.4.1.2 | PATCH | Ensure loopback traffic is configured | OUTPUT loopback ACCEPT" - iptables: - action: append - chain: OUTPUT - out_interface: lo - jump: ACCEPT - - - name: "SCORED | 3.5.4.1.2 | PATCH | Ensure loopback traffic is configured | OUTPUT loopback ACCEPT" - iptables: - action: append - chain: INPUT - source: 127.0.0.0/8 - jump: DROP - when: - - 
ubtu18cis_rule_3_5_4_1_2 - - ubtu18cis_firewall_package == "iptables" - - not ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.4.1.2 - - iptables - -- name: "NOTSCORED | 3.5.4.1.3 | PATCH | Ensure outbound and established connections are configured" - iptables: - action: append - chain: '{{ item.chain }}' - protocol: '{{ item.protocol }}' - match: state - ctstate: '{{ item.ctstate }}' - jump: ACCEPT - with_items: - - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } - - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } - - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } - - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } - - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } - - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } - when: - - ubtu18cis_rule_3_5_4_1_3 - - ubtu18cis_firewall_package == "iptables" - - not ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_3.5.4.1.3 - - iptables - -- name: "SCORED | 3.5.4.1.4 | AUDIT | Ensure firewall rules exist for all open ports" - block: - - name: "SCORED | 3.5.4.1.4 | AUDIT | Ensure firewall rules exist for all open ports | Get list of open ports" - command: ss -4tuln - changed_when: false - failed_when: false - register: ubtu18cis_3_5_4_1_4_open_ports - - - name: "SCORED | 3.5.4.1.4 | AUDIT | Ensure firewall rules exist for all open ports | Get list of rules" - command: iptables -L INPUT -v -n - changed_when: false - failed_when: false - register: ubtu18cis_3_5_4_1_4_current_rules - - - name: "SCORED | 3.5.4.1.4 | AUDIT | Ensure firewall rules exist for all open ports | Alert about settings" - debug: - msg: - - "ALERT!!!!Below is the list the open ports and current rules" - - "Please create a rule for any open port that does not have a current rule" - - "Open Ports:" - - "{{ ubtu18cis_3_5_4_1_4_open_ports.stdout_lines }}" - - "Current Rules:" - - "{{ 
ubtu18cis_3_5_4_1_4_current_rules.stdout_lines }}" - when: - - ubtu18cis_rule_3_5_4_1_4 - - ubtu18cis_firewall_package == "iptables" - - not ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_3.5.4.1.4 - - iptables - -# --------------- -# --------------- -# This is not a control however using the iptables module only writes to memery -# if a reboot occurs that means changes can revert. This task will make the -# above iptables settings permanent -# --------------- -# --------------- -- name: "Make IPTables persistent | Not a control" - block: - - name: "Make IPTables persistent | Install iptables-persistent" - apt: - name: iptables-persistent - state: present - - - name: "Make IPTables persistent | Save to persistent files" - shell: bash -c "iptables-save > /etc/iptables/rules.v4" - changed_when: ubtu18cis_iptables_save.rc == 0 - failed_when: ubtu18cis_iptables_save.rc > 0 - register: ubtu18cis_iptables_save - when: - - ubtu18cis_firewall_package == "iptables" - - not ubtu18cis_iptables_v6 - - ubtu18cis_save_iptables_cis_rules - - ubtu18cis_rule_3_5_4_1_1 or - ubtu18cis_rule_3_5_4_1_2 or - ubtu18cis_rule_3_5_4_1_3 or - ubtu18cis_rule_3_5_4_1_4 - -- name: "SCORED | 3.5.4.2.1 | PATCH | Ensure IPv6 default deny firewall policy" - block: - - name: "SCORED | 3.5.4.2.1 | PATCH | Ensure IPv6 default deny firewall policy | Configure SSH to be allowed out" - iptables: - chain: OUTPUT - protocol: tcp - source_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - ip_version: ipv6 - - - name: "SCORED | 3.5.4.2.1 | PATCH | Ensure IPv6 default deny firewall policy | Enable apt traffic" - iptables: - chain: INPUT - ctstate: 'ESTABLISHED' - jump: ACCEPT - ip_version: ipv6 - - - name: "SCORED | 3.5.4.2.1 | PATCH | Ensure IPv6 default deny firewall policy | Set drop items" - iptables: - policy: DROP - chain: "{{ item }}" - ip_version: ipv6 - with_items: - - INPUT - - FORWARD - - OUTPUT - when: - - ubtu18cis_rule_3_5_4_2_1 - - 
ubtu18cis_firewall_package == "iptables" - - ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.4.2.1 - - ip6tables - -- name: "SCORED | 3.5.4.2.2 | PATCH | Ensure IPv6 loopback traffic is configured" - block: - - name: "SCORED | 3.5.4.2.2 | PATCH | Ensure IPv6 loopback traffic is configured | INPUT loopback ACCEPT" - iptables: - action: append - chain: INPUT - in_interface: lo - jump: ACCEPT - ip_version: ipv6 - - - name: "SCORED | 3.5.4.2.2 | PATCH | Ensure IPv6 loopback traffic is configured | OUTPUT loopback ACCEPT" - iptables: - action: append - chain: OUTPUT - out_interface: lo - jump: ACCEPT - ip_version: ipv6 - - - name: "SCORED | 3.5.4.2.2 | PATCH | Ensure IPv6 loopback traffic is configured | INPUT loopback drop" - iptables: - action: append - chain: INPUT - source: ::1 - jump: DROP - ip_version: ipv6 - when: - - ubtu18cis_rule_3_5_4_2_2 - - ubtu18cis_firewall_package == "iptables" - - ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_3.5.4.2.2 - - ip6tables - -- name: "NOTSCORED | 3.5.4.2.3 | PATCH | Ensure IPv6 outbound and established connections are configured" - iptables: - action: append - chain: '{{ item.chain }}' - protocol: '{{ item.protocol }}' - match: state - ctstate: '{{ item.ctstate }}' - jump: ACCEPT - ip_version: ipv6 - with_items: - - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } - - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } - - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } - - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } - - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } - - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } - when: - - ubtu18cis_rule_3_5_4_2_3 - - ubtu18cis_firewall_package == "iptables" - - ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_3.5.4.2.3 - - ip6tables - -- name: "NOTSCORED | 
3.5.4.2.4 | AUDIT | Ensure IPv6 firewall rules exist for all open ports" - block: - - name: "NOTSCORED | 3.5.4.2.4 | AUDIT | Ensure IPv6 firewall rules exist for all open ports | Get list of open ports" - command: ss -6tuln - changed_when: false - failed_when: false - register: ubtu18cis_3_5_4_2_4_open_ports - - - name: "NOTSCORED | 3.5.4.2.4 | AUDIT | Ensure IPv6 firewall rules exist for all open ports | Get list of rules" - command: ip6tables -L INPUT -v -n - changed_when: false - failed_when: false - register: ubtu18cis_3_5_4_2_4_current_rules - - - name: "NOTSCORED | 3.5.4.2.4 | AUDIT | Ensure IPv6 firewall rules exist for all open ports | Alert about settings" - debug: - msg: - - "ALERT!!!!Below is the list the open ports and current rules" - - "Please create a rule for any open port that does not have a current rule" - - "Open Ports:" - - "{{ ubtu18cis_3_5_4_2_4_open_ports.stdout_lines }}" - - "Current Rules:" - - "{{ ubtu18cis_3_5_4_2_4_current_rules.stdout_lines }}" - when: - - ubtu18cis_rule_3_5_4_2_4 - - ubtu18cis_firewall_package == "iptables" - - ubtu18cis_iptables_v6 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_3.5.4.2.3 - - ip6tables - -# --------------- -# --------------- -# This is not a control however using the ip6tables module only writes to memery -# if a reboot occurs that means changes can revert. 
This task will make the -# above ip6tables settings permanent -# --------------- -# --------------- -- name: "Make IP6Tables persistent | Not a control" - block: - - name: "Make IP6Tables persistent | Install iptables-persistent" - apt: - name: iptables-persistent - state: present - - - name: "Make IP6Tables persistent | Save to persistent files" - shell: bash -c "ip6tables-save > /etc/iptables/rules.v6" - changed_when: ubtu18cis_ip6tables_save.rc == 0 - failed_when: ubtu18cis_ip6tables_save.rc > 0 - register: ubtu18cis_ip6tables_save - when: - - ubtu18cis_firewall_package == "iptables" - - ubtu18cis_iptables_v6 - - ubtu18cis_save_iptables_cis_rules - - ubtu18cis_rule_3_5_4_2_1 or - ubtu18cis_rule_3_5_4_2_2 or - ubtu18cis_rule_3_5_4_2_3 or - ubtu18cis_rule_3_5_4_2_4 - -- name: "SCORED | 3.6 | SCORED | Ensure wireless interfaces are disabled" - block: - - name: "SCORED | 3.6 | SCORED | Ensure wireless interfaces are disabled | Check if nmcli command is available" - command: rpm -q NetworkManager - args: - warn: no - check_mode: false - changed_when: false - register: ubtu18cis_nmcli_available - failed_when: no - - - name: "SCORED | 3.6 | SCORED | Ensure wireless interfaces are disabled | Check if wifi is enabled" - command: nmcli radio wifi - register: ubtu18cis_wifi_enabled - check_mode: false - changed_when: ubtu18cis_wifi_enabled.stdout != "disabled" - when: ubtu18cis_nmcli_available.rc == 0 - - - name: "SCORED | 3.6 | SCORED | Ensure wireless interfaces are disabled | Disable wifi if enabled" - command: nmcli radio wifi off - when: ubtu18cis_wifi_enabled is changed - when: - - ubtu18cis_rule_3_6 - tags: - - level1-server - - level2-workstation - - scored - - patch - - rule_3.6 - - wireless - -- name: "NOTSCORED | 3.7 | PATCH | Disable IPv6" - lineinfile: - path: /etc/default/grub - regexp: '^(GRUB_CMDLINE_LINUX=.*(?!.*ipv6\.disable=1)\"[^\"]+)(\".*)' - line: '\1 ipv6.disable=1\2' - backrefs: yes - notify: grub update - when: - - ubtu18cis_rule_3_7 - - not 
ubtu18cis_ipv6_required - tags: - - level2-server - - level1-workstation - - notscored - - patch - - rule_3.7 - - ipv6 diff --git a/tasks/section4.yml b/tasks/section4.yml deleted file mode 100644 index c873d95..0000000 --- a/tasks/section4.yml +++ /dev/null @@ -1,655 +0,0 @@ ---- -- name: "SCORED | 4.1.1.1 | PATCH | Ensure auditd is installed" - apt: - name: ['auditd', 'audispd-plugins'] - state: present - when: - - ubtu18cis_rule_4_1_1_1 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.1.1 - - auditd - -- name: "SCORED | 4.1.1.2 | PATCH | Ensure auditd service is enabled" - service: - name: auditd - state: started - enabled: yes - when: - - ubtu18cis_rule_4_1_1_2 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.1.2 - - auditd - -- name: "SCORED | 4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled" - block: - - name: "SCORED | 4.1.1.3 | AUDIT | Ensure auditing for processes that start prior to auditd is enabled | Get GRUB_CMDLINE_LINUX" - shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' - changed_when: false - failed_when: false - register: ubtu18cis_4_1_1_3_cmdline_settings - - - name: "SCORED | 4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Add setting if doesn't exist" - lineinfile: - path: /etc/default/grub - regexp: '^GRUB_CMDLINE_LINUX=' - line: 'GRUB_CMDLINE_LINUX="{{ ubtu18cis_4_1_1_3_cmdline_settings.stdout }} audit=1"' - when: "'audit=' not in ubtu18cis_4_1_1_3_cmdline_settings.stdout" - notify: grub update - - - name: "SCORED | 4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Update setting if exists" - replace: - dest: /etc/default/grub - regexp: 'audit=([0-9]+)' - replace: 'audot=1' - after: '^GRUB_CMDLINE_LINUX="' - before: '"' - notify: grub update - when: "'audit=' in ubtu18cis_4_1_1_3_cmdline_settings.stdout" - when: - - ubtu18cis_rule_4_1_1_3 - 
tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4_1_1_3 - - auditd - -- name: "SCORED | 4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient" - replace: - dest: /etc/default/grub - regexp: '(^GRUB_CMDLINE_LINUX\s*\=\s*)(?:")(.+)(?/dev/null; done - register: priv_procs - changed_when: no - check_mode: no - - - name: "SCORED | 4.1.11 | PATCH | Ensure use of privileged commands is collected | Set privileged rules" - template: - src: audit/ubtu18cis_4_1_11_privileged.rules.j2 - dest: /etc/audit/rules.d/privileged.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_11 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.11 - - auditd - -- name: "SCORED | 4.1.12 | PATCH | Ensure successful file system mounts are collected" - template: - src: audit/ubtu18cis_4_1_12_audit.rules.j2 - dest: /etc/audit/rules.d/audit.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - ubtu18cis_rule_4_1_12 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.12 - - auditd - -- name: "SCORED | 4.1.13 | PATCH | Ensure file deletion events by users are collected" - template: - src: audit/ubtu18cis_4_1_13_delete.rules.j2 - dest: /etc/audit/rules.d/delete.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_13 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.13 - - auditd - -- name: "SCORED | 4.1.14 | PATCH | Ensure changes to system administration scope (sudoers) is collected" - template: - src: audit/ubtu18cis_4_1_14_scope.rules.j2 - dest: /etc/audit/rules.d/scope.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_13 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.14 - - auditd - -- name: "SCORED | 4.1.15 | PATCH | Ensure system administrator actions 
(sudolog) are collected" - template: - src: audit/ubtu18cis_4_1_15_actions.rules.j2 - dest: /etc/audit/rules.d/actions.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_15 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.15 - - auditd - -- name: "SCORED | 4.1.16 | PATCH | Ensure kernel module loading and unloading is collected" - template: - src: audit/ubtu18cis_4_1_16_modules.rules.j2 - dest: /etc/audit/rules.d/modules.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_16 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.16 - - auditd - -- name: "SCORED | 4.1.17 | PATCH | Ensure the audit configuration is immutable" - template: - src: audit/ubtu18cis_4_1_17_99finalize.rules.j2 - dest: /etc/audit/rules.d/99-finalize.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu18cis_rule_4_1_17 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_4.1.17 - - auditd - -- name: "SCORED | 4.2.1.1 | PATCH | Ensure rsyslog is installed" - apt: - name: rsyslog - state: present - when: - - ubtu18cis_rule_4_2_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.1.1 - - rsyslog - - apt - -- name: "SCORED | 4.2.1.2 | PATCH | Ensure rsyslog Service is enabled" - service: - name: rsyslog - enabled: yes - when: - - ubtu18cis_rule_4_2_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.1.2 - - rsyslog - -- name: "NOTSCORED | 4.2.1.3 | PATCH | Ensure logging is configured" - block: - - name: "NOTSCORED | 4.2.1.3 | PATCH | Ensure logging is configured | Find configuration file" - shell: grep -r "*.emerg" /etc/* | cut -f1 -d":" - changed_when: false - failed_when: false - register: ubtu18cis_4_2_1_3_rsyslog_config_path - - - name: "NOTSCORED | 4.2.1.3 | PATCH | Ensure logging is configured | 
Gather rsyslog current config" - command: "cat {{ ubtu18cis_4_2_1_3_rsyslog_config_path.stdout }}" - changed_when: false - failed_when: false - register: ubtu18cis_4_2_1_3_rsyslog_config - - - name: "NOTSCORED | 4.2.1.3 | PATCH | Ensure logging is configured | Message out config" - debug: - msg: - - "Alert!!!Below is the current logging configurations for rsyslog, please review" - - "{{ ubtu18cis_4_2_1_3_rsyslog_config.stdout_lines }}" - when: not ubtu18cis_rsyslog_ansible_managed - - - name: "NOTSCORED | 4.2.1.3 | PATCH | Ensure logging is configured | Automated rsyslog configuration" - lineinfile: - path: "{{ ubtu18cis_4_2_1_3_rsyslog_config_path.stdout }}" - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: "{{ item.insertafter }}" - with_items: - - { regexp: '^\*.emerg', line: '*.emerg :omusrmsg:*', insertafter: '^# Emergencies are sent to everybody logged in' } - - { regexp: '^auth,authpriv.\*', line: 'auth,authpriv.* /var/log/auth.log', insertafter: '^# First some standard log files. Log by facility' } - - { regexp: '^mail.\*|^#mail.\*', line: 'mail.* -/var/log/mail', insertafter: '^# First some standard log files' } - - { regexp: '^mail.info|^#mail.info', line: 'mail.info -/var/log/mail.info', insertafter: '^# Logging for the mail system' } - - { regexp: '^mail.warn|^#mail.warn', line: 'mail.warn -/var/log/mail.warn', insertafter: '^# Logging for the mail system.' } - - { regexp: '^mail.err|^#mail.err', line: 'mail.err /var/log/mail.err', insertafter: '^# Logging for the mail system.' 
} - - { regexp: '^news.crit|^#news.crit', line: 'news.crit -/var/log/news/news.crit', insertafter: '^# First some standard log files'} - - { regexp: '^news.err|^#news.err', line: 'news.err -/var/log/news/news.err', insertafter: '^# First some standard log files' } - - { regexp: '^news.notice|^#news.notice', line: 'news.notice -/var/log/news/news.notice', insertafter: '^# First some standard log files' } - - { regexp: '^\*.=warning;\*.=err|^#\*.=warning;\*.=err', line: '*.=warning;*.=err -/var/log/warn', insertafter: '^# First some standard log files' } - - { regexp: '^\*.crit|^#\*.crit', line: '*.crit /var/log/warn', insertafter: '^# First some standard log files' } - - { regexp: '^\*.\*;mail.none;news.none|^#\*.\*;mail.none;news.none', line: '*.*;mail.none;news.none -/var/log/messages', insertafter: '^# First some standard log files' } - - { regexp: '^local0,local1.\*|^#local0,local1.\*', line: 'local0,local1.* -/var/log/localmessages', insertafter: '^# First some standard log files' } - - { regexp: '^local2,local3.\*|^#local2,local3.\*', line: 'local2,local3.* -/var/log/localmessages', insertafter: '^# First some standard log files' } - - { regexp: '^local4,local5.\*|^#local4,local5.\*', line: 'local4,local5.* -/var/log/localmessages', insertafter: '^# First some standard log files' } - - { regexp: '^local6,local7.\*|^#local6,local7.\*', line: 'local6,local7.* -/var/log/localmessages', insertafter: '^# First some standard log files' } - notify: restart rsyslog - when: ubtu18cis_rsyslog_ansible_managed - when: - - ubtu18cis_rule_4_2_1_3 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_4.2.1.3 - - rsyslog - -- name: "SCORED | 4.2.1.4 | PATCH | Ensure rsyslog default file permissions configured" - lineinfile: - path: /etc/rsyslog.conf - regexp: '^\$FileCreateMode|^#\$FileCreateMode' - line: '$FileCreateMode 0640' - notify: restart rsyslog - when: - - ubtu18cis_rule_4_2_1_4 - tags: - - level1-server - - level1-workstation - - scored - 
- patch - - rule_4.2.1.4 - - rsyslog - -- name: "SCORED | 4.2.1.5 | PATCH | Ensure rsyslog is configured to send logs to a remote log host" - blockinfile: - path: /etc/rsyslog.conf - block: | - ##Enable sending of logs over TCP add the following line: - *.* @@{{ ubtu18cis_remote_log_server }} - insertafter: EOF - when: - - ubtu18cis_rule_4_2_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.1.5 - - rsyslog - -- name: "NOTSCORED | 4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts" - block: - - name: "NOTSCORED | 4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When not a log host" - replace: - path: /etc/rsyslog.conf - regexp: '({{ item }})' - replace: '#\1' - with_items: - - '^(\$ModLoad)' - - '^(\$InputTCPServerRun)' - notify: restart rsyslog - when: not ubtu18cis_system_is_log_server - - - name: "NOTSCORED | 4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When a log server" - lineinfile: - path: /etc/rsyslog.conf - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - with_items: - - { regexp: '^\$ModLoad|^#\$ModLoad', line: '$ModLoad imtc' } - - { regexp: '^\$InputTCPServerRun|^#\$InputTCPServerRun', line: '$InputTCPServerRun 514' } - notify: restart rsyslog - when: ubtu18cis_system_is_log_server - when: - - ubtu18cis_rule_4_2_1_6 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_4.2.1.6 - - rsyslog - -- name: "SCORED | 4.2.2.1 | PATCH | Ensure journald is configured to send logs to rsyslog" - lineinfile: - path: /etc/systemd/journald.conf - regexp: '^ForwardToSyslog|^#ForwardToSyslog' - line: 'ForwardToSyslog=yes' - insertafter: '\[Journal\]' - when: - - ubtu18cis_rule_4_2_2_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.2.1 - - rsyslog - - journald - -- name: "SCORED | 4.2.2.2 | PATCH | Ensure journald is configured to 
compress large log files" - lineinfile: - path: /etc/systemd/journald.conf - regexp: '^Compress|^#Compress' - line: 'Compress=yes' - insertafter: '\[Journal\]' - when: - - ubtu18cis_rule_4_2_2_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.2.2 - - rsyslog - - journald - -- name: "SCORED | 4.2.2.3 | PATCH | Ensure journald is configured to write logfiles to persistent disk" - lineinfile: - path: /etc/systemd/journald.conf - regexp: '^Storage|^#Storage' - line: 'Storage=persistent' - insertafter: '\[Journal\]' - when: - - ubtu18cis_rule_4_2_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.2.3 - - rsyslog - - journald - -- name: "SCORED | 4.2.3 | PATCH | Ensure permissions on all logfiles are configured" - command: find /var/log -type f -exec chmod g-wx,o-rwx "{}" + -o -type d -exec chmod g-w,o-rwx "{}" + - changed_when: ubtu18cis_4_2_3_logfile_perms_status.rc == 0 - register: ubtu18cis_4_2_3_logfile_perms_status - when: - - ubtu18cis_rule_4_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_4.2.3 - - logfiles - - permissions - -- name: "NOTSCORED | 4.3 | PATCH | Ensure logrotate is configured" - block: - - name: "NOTSCORED | 4.3 | PATCH | Ensure logrotate is configured | Get logrotate files" - find: - paths: /etc/logrotate.d/ - register: ubtu18cis_4_3_logrotate_files - - - name: "NOTSCORED | 4.3 | PATCH | Ensure logrotate is configured | Set rotation configurations" - replace: - path: "{{ item.path }}" - regexp: '^(\s*)(daily|weekly|monthly|yearly)$' - replace: "\\1{{ ubtu18cis_logrotate }}" - with_items: - - "{{ ubtu18cis_4_3_logrotate_files.files }}" - - { path: "/etc/logrotate.conf" } - when: - - ubtu18cis_rule_4_3 - tags: - - level1-server - - level1-workstation - - notscored - - patch - - rule_4.3 - - logrotate diff --git a/tasks/section5.yml b/tasks/section5.yml deleted file mode 100644 index 12dd0d1..0000000 --- a/tasks/section5.yml +++ /dev/null @@ 
-1,1065 +0,0 @@ ---- -- name: "SCORED | 5.1.1 | PATCH | Ensure cron daemon is enabled" - service: - name: cron - enabled: yes - when: - - ubtu18cis_rule_5_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.1 - - cron - -- name: "SCORED | 5.1.2 | PATCH | Ensure permissions on /etc/crontab are configured" - file: - path: /etc/crontab - owner: root - group: root - mode: 0600 - when: - - ubtu18cis_rule_5_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.2 - - cron - -- name: "SCORED | 5.1.3 | PATCH | Ensure permissions on /etc/cron.hourly are configured" - file: - path: /etc/cron.hourly - owner: root - group: root - mode: 0700 - when: - - ubtu18cis_rule_5_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.3 - - cron - -- name: "SCORED | 5.1.4 | PATCH | Ensure permissions on /etc/cron.daily are configured" - file: - path: /etc/cron.daily - owner: root - group: root - mode: 0700 - when: - - ubtu18cis_rule_5_1_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.4 - - cron - -- name: "SCORED | 5.1.5 | PATCH | Ensure permissions on /etc/cron.weekly are configured" - file: - path: /etc/cron.weekly - owner: root - group: root - mode: 0700 - when: - - ubtu18cis_rule_5_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.5 - - cron - -- name: "SCORED | 5.1.6 | PATCH | Ensure permissions on /etc/cron.monthly are configured" - file: - path: /etc/cron.monthly - owner: root - group: root - mode: 0700 - when: - - ubtu18cis_rule_5_1_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.6 - - cron - -- name: "SCORED | 5.1.7 | PATCH | Ensure permissions on /etc/cron.d are configured" - file: - path: /etc/cron.d - owner: root - group: root - mode: 0700 - when: - - ubtu18cis_rule_5_1_7 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.7 - - cron - -- 
name: "SCORED | 5.1.8 | PATCH | Ensure at/cron is restricted to authorized users" - block: - - name: "SCORED | 5.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Remove deny configs" - file: - path: "{{ item }}" - state: absent - with_items: - - /etc/at.deny - - /etc/cron.deny - - - name: "SCORED | 5.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Create allow files" - file: - path: "{{ item }}" - owner: root - group: root - mode: 0640 - state: touch - with_items: - - /etc/cron.allow - - /etc/at.allow - when: - - ubtu18cis_rule_5_1_8 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.1.8 - - cron - -- name: "SCORED | 5.2.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" - file: - path: /etc/ssh/sshd_config - owner: root - group: root - mode: 0600 - when: - - ubtu18cis_rule_5_2_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.1 - - ssh - -- name: "SCORED | 5.2.2 | PATCH | Ensure permissions on SSH private host key files are configured" - block: - - name: "SCORED | 5.2.2 | AUDIT | Ensure permissions on SSH private host key files are configured | Find ssh_host private keys" - find: - paths: /etc/ssh - patterns: 'ssh_host_*_key' - register: ubtu18cis_5_2_2_ssh_host_priv_keys - - - name: "SCORED | 5.2.2 | PATCH | Ensure permissions on SSH private host key files are configured | Set permissions" - file: - path: "{{ item.path }}" - owner: root - group: root - mode: 0600 - with_items: - - "{{ ubtu18cis_5_2_2_ssh_host_priv_keys.files }}" - when: - - ubtu18cis_rule_5_2_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.2 - - ssh - -- name: "SCORED | 5.2.3 | PATCH | Ensure permissions on SSH public host key files are configured" - block: - - name: "SCORED | 5.2.3 | PATCH | Ensure permissions on SSH public host key files are configured | Find ssh_host public keys" - find: - paths: /etc/ssh - patterns: 'ssh_host_*_key.pub' - 
register: ubtu18cis_5_2_3_ssh_host_pub_keys - - - name: "SCORED | 5.2.3 | PATCH | Ensure permissions on SSH public host key files are configured | Set permissions" - file: - path: "{{ item.path }}" - owner: root - group: root - mode: 0644 - with_items: - - "{{ ubtu18cis_5_2_3_ssh_host_pub_keys.files }}" - when: - - ubtu18cis_rule_5_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.3 - - ssh - -# ------------- -# ------------- -# Protocol command no longer exists in newer versions of sssh (7.4+). However adding it will not cause issues -# ------------- -# ------------- -- name: "SCORED | 5.2.4 | PATCH | Ensure SSH Protocol is not set to 1" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^Protocol|^#Protocol' - line: 'Protocol 2' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.4 - - ssh - -- name: "SCORED | 5.2.5 | PATCH | Ensure SSH LogLevel is appropriate" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^LogLevel|^#LogLevel' - line: 'LogLevel {{ ubtu18cis_sshd.log_level }}' - insertafter: '^# Logging' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.5 - - ssh - -- name: "SCORED | 5.2.6 | PATCH | Ensure SSH X11 forwarding is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^X11Forwarding|^#X11Forwarding' - line: 'X11Forwarding no' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_6 - tags: - - level2-server - - level1-workstation - - scored - - patch - - rule_5.2.6 - - ssh - -- name: "SCORED | 5.2.7 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^MaxAuthTries|^#MaxAuthTries' - line: 'MaxAuthTries {{ ubtu18cis_sshd.max_auth_tries }}' - insertafter: '^# Authentication' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_7 - tags: - - level1-server - - 
level1-workstation - - scored - - patch - - rule_5.2.7 - - ssh - -- name: "SCORED | 5.2.8 | PATCH | Ensure SSH IgnoreRhosts is enabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^IgnoreRhosts|^#IgnoreRhosts' - line: 'IgnoreRhosts yes' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_8 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.8 - - ssh - -- name: "SCORED | 5.2.9 | PATCH | Ensure SSH HostbasedAuthentication is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^HostbasedAuthentication|^#HostbasedAuthentication' - line: 'HostbasedAuthentication no' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_9 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.9 - - ssh - -- name: "SCORED | 5.2.10 | PATCH | Ensure SSH root login is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^PermitRootLogin|^#PermitRootLogin' - line: 'PermitRootLogin no' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_10 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.10 - - ssh - -- name: "SCORED | 5.2.11 | PATCH | Ensure SSH PermitEmptyPasswords is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^PermitEmptyPasswords|^#PermitEmptyPasswords' - line: 'PermitEmptyPasswords no' - insertafter: '# To disable tunneled clear text passwords' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_11 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.11 - - ssh - -- name: "SCORED | 5.2.12 | PATCH | Ensure SSH PermitUserEnvironment is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^PermitUserEnvironment|^#PermitUserEnvironment' - line: 'PermitUserEnvironment no' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_12 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.12 - - ssh - -- name: "SCORED | 5.2.13 | PATCH | Ensure only strong Ciphers are used" - 
lineinfile: - path: /etc/ssh/sshd_config - regexp: '^Ciphers|^#Ciphers' - line: 'Ciphers {{ ubtu18cis_sshd.ciphers }}' - insertafter: '^# Ciphers and keying' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_13 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.13 - - ssh - -- name: "SCORED | 5.2.14 | PATCH | Ensure only strong MAC algorithms are used" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^MACs|^#MACs' - line: 'MACs {{ ubtu18cis_sshd.macs }}' - insertafter: '^# Ciphers and keying' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_14 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.14 - - ssh - -- name: "SCORED | 5.2.15 | PATCH | Ensure only strong Key Exchange algorithms are used" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^KexAlgorithms|^#KexAlgorithms' - line: 'KexAlgorithms {{ ubtu18cis_sshd.kex_algorithms }}' - insertafter: '^# Ciphers and keying' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_15 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.15 - - ssh - -- name: "SCORED | 5.2.16 | PATCH | Ensure SSH Idle Timeout Interval is configured" - lineinfile: - path: /etc/ssh/sshd_config - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - with_items: - - { regexp: '^ClientAliveInterval|^#ClientAliveInterval', line: 'ClientAliveInterval {{ ubtu18cis_sshd.client_alive_interval }}' } - - { regexp: '^ClientAliveCountMax|^#ClientAliveCountMax', line: 'ClientAliveCountMax {{ ubtu18cis_sshd.client_alive_count_max }}' } - notify: restart sshd - when: - - ubtu18cis_rule_5_2_16 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.16 - - sshd - -- name: "SCORED | 5.2.17 | PATCH | Ensure SSH LoginGraceTime is set to one minute or less" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^LoginGraceTime|^#LoginGraceTime' - line: 'LoginGraceTime {{ ubtu18cis_sshd.login_grace_time }}' - insertafter: '^# 
Authentication' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_17 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.17 - - ssh - -- name: "SCORED | 5.2.18 | PATCH | Ensure SSH access is limited" - block: - - name: "SCORED | 5.2.18 | PATCH | Ensure SSH access is limited | Add allowed users" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^AllowUsers|^#AllowUsers' - line: 'AllowUsers {{ ubtu18cis_sshd.allow_users }}' - notify: restart sshd - when: "ubtu18cis_sshd['allow_users']|default('') != ''" - - - name: "SCORED | 5.2.18 | PATCH | Ensure SSH access is limited | Add allowed groups" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^AllowGroups|^#AllowGroups' - line: 'AllowGroups {{ ubtu18cis_sshd.allow_groups }}' - notify: restart sshd - when: "ubtu18cis_sshd['allow_groups']|default('') != ''" - - - name: "SCORED | 5.2.18 | PATCH | Ensure SSH access is limited | Add deny users" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^DenyUsers|^#DenyUsers' - line: 'DenyUsers {{ ubtu18cis_sshd.deny_users }}' - notify: restart sshd - when: "ubtu18cis_sshd['deny_users']|default('') != ''" - - - name: "SCORED | 5.2.18 | PATCH | Ensure SSH access is limited | Add deny groups" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^DenyGroups|^#DenyGroups' - line: 'DenyGroups {{ ubtu18cis_sshd.deny_groups }}' - notify: restart sshd - when: "ubtu18cis_sshd['deny_groups']|default('') != ''" - when: - - ubtu18cis_rule_5_2_18 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.18 - - ssh - -- name: "SCORED | 5.2.19 | PATCH | Ensure SSH warning banner is configured" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^Banner|^#Banner' - line: Banner /etc/issue.net - insertafter: '^# no default banner path' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_19 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.19 - - ssh - -- name: "SCORED | 5.2.20 | PATCH | 
Ensure SSH PAM is enabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^UsePAM|^#UsePAM' - line: 'UsePAM yes' - insertafter: '^# and ChallengeResponseAuthentication' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_20 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.20 - - ssh - - pam - -- name: "SCORED | 5.2.21 | PATCH | Ensure SSH AllowTcpForwarding is disabled" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^AllowTcpForwarding|^#AllowTcpForwarding' - line: 'AllowTcpForwarding no' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_21 - tags: - - level2-server - - level2-workstation - - scored - - patch - - rule_5.2.21 - - ssh - -- name: "SCORED | 5.2.22 | PATCH | Ensure SSH MaxStartups is configured" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^MaxStartups|^#MaxStartups' - line: 'MaxStartups 10:30:60' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_22 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.22 - - ssh - -- name: "SCORED | 5.2.23 | PATCH | Ensure SSH MaxSessions is set to 4 or less" - lineinfile: - path: /etc/ssh/sshd_config - regexp: '^MaxSessions|^#MaxSessions' - line: 'MaxSessions {{ ubtu18cis_sshd.max_sessions }}' - insertafter: '^# Authentication' - notify: restart sshd - when: - - ubtu18cis_rule_5_2_23 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.2.23 - - ssh - -- name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are configured" - block: - - name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are configured | Install pam_pwquality module" - apt: - name: libpam-pwquality - state: present - - - name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are configured | Add minlen" - lineinfile: - path: /etc/security/pwquality.conf - regexp: '^minlen|^# minlen' - line: minlen = 14 - - - name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are 
configured | Add minclass" - lineinfile: - path: /etc/security/pwquality.conf - regexp: '^minclass|^# minclass' - line: 'minclass = 4' - - - name: "SCORED | 5.3.1 | AUDIT | Ensure password creation requirements are configured | Confirm pwquality module in common-password" - command: grep 'password.*requisite.*pam_pwquality.so' /etc/pam.d/common-password - changed_when: false - failed_when: false - register: ubtu18cis_5_3_1_pam_pwquality_state - - - name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality exists" - pamd: - name: common-password - type: password - control: requisite - module_path: pam_pwquality.so - module_arguments: 'retry=3' - state: args_present - when: ubtu18cis_5_3_1_pam_pwquality_state.stdout != "" - - - name: "SCORED | 5.3.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality does not exist" - pamd: - name: common-password - type: password - control: required - module_path: pam_permit.so - new_type: password - new_control: requisite - new_module_path: pam_pwquality.so - module_arguments: 'retry=3' - state: after - when: ubtu18cis_5_3_1_pam_pwquality_state.stdout == "" - when: - - ubtu18cis_rule_5_3_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.3.1 - - pam - -# ------------- -# ------------- -# There is a bug in pam_tally2.so where the use of the audit keyword may log credentials in the case of user error during authentication. -# To work around this bug the CIS documentation has you setting pam_tally2 to the account section. -# Once bug is fixed please set pam_tally2 to the auth sections. 
We have those commented out in the task -# ------------- -# ------------- - -# ------------- -# ------------- -# figure out why pam_deny kills vagrant user -# ------------- -# ------------- -- name: "SCORED | 5.3.2 | PATCH | Ensure lockout for failed password attempts is configured" - block: - - name: "SCORED | 5.3.2 | AUDIT | Ensure lockout for failed password attempts is configured | Confirm pam_tally2.so module in common-password" - # command: grep 'auth.*required.*pam_tally2.so' /etc/pam.d/common-password - command: grep 'auth.*required.*pam_tally2.so' /etc/pam.d/common-account - changed_when: false - failed_when: false - register: ubtu18cis_5_3_2_pam_tally2_state - - - name: "SCORED | 5.3.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_tally2.so settings if exists" - pamd: - # name: common-auth - name: common-account - # type: auth - type: account - control: required - module_path: pam_tally2.so - module_arguments: 'onerr=fail - audit - silent - deny=5 - unlock_time=900' - when: ubtu18cis_5_3_2_pam_tally2_state.stdout != "" - - - name: "SCORED | 5.3.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_tally2.so settings if does not exist" - lineinfile: - # path: /etc/pam.d/common-auth - path: /etc/pam.d/common-account - # line: 'auth required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900' - line: 'account required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900' - insertafter: '^# end of pam-auth-update config' - when: ubtu18cis_5_3_2_pam_tally2_state == "" - - - name: "SCORED | 5.3.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_deny.so and pam_tally.so" - lineinfile: - path: /etc/pam.d/common-account - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: '^# end of pam-auth-update config' - with_items: - # - { regexp: '^accout.*requisite.*pam_deny.so', line: 'account requisite pam_george.so' } - - { regexp: 
'^account.*required.*pam_tally.so', line: 'account required pam_tally.so' } - when: - - ubtu18cis_rule_5_3_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.3.2 - - pamd - - notimplemented - -- name: "SCORED | 5.3.3 | PATCH | Ensure password reuse is limited" - block: - - name: "SCORED | 5.3.3 | PATCH | Ensure password reuse is limited | Confirm pam_pwhistory.so in common-password" - command: grep 'password.*required.*pam_pwhistory.so' /etc/pam.d/common-password - changed_when: false - failed_when: false - register: ubtu18cis_5_3_3_pam_pwhistory_state - - - name: "SCORED | 5.3.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory exists" - pamd: - name: common-password - type: password - control: required - module_path: pam_pwhistory.so - module_arguments: 'remember={{ ubtu18cis_pamd_pwhistory_remember }}' - state: args_present - when: ubtu18cis_5_3_3_pam_pwhistory_state.stdout != "" - - - name: "SCORED | 5.3.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory does no exist" - lineinfile: - path: /etc/pam.d/common-password - line: 'password required pam_pwhistory.so remember={{ ubtu18cis_pamd_pwhistory_remember }}' - insertafter: '^# end of pam-auth-update config' - when: ubtu18cis_5_3_3_pam_pwhistory_state.stdout == "" - when: - - ubtu18cis_rule_5_3_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.3.3 - - pamd - -- name: "SCORED | 5.3.4 | PATCH | Ensure password hashing algorithm is SHA-512" - block: - - name: "SCORED | 5.3.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Confirm pam_unix.so" - shell: grep -E '^\s*password\s+(\S+\s+)+pam_unix\.so\s+(\S+\s+)*sha512\s*(\S+\s*)*(\s+#.*)?$' /etc/pam.d/common-password - changed_when: false - failed_when: false - register: ubtu18cis_5_3_4_pam_unix_state - - - name: "SCORED | 5.3.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so exists" - pamd: - 
name: common-password - type: password - control: '[success=1 default=ignore]' - module_path: pam_unix.so - module_arguments: sha512 - state: args_present - when: ubtu18cis_5_3_4_pam_unix_state.stdout != "" - - - name: "SCORED | 5.3.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so does not exist" - lineinfile: - path: /etc/pam.d/common-password - line: 'password [success=1 default=ignore] pam_unix.so sha512' - insertafter: '^# end of pam-auth-update config' - when: ubtu18cis_5_3_4_pam_unix_state.stdout == "" - when: - - ubtu18cis_rule_5_3_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.3.4 - - pamd - -- name: "SCORED | 5.4.1.1 | PATCH | Ensure password expiration is 365 days or less" - block: - - name: "SCORED | 5.4.1.1 | PATCH | Ensure password expiration is 365 days or less | Set /etc/login.defs PASS_MAX_DAYS" - lineinfile: - path: /etc/login.defs - regexp: '^PASS_MAX_DAYS|^#PASS_MAX_DAYS' - line: 'PASS_MAX_DAYS {{ ubtu18cis_pass.max_days }}' - insertafter: '# Password aging controls' - - - name: "SCORED | 5.4.1.1 | PATCH | Ensure password expiration is 365 days or less | Set existing users PASS_MAX_DAYS" - command: chage --maxdays {{ ubtu18cis_pass.max_days }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu18cis_disruption_high - when: - - ubtu18cis_rule_5_4_1_1 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.1.1 - - user - - login - -- name: "SCORED | 5.4.1.2 | PATCH | Ensure minimum days between password changes is configured" - block: - - name: "SCORED | 5.4.1.2 | PATCH | Ensure minimum days between password changes is configured | Set /etc/login.defs PASS_MIN_DAYS" - lineinfile: - path: /etc/login.defs - regexp: '^PASS_MIN_DAYS|^#PASS_MIN_DAYS' - line: 'PASS_MIN_DAYS {{ ubtu18cis_pass.min_days }}' - - - name: "SCORED | 5.4.1.2 | PATCH | Ensure minimum 
days between password changes is configured | Set existing users PASS_MIN_DAYS" - command: chage --mindays {{ ubtu18cis_pass.min_days }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu18cis_disruption_high - when: - - ubtu18cis_rule_5_4_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.1.1 - - user - - login - -- name: "SCORED | 5.4.1.3 | PATCH | Ensure password expiration warning days is 7 or more" - block: - - name: "SCORED | 5.4.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set /etc/login.defs PASS_WARN_AGE" - lineinfile: - path: /etc/login.defs - regexp: '^PASS_WARN_AGE|^#PASS_WARN_AGE' - line: 'PASS_WARN_AGE {{ ubtu18cis_pass.warn_age }}' - - - name: "SCORED | 5.4.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set existing users PASS_WARN_AGE" - command: chage --warndays {{ ubtu18cis_pass.warn_age }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu18cis_disruption_high - when: - - ubtu18cis_rule_5_4_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.1.3 - - user - - login - -- name: "SCORED | 5.4.1.4 | PATCH | Ensure inactive password lock is 30 days or less" - block: - - name: "SCORED | 5.4.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for new users" - command: useradd -D -f {{ ubtu18cis_pass.inactive }} - failed_when: false - - - name: "SCORED | 5.4.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for existing users" - command: chage --inactive {{ ubtu18cis_pass.inactive }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu18cis_disruption_high - when: - - ubtu18cis_rule_5_4_1_4 
- tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.1.4 - - user - - login - -- name: "SCORED | 5.4.1.5 | PATCH | Ensure all users last password change date is in the past" - block: - - name: "SCORED | 5.4.1.5 | PATCH | Ensure all users last password change date is in the past | Get current date in Unix Time" - shell: echo $(($(date --utc --date "$1" +%s)/86400)) - changed_when: false - failed_when: false - register: ubtu18cis_5_4_1_5_current_time - - - name: "SCORED | 5.4.1.5 | PATCH | Ensure all users last password change date is in the past | Get list of users with last changed PW date in future" - shell: "cat /etc/shadow | awk -F: '{if($3>{{ ubtu18cis_5_4_1_5_current_time.stdout }})print$1}'" - changed_when: false - failed_when: false - register: ubtu18cis_5_4_1_5_user_list - - - name: "SCORED | 5.4.1.5 | PATCH | Ensure all users last password change date is in the past | Warn about users" - debug: - msg: - - "WARNING!!!!The following accounts have the last PW change date in the future" - - "{{ ubtu18cis_5_4_1_5_user_list.stdout_lines }}" - when: ubtu18cis_5_4_1_5_user_list.stdout != "" - - - name: "SCORED | 5.4.1.5 | PATCH | Ensure all users last password change date is in the past | Lock accounts with furtre PW changed dates" - command: passwd --expire {{ item }} - failed_when: false - with_items: - - "{{ ubtu18cis_5_4_1_5_user_list.stdout_lines }}" - when: - - ubtu18cis_disruption_high - - ubtu18cis_5_4_1_5_user_list.stdout != "" - when: - - ubtu18cis_rule_5_4_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.1.5 - - user - - login - -- name: "SCORED | 5.4.2 | PATCH | Ensure system accounts are secured" - block: - - name: "SCORED | 5.4.2 | PATCH | Ensure system accounts are secured | Set system accounts to login" - user: - name: "{{ item }}" - shell: /sbin/nologin - with_items: - - "{{ ubtu18cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" - when: - - item != "root" 
- - item != "sync" - - item != "shutdown" - - item != "halt" - - - name: "SCORED | 5.4.2 | PATCH | Ensure system accounts are secured | Lock non-root system accounts" - user: - name: "{{ item }}" - password_lock: true - with_items: - - "{{ ubtu18cis_passwd| selectattr('uid', '<', 1000) | map(attribute='id') | list }}" - when: - - item != "root" - when: - - ubtu18cis_rule_5_4_2 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.2 - - user - - system - -- name: "SCORED | 5.4.3 | PATCH | Ensure default group for the root account is GID 0" - block: - - name: "SCORED | 5.4.3 | PATCH | Ensure default group for the root account is GID 0 | Set root group to GUID 0" - group: - name: root - gid: 0 - - - name: "SCORED | 5.4.3 | PATCH | Ensure default group for the root account is GID 0 | Set root user to root group" - user: - name: root - group: root - when: - - ubtu18cis_rule_5_4_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.3 - - user - - system - -- name: "SCORED | 5.4.4 | PATCH | Ensure default user umask is 027 or more restrictive" - replace: - path: "{{ item }}" - regexp: '(^\s+umask) 002' - replace: '\1 027' - with_items: - - /etc/bash.bashrc - - /etc/profile - when: - - ubtu18cis_rule_5_4_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.4 - - user - -- name: "SCORED | 5.4.5 | PATCH | Ensure default user shell timeout is 900 seconds or less" - blockinfile: - create: yes - mode: 0644 - dest: "{{ item.dest }}" - state: "{{ item.state }}" - marker: "# {mark} ANSIBLE MANAGED" - block: | - # Set session timeout - CIS ID 5.4.5 - TMOUT={{ ubtu18cis_shell_session_timeout.timeout }} - readonly TMOUT - export TMOUT - with_items: - - { dest: "{{ ubtu18cis_shell_session_timeout.file }}", state: present } - - { dest: /etc/profile, state: "{{ (ubtu18cis_shell_session_timeout.file == '/etc/profile') | ternary('present', 'absent') }}" } - - { dest: 
/etc/bash.bashrc, state: present } - when: - - ubtu18cis_rule_5_4_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.4.5 - - user - -- name: "NOTSCORED | 5.5 | AUDIT | Ensure root login is restricted to system console" - block: - - name: "NOTSCORED | 5.5 | AUDIT | Ensure root login is restricted to system console | Get list of all terminals" - command: cat /etc/securetty - changed_when: false - failed_when: false - register: ubtu18cis_5_5_terminal_list - - - name: "NOTSCORED | 5.5 | AUDIT | Ensure root login is restricted to system console | Message out list" - debug: - msg: - - "WARNING!!!!Below is the list of conoles with root login access" - - "Please review for any conoles that are not in a physically secure location" - - "{{ ubtu18cis_5_5_terminal_list.stdout_lines }}" - when: - - ubtu18cis_rule_5_5 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_5.5 - - user - -- name: "SCORED | 5.6 | PATCH | Ensure access to the su command is restricted" - block: - - name: "SCORED | 5.6 | PATCH | Ensure access to the su command is restricted | Check for pam_wheel.so module" - command: grep 'auth.*required.*pam_wheel' /etc/pam.d/su - changed_when: false - failed_when: false - register: ubtu18cis_5_6_pam_wheel_status - - - name: "SCORED | 5.6 | PATCH | Ensure access to the su command is restricted | Create empty sugroup" - group: - name: "{{ ubtu18cis_su_group }}" - - - name: "SCORED | 5.6 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if exists" - pamd: - name: su - type: auth - control: required - module_path: pam_wheel.so - module_arguments: 'use_uid group={{ ubtu18cis_su_group }}' - when: ubtu18cis_5_6_pam_wheel_status.stdout != "" - - - name: "SCORED | 5.6 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if does not exist" - lineinfile: - path: /etc/pam.d/su - line: 'auth required pam_wheel.so use_uid group={{ ubtu18cis_su_group }}' - create: yes - when: 
ubtu18cis_5_6_pam_wheel_status.stdout == "" - when: - - ubtu18cis_rule_5_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_5.6 - - user diff --git a/tasks/section6.yml b/tasks/section6.yml deleted file mode 100644 index 292c73d..0000000 --- a/tasks/section6.yml +++ /dev/null @@ -1,931 +0,0 @@ ---- -- name: "NOTSCORED | 6.1.1 | AUDIT | Audit system file permissions" - block: - - name: "NOTSCORED | 6.1.1 | AUDIT | Audit system file permissions | Register package list" - command: ls -a /bin/ - changed_when: false - failed_when: false - register: ubtu18cis_6_1_1_packages - - # - name: "NOTSCORED | 6.1.1 | AUDIT | Audit system file permissions | Audit the packages" - # command: dpkg --verify {{ item }} - # changed_when: false - # failed_when: false - # with_items: - # - "{{ ubtu18cis_6_1_1_packages.stdout_lines }}" - # register: ubtu18cis_6_1_1_packages_audited - - - name: "NOTSCORED | 6.1.1 | AUDIT | Audit system file permissions | Message out packages results for review" - debug: - msg: - - "ALERT!!!! Below are the packages that need to be reviewed." 
- - "You can run dpkg --verify and if nothing is returned the package is installed correctly" - - "{{ ubtu18cis_6_1_1_packages.stdout_lines }}" - when: - - ubtu18cis_rule_6_1_1 - tags: - - level2-server - - level2-workstation - - notscored - - audit - - rule_6.1.1 - - permissions - -- name: "SCORED | 6.1.2 | PATCH | Ensure permissions on /etc/passwd are configured" - file: - path: /etc/passwd - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_6_1_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.2 - - permissions - -- name: "SCORED | 6.1.3 | PATCH | Ensure permissions on /etc/gshadow- are configured" - file: - path: /etc/gshadow- - owner: root - group: shadow - mode: 0640 - when: - - ubtu18cis_rule_6_1_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.3 - - permissions - -- name: "SCORED | 6.1.4 | PATCH | Ensure permissions on /etc/shadow are configured" - file: - path: /etc/shadow - owner: root - group: shadow - mode: 0640 - when: - - ubtu18cis_rule_6_1_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.4 - - permissions - -- name: "SCORED | 6.1.5 | PATCH | Ensure permissions on /etc/group are configured" - file: - path: /etc/group - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_6_1_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.5 - - permissions - -- name: "SCORED | 6.1.6 | PATCH | Ensure permissions on /etc/passwd- are configured" - file: - path: /etc/passwd- - owner: root - group: root - mode: 0600 - when: - - ubtu18cis_rule_6_1_6 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.6 - - permissions - -- name: "SCORED | 6.1.7 | PATCH | Ensure permissions on /etc/shadow- are configured" - file: - path: /etc/shadow- - owner: root - group: shadow - mode: 0600 - when: - - ubtu18cis_rule_6_1_7 - tags: - - level1-server - - level1-workstation - - scored - - 
patch - - rule_6.1.7 - - permissions - -- name: "SCORED | 6.1.8 | PATCH | Ensure permissions on /etc/group- are configured" - file: - path: /etc/group- - owner: root - group: root - mode: 0644 - when: - - ubtu18cis_rule_6_1_8 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.8 - - permissions - -- name: "SCORED | 6.1.9 | PATCH | Ensure permissions on /etc/gshadow are configured" - file: - path: /etc/gshadow - owner: root - group: shadow - mode: 0640 - when: - - ubtu18cis_rule_6_1_9 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.9 - - permissions - -- name: "SCORED | 6.1.10 | PATCH | Ensure no world writable files exist" - block: - - name: "SCORED | 6.1.10 | PATCH | Ensure no world writable files exist | Get list of world-writable files" - shell: find {{ item.mount }} -xdev -type f -perm -0002 - changed_when: false - failed_when: false - register: ubtu18cis_6_1_10_wwf - with_items: - - "{{ ansible_mounts }}" - - - name: "SCORED | 6.1.10 | PATCH | Ensure no world writable files exist | Adjust world-writable files if they exist" - file: - path: "{{ item }}" - mode: o-w - with_items: - - "{{ ubtu18cis_6_1_10_wwf.results | map(attribute='stdout_lines') | flatten }}" - when: ubtu18cis_no_world_write_adjust - when: - - ubtu18cis_rule_6_1_10 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.10 - - permissions - -- name: "SCORED | 6.1.11 | PATCH | Ensure no unowned files or directories exist" - block: - - name: "SCORED | 6.1.11 | AUDIT | Ensure no unowned files or directories exist | Get unowned files or directories" - shell: find {{ item.mount }} -xdev -nouser - changed_when: false - failed_when: false - register: ubtu18cis_6_1_11_no_user_items - with_items: - - "{{ ansible_mounts }}" - - - name: "SCORED | 6.1.11 | PATCH | Ensure no unowned files or directories exist | Flatten no_user_items results for easier use" - set_fact: - ubtu18cis_6_1_11_no_user_items_flatten: "{{ 
ubtu18cis_6_1_11_no_user_items.results | map(attribute='stdout_lines') | flatten }}" - - - name: "SCORED | 6.1.11 | AUDIT | Ensure no unowned files or directories exist | Alert on unowned files and directories" - debug: - msg: - - "ALERT!!!You have unowned files and are configured to not auto-remediate for this task" - - "Please review the files/directories below and assign an owner" - - "{{ ubtu18cis_6_1_11_no_user_items_flatten }}" - when: - - not ubtu18cis_no_owner_adjust - - ubtu18cis_6_1_11_no_user_items_flatten != "" - - - name: "SCORED | 6.1.11 | PATCH | Ensure no unowned files or directories exist | Set unowned files/directories to configured owner" - file: - path: "{{ item }}" - owner: "{{ ubtu18cis_unowned_owner }}" - with_items: - - "{{ ubtu18cis_6_1_11_no_user_items_flatten }}" - when: - - ubtu18cis_no_owner_adjust - - ubtu18cis_6_1_11_no_user_items_flatten != "" - when: - - ubtu18cis_rule_6_1_11 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.11 - - permissions - -- name: "SCORED | 6.1.12 | PATCH | Ensure no ungrouped files or directories exist" - block: - - name: "SCORED | 6.1.12 | PATCH | Ensure no ungrouped files or directories exist | Get ungrouped fiels or directories" - shell: find {{ item.mount }} -xdev -nogroup - changed_when: false - failed_when: false - register: ubtu18cis_6_1_12_ungrouped_items - with_items: - - "{{ ansible_mounts }}" - - - name: "SCORED | 6.1.12 | PATCH | Ensure no ungrouped files or directories exist | Flatten ungrouped_items results for easier use" - set_fact: - ubtu18cis_6_1_12_ungrouped_items_flatten: "{{ ubtu18cis_6_1_12_ungrouped_items.results | map(attribute='stdout_lines') | flatten }}" - - - name: "SCORED | 6.1.12 | PATCH | Ensure no ungrouped files or directories exist | Alert on ungrouped files and directories" - debug: - msg: - - "ALERT!!!!You have ungrouped files/directories and are configured to not auto-remediate for this task" - - "Please review the files/directories below 
and assign a group" - - "{{ ubtu18cis_6_1_12_ungrouped_items_flatten }}" - when: - - not ubtu18cis_no_group_adjust - - ubtu18cis_6_1_12_ungrouped_items_flatten != "" - - - name: "SCORED | 6.1.12 | PATCH | Ensure no ungrouped files or directories exist | Set ungrouped files/directories to configured group" - file: - path: "{{ item }}" - group: "{{ ubtu18cis_ungrouped_group }}" - with_items: - - "{{ ubtu18cis_6_1_12_ungrouped_items_flatten }}" - when: - - ubtu18cis_no_group_adjust - - ubtu18cis_6_1_12_ungrouped_items_flatten != "" - when: - - ubtu18cis_rule_6_1_12 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.1.12 - - permissions - -- name: "NOTSCORED | 6.1.13 | AUDIT | Audit SUID executables" - block: - - name: "NOTSCORED | 6.1.13 | AUDIT | Audit SUID executables | Find SUID executables" - # shell: df --local -P | awk '{if (NR!=1) print $6}' | xargs -I '{}' find '{}' -xdev -type f -perm -4000 - shell: find {{ item.mount }} -xdev -type f -perm -4000 - changed_when: false - failed_when: false - register: ubtu18cis_6_1_13_suid_executables - with_items: - - "{{ ansible_mounts }}" - - - name: "NOTSCORED | 6.1.13 | AUDIT | Audit SUID executables | Flatten suid_executables results for easier use" - set_fact: - ubtu18cis_6_1_13_suid_executables_flatten: "{{ ubtu18cis_6_1_13_suid_executables.results | map(attribute='stdout_lines') | flatten }}" - - - name: "NOTSCORED | 6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist" - debug: - msg: - - "ALERT!!!!You have SUID executables" - - "The files are listed below, please confirm the integrity of these binaries" - - "{{ ubtu18cis_6_1_13_suid_executables_flatten }}" - when: - - ubtu18cis_6_1_13_suid_executables_flatten != "" - - not ubtu18cis_suid_adjust - - - name: "NOTSCORED | 6.1.13 | PATCH | Audit SUID executables | Remove SUID bit" - file: - path: "{{ item }}" - mode: 'u-s' - with_items: - - "{{ ubtu18cis_6_1_13_suid_executables_flatten }}" - when: - - ubtu18cis_suid_adjust 
- - ubtu18cis_6_1_13_suid_executables_flatten != "" - when: - - ubtu18cis_rule_6_1_13 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_6.1.13 - - permissions - -- name: "NOTSCORED | 6.1.14 | AUDIT | Audit SGID executables" - block: - - name: "NOTSCORED | 6.1.14 | PATCH | Audit SGID executables | Find SGID executables" - shell: find {{ item }} -xdev -type f -perm -2000 - changed_when: false - failed_when: false - register: ubtu18cis_6_1_14_sgid_executables - with_items: - - "{{ ansible_mounts }}" - - - name: "NOTSCORED | 6.1.14 | AUDIT | Audit SGID executables | Flatten sgid_executables results for easier use" - set_fact: - ubtu18cis_6_1_14_sgid_executables_flatten: "{{ ubtu18cis_6_1_14_sgid_executables.results | map(attribute='stdout_lines') | flatten }}" - - - name: "NOTSCORED | 6.1.14 | AUDIT | Audit SGID executables | Alert SGID executables exist" - debug: - msg: - - "ALERT!!!!You have SGID executables" - - "The files are listed below, please review the integrity of these binaries" - - "{{ ubtu18cis_6_1_14_sgid_executables_flatten }}" - when: ubtu18cis_6_1_14_sgid_executables_flatten != [] - when: - - ubtu18cis_rule_6_1_14 - tags: - - level1-server - - level1-workstation - - notscored - - audit - - rule_6.1.14 - - permissions - -- name: "SCORED | 6.2.1 | PATCH | Ensure password fields are not empty" - block: - - name: "SCORED | 6.2.1 | PATCH | Ensure password fields are not empty | Find users with no password" - shell: awk -F":" '($2 == "" ) { print $1 }' /etc/shadow - register: ubtu18cis_6_2_1_empty_password_acct - changed_when: no - check_mode: no - - - name: "SCORED | 6.2.1 | PATCH | Ensure password fields are not empty | Lock users with empty password" - user: - name: "{{ item }}" - password_lock: yes - with_items: - - "{{ ubtu18cis_6_2_1_empty_password_acct.stdout_lines }}" - when: ubtu18cis_6_2_1_empty_password_acct.stdout != "" - when: - - ubtu18cis_rule_6_2_1 - tags: - - level1-server - - level1-workstation - - scored - - 
patch - - rule_6.2.1 - - user - - permissions - -- name: "SCORED | 6.2.2 | PATCH | Ensure no legacy + entries exist in /etc/passwd" - replace: - path: /etc/passwd - regexp: '\+:' - replace: ':' - when: - - ubtu18cis_rule_6_2_2 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.2 - - user - -- name: "SCORED | 6.2.3 | PATCH | Ensure all users' home directories exist" - block: - - name: capture audit task for missing homedirs - block: &u18s_homedir_audit - - name: "SCORED | 6.2.3 | PATCH | Ensure all users' home directories exist | Find users missing home directories" - shell: pwck -r | grep -P {{ ld_regex | quote }} - check_mode: no - register: ubtu18cis_users_missing_home - changed_when: ubtu18cis_6_2_3_audit | length > 0 - # failed_when: 0: success, 1: no grep match, 2: pwck found something - failed_when: ubtu18cis_users_missing_home.rc not in [0,1,2] - - ### NOTE: due to https://github.com/ansible/ansible/issues/24862 This is a shell command, and is quite frankly less than ideal. - - name: "SCORED | 6.2.3 | PATCH | Ensure all users' home directories exist| Creates home directories" - command: "mkhomedir_helper {{ item }}" - # check_mode: "{{ ubtu18cis_disruptive_check_mode }}" - with_items: "{{ ubtu18cis_6_2_3_audit | map(attribute='id') | list }}" - when: - - ubtu18cis_users_missing_home is changed - - ubtu18cis_disruption_high - - ### NOTE: Now we need to address that SELINUX will not let mkhomedir_helper create home directories for UUID < 500, so the ftp user will still show up in a pwck. Not sure this is needed, I need to confirm if that user is removed in an earlier task. 
- ### ^ Likely doesn't matter as 6.2.7 defines "local interactive users" as those w/ uid 1000-4999 - - name: replay audit task - block: *u18s_homedir_audit - - # CAUTION: debug loops don't show changed since 2.4: - # Fix: https://github.com/ansible/ansible/pull/59958 - - name: "SCORED | 6.2.3 | PATCH | Ensure all users' home directories exist | Alert about correcting owner and group" - debug: msg="You will need to mkdir -p {{ item }} and chown properly to the correct owner and group." - with_items: "{{ ubtu18cis_6_2_3_audit | map(attribute='dir') | list }}" - changed_when: ubtu18cis_audit_complex - when: - - ubtu18cis_users_missing_home is changed - vars: - ld_regex: >- - ^user '(?P.*)': directory '(?P.*)' does not exist$ - ld_users: "{{ ubtu18cis_users_missing_home.stdout_lines | map('regex_replace', ld_regex, '\\g') | list }}" - ubtu18cis_6_2_3_audit: "{{ ubtu18cis_passwd | selectattr('uid', '>=', 1000) | selectattr('id', 'in', ld_users) | list }}" - when: - - ubtu18cis_rule_6_2_3 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.3 - - user - -- name: "SCORED | 6.2.4 | PATCH | Ensure no legacy '+' entries exist in /etc/shadow" - replace: - path: /etc/shadow - regexp: '\+:' - replace: ':' - when: - - ubtu18cis_rule_6_2_4 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.4 - - user - -- name: "SCORED | 6.2.5 | PATCH | Ensure no legacy '+' entries exist in /etc/group" - replace: - path: /etc/group - regexp: '\+:' - replace: ':' - when: - - ubtu18cis_rule_6_2_5 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.5 - - user - -- name: "SCORED | 6.2.6 | PATCH | Ensure root is the only UID 0 account" - block: - - name: "SCORED | 6.2.6 | AUDIT | Ensure root is the only UID 0 account | Get non-root users with UID of 0" - shell: awk -F":" '($3 == 0 && $1 != \"root\") {i++;print $1 }' /etc/passwd - changed_when: false - failed_when: false - register: 
ubtu18cis_6_2_6_uid_0_notroot - - - name: "SCORED | 6.2.6 | PATCH | Ensure root is the only UID 0 account | Lock UID 0 users" - user: - name: "{{ item }}" - password_lock: yes - with_items: - - "{{ ubtu18cis_6_2_6_uid_0_notroot.stdout_lines }}" - when: - - ubtu18cis_disruption_high - - ubtu18cis_6_2_6_uid_0_notroot.stdout != "" - - - name: "SCORED | 6.2.6 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption high" - debug: - msg: - - "ALERT!!!! You have non-root users with a UID of 0 and ubtu18cis_disruption_high enabled" - - "This means the following accounts were password locked and will need to have the UID's manually adjusted" - - "{{ ubtu18cis_6_2_6_uid_0_notroot.stdout_lines }}" - when: - - ubtu18cis_disruption_high - - ubtu18cis_6_2_6_uid_0_notroot.stdout != "" - - - name: "SCORED | 6.2.6 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption low" - debug: - msg: - - "ALERT!!!! You have non-root users with a UID of 0 and ubtu18cis_disruption_high disabled" - - "This means no action was taken, you will need to have the UID's of the users below manually adjusted" - - "{{ ubtu18cis_6_2_6_uid_0_notroot.stdout_lines }}" - when: - - not ubtu18cis_disruption_high - - ubtu18cis_6_2_6_uid_0_notroot.stdout != "" - when: - - ubtu18cis_rule_6_2_6 - tags: - - level1-server - - level1-workstation - - patch - - scored - - rule_6.2.6 - - user - - root - -- name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity" - block: - - name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity | Determine empty value" - shell: 'echo $PATH | grep ::' - changed_when: False - failed_when: ubtu18cis_6_2_7_path_colon.rc == 0 - register: ubtu18cis_6_2_7_path_colon - - - name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity | Determine colon end" - shell: 'echo $PATH | grep :$' - changed_when: False - failed_when: ubtu18cis_6_2_7_path_colon_end.rc == 0 - register: ubtu18cis_6_2_7_path_colon_end - - - name: "SCORED | 6.2.7 | PATCH 
| Ensure root PATH Integrity | Determine working dir" - shell: echo "$PATH" - changed_when: False - failed_when: '"." in ubtu18cis_6_2_7_working_dir.stdout_lines' - register: ubtu18cis_6_2_7_working_dir - - debug: var=ubtu18cis_6_2_7_working_dir - - - name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity | Check paths" - stat: - path: "{{ item }}" - register: ubtu18cis_6_2_7_path_stat - with_items: - - "{{ ubtu18cis_6_2_7_working_dir.stdout.split(':') }}" - - - debug: var=ubtu18cis_6_2_7_path_stat - - - name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity | Alert on empty value, colon end, and no working dir" - debug: - msg: - - "The following paths have no working directory: {{ ubtu18cis_6_2_7_path_stat.results | selectattr('stat.exists','equalto','false') | map(attribute='item') | list }}" - - # - name: "SCORED | 6.2.7 | PATCH | Ensure root PATH Integrity | Set permissions" - # file: - # path: "{{ item }}" - # owner: root - # mode: 'o-w,g-w' - # follow: yes - # state: directory - # with_items: - # - "{{ ubtu18cis_6_2_7_path_stat | selectattr('exists','==','true') | map(attribute='path') }}" - when: - - ubtu18cis_rule_6_2_7 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.7 - - user - - root - - notimplemented - -- name: "SCORED | 6.2.8 | PATCH | Ensure users' home directories permissions are 750 or more restrictive" - block: - - name: "SCORED | 6.2.8 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Stat home directories" - stat: - path: "{{ item }}" - with_items: "{{ ubtu18cis_passwd | selectattr('uid', '>=', 1000) | selectattr('uid', '!=', 65534) | map(attribute='dir') | list }}" - register: ubtu18cis_6_2_8_audit - - - name: "SCORED | 6.2.8 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Find home directories more 750" - command: find -H {{ item.0 | quote }} -not -type l -perm /027 - register: ubtu18cis_6_2_8_patch_audit - changed_when: 
ubtu18cis_6_2_8_patch_audit.stdout != "" - when: - - item.1.exists - with_together: - - "{{ ubtu18cis_6_2_8_audit.results | map(attribute='item') | list }}" - - "{{ ubtu18cis_6_2_8_audit.results | map(attribute='stat') | list }}" - loop_control: - label: "{{ item.0 }}" - - - name: "SCORED | 6.2.8 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set home perms" - file: - path: "{{ item.0 }}" - recurse: yes - mode: a-st,g-w,o-rwx - register: ubtu18cis_6_2_8_patch - when: - - ubtu18cis_disruption_high - - item.1.exists - with_together: - - "{{ ubtu18cis_6_2_8_audit.results | map(attribute='item') | list }}" - - "{{ ubtu18cis_6_2_8_audit.results | map(attribute='stat') | list }}" - loop_control: - label: "{{ item.0 }}" - - # set default ACLs so the homedir has an effective umask of 0027 - - name: "SCORED | 6.2.8 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set ACL's" - acl: - path: "{{ item.0 }}" - default: yes - state: present - recursive: yes - etype: "{{ item.1.etype }}" - permissions: "{{ item.1.mode }}" - when: not ubtu18cis_system_is_container - with_nested: - - "{{ (ansible_check_mode | ternary(ubtu18cis_6_2_8_patch_audit, ubtu18cis_6_2_8_patch)).results | - rejectattr('skipped', 'defined') | map(attribute='item') | map('first') | list }}" - - - - etype: group - mode: rx - - etype: other - mode: '0' - when: - - ubtu18cis_rule_6_2_8 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.8 - - user - -- name: "SCORED | 6.2.9 | PATCH | Ensure users own their home directories" - file: - path: "{{ item.dir }}" - owner: "{{ item.id }}" - state: directory - with_items: "{{ ubtu18cis_passwd }}" - loop_control: - label: "{{ ubtu18cis_passwd_label }}" - when: - - ubtu18cis_rule_6_2_9 - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.9 - - user - -- name: "SCORED | 6.2.10 | PATCH | Ensure users' dot files are not 
group or world writable" - block: - - name: "SCORED | 6.2.10 | AUDIT | Ensure users' dot files are not group or world-writable | Check for files" - shell: find /home/ -name "\.*" -perm /g+w,o+w - changed_when: no - failed_when: no - register: ubtu18cis_6_2_10_audit - - - name: "SCORED | 6.2.10 | AUDIT | Ensure users' dot files are not group or world-writable | Alert on files found" - debug: - msg: "Good news! We have not found any group or world-writable dot files on your sytem" - failed_when: false - changed_when: false - when: - - ubtu18cis_6_2_10_audit.stdout == "" - - - name: "SCORED | 6.2.10 | PATCH | Ensure users' dot files are not group or world-writable | Changes files if configured" - file: - path: '{{ item }}' - mode: go-w - with_items: "{{ ubtu18cis_6_2_10_audit.stdout_lines }}" - when: - - ubtu18cis_6_2_10_audit.stdout != "" - - ubtu18cis_dotperm_ansibleManaged - when: - - ubtu18cis_rule_6_2_10 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.10 - - user - -- name: "SCORED | 6.2.11 | PATCH | Ensure no users have .forward files" - file: - dest: "~{{ item }}/.forward" - state: absent - with_items: - - "{{ ubtu18cis_users.stdout_lines }}" - when: - - ubtu18cis_rule_6_2_11 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.11 - - user - -- name: "SCORED | 6.2.12 | PATCH | Ensure no users have .netrc files" - file: - dest: "~{{ item }}/.netrc" - state: absent - with_items: - - "{{ ubtu18cis_users.stdout_lines }}" - when: - - ubtu18cis_rule_6_2_12 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.12 - - user - -- name: "SCORED | 6.2.13 | PATCH | Ensure users' .netrc Files are not group or world accessible" - file: - dest: "~{{ item }}/.netrc" - mode: go-w - failed_when: false - with_items: - - "{{ ubtu18cis_users.stdout_lines }}" - when: - - ubtu18cis_rule_6_2_13 - - 
ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.13 - - user - -- name: "SCORED | 6.2.14 | PATCH | Ensure no users have .rhosts files" - file: - dest: "~{{ item }}/.rhosts" - state: absent - with_items: - - "{{ ubtu18cis_users.stdout_lines }}" - when: - - ubtu18cis_rule_6_2_14 - - ubtu18cis_disruption_high - tags: - - level1-server - - level1-workstation - - scored - - patch - - rule_6.2.14 - - user - -- name: "SCORED | 6.2.15 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group" - block: - - name: "SCORED | 6.2.15 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Check /etc/passwd entries" - shell: pwck -r | grep 'no group' | awk '{ gsub("[:\47]",""); print $2}' - changed_when: false - failed_when: false - register: ubtu18cis_6_2_15_passwd_gid_check - - - name: "SCORED | 6.2.15 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Print message that all groups match between passwd and group files" - debug: - msg: "Good News! There are no users that have non-existent GUIDs (Groups)" - when: ubtu18cis_6_2_15_passwd_gid_check.stdout == "" - - - name: "SCORED | 6.2.15 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Print warning about users with invalid GIDs missing GID entries in /etc/group" - debug: - msg: "WARNING!!!! 
The following users have non-existent GIDs (Groups): {{ ubtu18cis_6_2_15_passwd_gid_check.stdout_lines | join (', ') }}" - when: ubtu18cis_6_2_15_passwd_gid_check.stdout != "" - when: - - ubtu18cis_rule_6_2_15 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.15 - - groups - -- name: "SCORED | 6.2.16 | AUDIT | Ensure no duplicate UIDs exist" - block: - - name: "SCORED | 6.2.16 | AUDIT | Ensure no duplicate UIDs exist | Check for duplicate UIDs" - shell: "pwck -r | awk -F: '{if ($3 in uid) print $1 ; else uid[$3]}' /etc/passwd" - changed_when: false - failed_when: false - register: ubtu18cis_6_2_16_user_uid_check - - - name: "SCORED | 6.2.16 | AUDIT | Ensure no duplicate UIDs exist | Print message that no duplicate UIDs exist" - debug: - msg: "Good News! There are no duplicate UID's in the system" - when: ubtu18cis_6_2_16_user_uid_check.stdout == "" - - - name: "SCORED | 6.2.16 | AUDIT | Ensure no duplicate UIDs exist | Print warning about users with duplicate UIDs" - debug: - msg: "Warning!!!! The following users have UIDs that are duplicates: {{ ubtu18cis_6_2_16_user_uid_check.stdout_lines }}" - when: ubtu18cis_6_2_16_user_uid_check.stdout != "" - when: - - ubtu18cis_rule_6_2_16 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.16 - - user - -- name: "SCORED | 6.2.17 | AUDIT | Ensure no duplicate GIDs exist" - block: - - name: "SCORED | 6.2.17 | AUDIT | Ensure no duplicate GIDs exist | Check for duplicate GIDs" - shell: "pwck -r | awk -F: '{if ($3 in users) print $1 ; else users[$3]}' /etc/group" - changed_when: no - failed_when: no - register: user_user_check - - - name: "SCORED | 6.2.17 | AUDIT | Ensure no duplicate GIDs exist | Print message that no duplicate GID's exist" - debug: - msg: "Good News! 
There are no duplicate GIDs in the system" - when: user_user_check.stdout == "" - - - name: "SCORED | 6.2.17 | AUDIT | Ensure no duplicate GIDs exist | Print warning about users with duplicate GIDs" - debug: - msg: "Warning: The following groups have duplicate GIDs: {{ user_user_check.stdout_lines }}" - when: user_user_check.stdout != "" - when: - - ubtu18cis_rule_6_2_17 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.17 - - groups - -- name: "SCORED | 6.2.18 | AUDIT | Ensure no duplicate user names exist" - block: - - name: "SCORED | 6.2.18 | AUDIT | Ensure no duplicate user names exist | Check for duplicate User Names" - shell: "pwck -r | awk -F: '{if ($1 in users) print $1 ; else users[$1]}' /etc/passwd" - changed_when: no - failed_when: no - register: ubtu18cis_6_2_18_user_username_check - - - name: "SCORED | 6.2.18 | AUDIT | Ensure no duplicate user names exist | Print message that no duplicate user names exist" - debug: - msg: "Good News! There are no duplicate user names in the system" - when: ubtu18cis_6_2_18_user_username_check.stdout == "" - - - name: "SCORED | 6.2.18 | AUDIT | Ensure no duplicate user names exist | Print warning about users with duplicate User Names" - debug: - msg: "Warning: The following user names are duplicates: {{ ubtu18cis_6_2_18_user_username_check.stdout_lines }}" - when: ubtu18cis_6_2_18_user_username_check.stdout != "" - when: - - ubtu18cis_rule_6_2_18 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.18 - - user - -- name: "SCORED | 6.2.19 | AUDIT | Ensure no duplicate group names exist" - block: - - name: "SCORED | 6.2.19 | AUDIT | Ensure no duplicate group names exist | Check for duplicate group names" - shell: 'getent passwd | cut -d: -f1 | sort -n | uniq -d' - changed_when: false - failed_when: false - register: ubtu18cis_6_2_19_group_group_check - - - name: "SCORED | 6.2.19 | AUDIT | Ensure no duplicate group names exist | Print message that no duplicate 
groups exist" - debug: - msg: "Good News! There are no duplicate group names in the system" - when: ubtu18cis_6_2_19_group_group_check.stdout == "" - - - name: "SCORED | 6.2.19 | AUDIT | Ensure no duplicate group names exist | Print warning about users with duplicate group names" - debug: - msg: "Warning: The following group names are duplicates: {{ ubtu18cis_6_2_19_group_group_check.stdout_lines }}" - when: ubtu18cis_6_2_19_group_group_check.stdout != "" - when: - - ubtu18cis_rule_6_2_19 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.19 - - groups - -- name: "SCORED | 6.2.20 | AUDIT | Ensure shadow group is empty" - block: - - name: "SCORED | 6.2.20 | AUDIT | Ensure shadow group is empty | Get Shadow GID" - shell: grep ^shadow /etc/group | cut -f3 -d":" - changed_when: false - failed_when: false - register: ubtu18cis_6_2_20_shadow_gid - - - name: "SCORED | 6.2.20 | AUDIT | Ensure shadow group is empty | List of users with Shadow GID" - shell: awk -F":" '($4 == "{{ ubtu18cis_6_2_20_shadow_gid.stdout }}") { print }' /etc/passwd | cut -f1 -d":" - changed_when: false - failed_when: false - register: ubtu18cis_6_2_20_users_shadow_gid - - - name: "SCORED | 6.2.20 | AUDIT | Ensure shadow group is empty | Message on no users" - debug: - msg: "Good News! There are no users with the Shado GID on your system" - when: ubtu18cis_6_2_20_users_shadow_gid.stdout == "" - - - name: "SCORED | 6.2.20 | AUDIT | Ensure shadow group is empty | Message on users with Shadow GID" - debug: - msg: - - "WARNING!!!! 
There are users that are in the Shadow group" - - "To conform to CIS standards no users should be in this group" - - "Please move the users below into another group" - - "{{ ubtu18cis_6_2_20_users_shadow_gid.stdout_lines }}" - when: ubtu18cis_6_2_20_users_shadow_gid.stdout != "" - when: - - ubtu18cis_rule_6_2_20 - tags: - - level1-server - - level1-workstation - - scored - - audit - - rule_6.2.20 - - groups - - user diff --git a/tasks/section_1/cis_1.1.x.yml b/tasks/section_1/cis_1.1.x.yml new file mode 100644 index 0000000..12e2744 --- /dev/null +++ b/tasks/section_1/cis_1.1.x.yml @@ -0,0 +1,579 @@ +--- + +- name: "1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled" + block: + - name: "1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/cramfs.conf + regexp: "^(#)?install cramfs(\\s|$)" + line: install cramfs /bin/true + create: true + + - name: "1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | Disable cramfs" + community.general.modprobe: + name: cramfs + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.1 + - cramfs + +- name: "1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled" + block: + - name: "1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/freevxfs.conf + regexp: "^(#)?install freevxfs(\\s|$)" + line: install freevxfs /bin/true + create: true + + - name: "1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled | Disable freevxfs" + community.general.modprobe: + name: freevxfs + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.2 + - freevxfs + +- 
name: "1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled" + block: + - name: "1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/jffs2.conf + regexp: "^(#)?install jffs2(\\s|$)" + line: install jffs2 /bin/true + create: true + + - name: "1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled | Disable jffs2" + community.general.modprobe: + name: jffs2 + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.3 + - jffs2 + +- name: "1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled" + block: + - name: "1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/hfs.conf + regexp: "^(#)?install hfs(\\s|$)" + line: install hfs /bin/true + create: true + + - name: "1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled | Disable hfs" + community.general.modprobe: + name: hfs + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.4 + - hfs + +- name: "1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled" + block: + - name: "1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/hfsplus.conf + regexp: "^(#)?install hfsplus(\\s|$)" + line: install hfsplus /bin/true + create: true + + - name: "1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled | Disable hfsplus" + community.general.modprobe: + name: hfsplus + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.5 
+ - hfsplus + +- name: "1.1.1.6 | PATCH | Ensure mounting of udf filesystems is disabled" + block: + - name: "1.1.1.6 | PATCH | Ensure mounting of udf filesystems is disabled | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/udf.conf + regexp: "^(#)?install udf(\\s|$)" + line: install udf /bin/true + create: true + + - name: "1.1.1.6 | PATCH | Ensure mounting of udf filesystems is disabled | Disable udf" + community.general.modprobe: + name: udf + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.6 + - udf + +- name: "1.1.2 | PATCH | Ensure /tmp is configured" + ansible.posix.mount: + path: /tmp + src: /tmp + state: mounted + fstype: tmpfs + when: + - ubtu18cis_rule_1_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.2 + - tmp + +- name: | + "1.1.3 | PATCH | Ensure nodev option set on /tmp partition" + "1.1.4 | PATCH | Ensure nosuid option set on /tmp partition" + "1.1.5 | PATCH | Ensure noexec option set on /tmp partition" + ansible.posix.mount: + name: /tmp + src: "{{ item.device }}" + state: mounted + fstype: "{{ item.fstype }}" + opts: "defaults,{% if ubtu18cis_rule_1_1_3 %}nodev,{% endif %}{% if ubtu18cis_rule_1_1_4 %}nosuid,{% endif %}{% if ubtu18cis_rule_1_1_5 %}noexec{% endif %}" + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - ubtu18cis_rule_1_1_3 or + ubtu18cis_rule_1_1_4 or + ubtu18cis_rule_1_1_5 + - item.mount == '/tmp' + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.3 + - rule_1.1.4 + - rule_1.1.5 + - tmp + +- name: "1.1.6 | PATCH | Ensure /dev/shm is configured" + ansible.posix.mount: + path: /dev/shm + src: /dev/shm + state: mounted + fstype: tmpfs + when: + - ubtu18cis_rule_1_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.6 + - tmp + +- 
name: | + "1.1.7 | PATCH | Ensure nodev option set on /dev/shm partition" + "1.1.8 | PATCH | Ensure nosuid option set on /dev/shm partition" + "1.1.9 | PATCH | Ensure noexec option set on /dev/shm partition" + ansible.posix.mount: + name: /dev/shm + src: "{{ item.device }}" + state: mounted + fstype: "{{ item.fstype }}" + opts: "defaults,{% if ubtu18cis_rule_1_1_7 %}nodev,{% endif %}{% if ubtu18cis_rule_1_1_8 %}nosuid,{% endif %}{% if ubtu18cis_rule_1_1_9 %}noexec{% endif %}" + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + notify: Remount shm + when: + - ubtu18cis_rule_1_1_7 or + ubtu18cis_rule_1_1_8 or + ubtu18cis_rule_1_1_9 + - item.mount == '/dev/shm' + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.7 + - rule_1.1.8 + - rule_1.1.9 + - /dev/shm + +- name: "1.1.10 | AUDIT | Ensure separate partition exists for /var" + block: + - name: "1.1.10 | AUDIT | Ensure separate partition exists for /var | Gather /var partition" + ansible.builtin.shell: mount | grep "on /var " + changed_when: false + failed_when: false + register: ubtu18cis_1_1_10_var_mounted + + - name: DEBUG + ansible.builtin.debug: + msg: "{{ ubtu18cis_1_1_10_var_mounted }}" + + - name: "1.1.10 | AUDIT | Ensure separate partition exists for /var | Alert if /var partition does not exist" + ansible.builtin.debug: + msg: + - "Warning!! There is no separate partition for /var" + - "Please create a separate partition for /var" + when: + - ubtu18cis_1_1_10_var_mounted.stdout is not defined or + ubtu18cis_1_1_10_var_mounted.stdout | length == 0 + + - name: "1.1.10 | AUDIT | Ensure separate partition exists for /var | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.10' + when: + - ubtu18cis_1_1_10_var_mounted.stdout is not defined or + ubtu18cis_1_1_10_var_mounted.stdout | length == 0 + when: + - ubtu18cis_rule_1_1_10 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.10 + - var + +- name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp" + block: + - name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp | Gather /var/tmp partition" + ansible.builtin.shell: mount | grep "on /var/tmp " + changed_when: false + failed_when: false + register: ubtu18cis_1_1_11_var_tmp_mounted + + - name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp | Alert if /var/tmp partition does not exist" + ansible.builtin.debug: + msg: + - "Warning!! There is no separate partition for /var/tmp" + - "Please create a separate partition for /var/tmp" + when: + - ubtu18cis_1_1_11_var_tmp_mounted.stdout is not defined or + ubtu18cis_1_1_11_var_tmp_mounted.stdout | length == 0 + + - name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.11' + when: + - ubtu18cis_1_1_11_var_tmp_mounted.stdout is not defined or + ubtu18cis_1_1_11_var_tmp_mounted.stdout | length == 0 + when: + - ubtu18cis_rule_1_1_11 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.11 + - var/tmp + +- name: | + "1.1.12 | PATCH | Ensure nodev option set on /var/tmp partition" + "1.1.13 | PATCH | Ensure nosuid option set on /var/tmp partition" + "1.1.14 | PATCH | Ensure noexec option set on /var/tmp partition" + ansible.posix.mount: + name: /var/tmp + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: "defaults,{% if ubtu18cis_rule_1_1_12 %}nodev,{% endif %}{% if ubtu18cis_rule_1_1_13 %}nosuid,{% endif %}{% if ubtu18cis_rule_1_1_14 %}noexec{% endif %}" + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + notify: Remount var_tmp + when: + - ubtu18cis_rule_1_1_12 or + ubtu18cis_rule_1_1_13 or + ubtu18cis_rule_1_1_14 + - item.mount == '/var/tmp' + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.12 + - rule_1.1.13 + - rule_1.1.14 + - var_tmp + +- name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log" + block: + - name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log | Gather /var/log partition" + ansible.builtin.shell: mount | grep "on /var/log " + changed_when: false + failed_when: false + register: ubtu18cis_1_1_15_var_log_mounted + + - name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log | Alert if /var/log partition does not exist" + ansible.builtin.debug: + msg: + - "Warning!! 
There is no separate partition for /var/log" + - "Please create a separate partition for /var/log" + when: + - ubtu18cis_1_1_15_var_log_mounted.stdout is not defined or + ubtu18cis_1_1_15_var_log_mounted.stdout | length == 0 + + - name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.15' + when: + - ubtu18cis_1_1_15_var_log_mounted.stdout is not defined or + ubtu18cis_1_1_15_var_log_mounted.stdout | length == 0 + when: + - ubtu18cis_rule_1_1_15 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.15 + - var_log + +- name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit" + block: + - name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit | Gather /var/log/audit" + ansible.builtin.shell: mount | grep "on /var/log/audit " + changed_when: false + failed_when: false + register: ubtu18cis_1_1_16_var_log_audit_mounted + + - name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit | Alert if /var/log/audit partition does not exist." + ansible.builtin.debug: + msg: + - "Warning!! There is no separate partition for /var/log/audit" + - "Please create a separate partition for /var/log/audit" + when: + - ubtu18cis_1_1_16_var_log_audit_mounted.stdout is not defined or + ubtu18cis_1_1_16_var_log_audit_mounted.stdout | length == 0 + + - name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.16' + when: + - ubtu18cis_1_1_16_var_log_audit_mounted.stdout is not defined or + ubtu18cis_1_1_16_var_log_audit_mounted.stdout | length == 0 + when: + - ubtu18cis_rule_1_1_16 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.16 + - var_log_audit + +- name: "1.1.17 | AUDIT | Ensure separate partition exists for /home." 
+ block: + - name: "1.1.17 | AUDIT | Ensure separate partition exists for /home | Gather /home." + ansible.builtin.shell: mount | grep "on /home" + changed_when: false + failed_when: false + register: ubtu18cis_1_1_17_home_mounted + + - name: "1.1.17 | AUDIT | Ensure separate partition exists for /home | Alert if /home partition does not exist." + ansible.builtin.debug: + msg: + - "Warning!! There is no separate partition for /home" + - "Please create a separate partition for /home" + when: + - ubtu18cis_1_1_17_home_mounted.stdout is not defined or + ubtu18cis_1_1_17_home_mounted.stdout | length == 0 + + - name: "1.1.17 | AUDIT | Ensure separate partition exists for /home | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.17' + when: + - ubtu18cis_1_1_17_home_mounted.stdout is not defined or + ubtu18cis_1_1_17_home_mounted.stdout | length == 0 + when: + - ubtu18cis_rule_1_1_17 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.17 + - /home + +- name: "1.1.18 | PATCH | Ensure /home partition includes the nodev option." + ansible.posix.mount: + name: "/home" + src: "{{ item.device }}" + state: mounted + fstype: "{{ item.fstype }}" + opts: "nodev" + with_items: "{{ ansible_mounts }}" + when: + - ubtu18cis_rule_1_1_18 + - item.mount == "/home" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.18 + - /home + +- name: "1.1.19 | AUDIT | Ensure nodev option set on removable media partitions." + block: + - name: "1.1.19 | AUDIT | Ensure nodev option set on removable media partitions. | Warning Messsage." + ansible.builtin.debug: + msg: "Warning!! Ensure nodev option set on removable media partitions." + + - name: "1.1.19 | AUDIT | Ensure nodev option set on removable media partitions. | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.19' + when: + - ubtu18cis_rule_1_1_19 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.1.19 + - removable_media + +- name: "1.1.20 | AUDIT | Ensure nosuid option set on removable media partitions." + block: + - name: "1.1.20 | AUDIT | Ensure nosuid option set on removable media partitions. | Warning Message." + ansible.builtin.debug: + msg: "Warning!! Ensure nosuid option set on removable media partitions." + + - name: "1.1.20 | AUDIT | Ensure nosuid option set on removable media partitions. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.20' + when: + - ubtu18cis_rule_1_1_20 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.1.20 + - removable_media + +- name: "1.1.21 | AUDIT | Ensure noexec option set on removable media partitions." + block: + - name: "1.1.21 | AUDIT | Ensure noexec option set on removable media partitions. | Warning Message." + ansible.builtin.debug: + msg: "Warning!! Ensure noexec option set on removable media partitions." + + - name: "1.1.21 | AUDIT | Ensure noexec option set on removable media partitions. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.1.21' + when: + - ubtu18cis_rule_1_1_21 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.1.21 + - removable_media + +- name: "1.1.22 | PATCH | Ensure sticky bit is set on all world-writable directories" + ansible.builtin.shell: df --local -P | awk '{if (NR!=1) print $6}' | xargs -I '{}' find '{}' -xdev -type d \( -perm -0002 -a ! 
-perm -1000 \) 2>/dev/null | xargs -I '{}' chmod a+t '{}' + failed_when: ubtu18cis_1_1_22_status.rc > 0 + register: ubtu18cis_1_1_22_status + when: + - ubtu18cis_rule_1_1_22 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.22 + - sticky_bit + +- name: "1.1.23 | PATCH | Disable Automounting" + ansible.builtin.service: + name: autofs + state: stopped + enabled: false + when: + - ubtu18cis_rule_1_1_23 + - ubtu18cis_autofs_service_status.stdout == "loaded" + - not ubtu18cis_allow_autofs + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.23 + - automounting + +- name: "1.1.24 | PATCH | Disable USB Storage" + block: + - name: "1.1.24 | PATCH | Disable USB Storage | Set modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/usb_storage.conf + regexp: '^install usb-storage' + line: 'install usb-storage /bin/true' + create: true + + - name: "1.1.24 | PATCH | Disable USB Storage | Remove usb-storage module" + community.general.modprobe: + name: usb-storage + state: absent + when: ansible_connection != 'docker' + when: + - ubtu18cis_rule_1_1_24 + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.1.24 + - usb_storage diff --git a/tasks/section_1/cis_1.2.x.yml b/tasks/section_1/cis_1.2.x.yml new file mode 100644 index 0000000..0bd3b42 --- /dev/null +++ b/tasks/section_1/cis_1.2.x.yml @@ -0,0 +1,62 @@ +--- +- name: "1.2.1 | AUDIT | Ensure package manager repositories are configured" + block: + - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Get repositories" + ansible.builtin.shell: apt-cache policy + changed_when: false + failed_when: false + register: ubtu18cis_1_2_1_apt_policy + + - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Message out repository configs" + ansible.builtin.debug: + msg: + - "Warning!! 
Below are the apt package repositories" + - "Please review to make sure they conform to your sites policies" + - "{{ ubtu18cis_1_2_1_apt_policy.stdout_lines }}" + + - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.2.1' + when: + - ubtu18cis_rule_1_2_1 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.2.1 + - apt + +- name: "1.2.2 | AUDIT | Ensure GPG keys are configured" + block: + - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Get apt gpg keys" + ansible.builtin.shell: apt-key list + changed_when: false + failed_when: false + register: ubtu18cis_1_2_2_apt_gpgkeys + + - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Message out apt gpg keys" + ansible.builtin.debug: + msg: + - "Warning!! Below are the apt gpg kyes configured" + - "Please review to make sure they are configured" + - "in accordance with site policy" + - "{{ ubtu18cis_1_2_2_apt_gpgkeys.stdout_lines }}" + + - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.2.2' + when: + - ubtu18cis_rule_1_2_2 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.2.2 + - gpg + - keys diff --git a/tasks/section_1/cis_1.3.x.yml b/tasks/section_1/cis_1.3.x.yml new file mode 100644 index 0000000..eb3bdde --- /dev/null +++ b/tasks/section_1/cis_1.3.x.yml @@ -0,0 +1,35 @@ +--- +- name: "1.3.1 | PATCH | Ensure AIDE is installed" + ansible.builtin.package: + name: ['aide', 'aide-common'] + state: present + when: + - ubtu18cis_rule_1_3_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.3.1 + - aide + +- name: "1.3.2 | PATCH | Ensure filesystem integrity is regularly checked" + ansible.builtin.cron: + name: Run AIDE integrity check + cron_file: "{{ ubtu18cis_aide_cron['cron_file'] }}" + user: "{{ ubtu18cis_aide_cron['cron_user'] }}" + minute: "{{ ubtu18cis_aide_cron['aide_minute'] | default('0') }}" + hour: "{{ ubtu18cis_aide_cron['aide_hour'] | default('5') }}" + day: "{{ ubtu18cis_aide_cron['aide_day'] | default('*') }}" + month: "{{ ubtu18cis_aide_cron['aide_month'] | default('*') }}" + weekday: "{{ ubtu18cis_aide_cron['aide_weekday'] | default('*') }}" + job: "{{ ubtu18cis_aide_cron['aide_job'] }}" + when: + - ubtu18cis_rule_1_3_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.3.2 + - cron diff --git a/tasks/section_1/cis_1.4.x.yml b/tasks/section_1/cis_1.4.x.yml new file mode 100644 index 0000000..732561f --- /dev/null +++ b/tasks/section_1/cis_1.4.x.yml @@ -0,0 +1,124 @@ +--- +- name: "1.4.1 | PATCH | Ensure permissions on bootloader config are not overridden" + block: + - name: "1.4.1 | PATCH | Ensure permissions on bootloader config are not overridden | Change chmod setting" + ansible.builtin.replace: + path: /usr/sbin/grub-mkconfig + regexp: 'chmod\s\d\d\d\s\${grub_cfg}.new' + replace: 'chmod 400 ${grub_cfg}.new' + + - name: "1.4.1 | PATCH | Ensure 
permissions on bootloader config are not overridden | Remove check on password" + ansible.builtin.lineinfile: + path: /usr/sbin/grub-mkconfig + regexp: 'if \[ \"x\$\{grub_cfg\}\" != "x" \] && ! grep "\^password" \${grub_cfg}.new' + line: if [ "x${grub_cfg}" != "x" ]; then + when: + - ubtu18cis_rule_1_4_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.4.1 + - grub + +# --------------- +# --------------- +# Come back to work on. Proposed code changes need to be looked at by team and discuss +# best practice for passing grub info. Control 90% done for now we are just auditing. +# --------------- +# --------------- +- name: "1.4.2 | PATCH | Ensure bootloader password is set." + block: + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. | Superuser Check In Grub." + ansible.builtin.shell: grep "^set superusers" /boot/grub/grub.cfg + changed_when: false + failed_when: false + register: ubtu18cis_bootloader_grub_check + + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. | Hash Password Check In Grub." + ansible.builtin.shell: grep "^password" /boot/grub/grub.cfg + changed_when: false + failed_when: false + register: ubtu18cis_bootloader_hash_check + + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. | Set Fact." + ansible.builtin.set_fact: + ubtu18cis_bootloader_grub_name: "{{ ubtu18cis_bootloader_grub_check.stdout | regex_replace('\"','') | regex_replace('set superusers=','') }}" + + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. | No Username And Password Meet Set For Bootloader." + ansible.builtin.debug: + msg: + - "Warning!! You do not currently have the bootloader password configured properly." + - "Please review your grub.cfg and check for errors." + when: "'superuser' not in ubtu18cis_bootloader_grub_check.stdout or 'password' not in ubtu18cis_bootloader_hash_check.stdout" + + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. 
| Verify Username And Password Meet Site Policies For Bootloader." + ansible.builtin.debug: + msg: + - "Warning!! Below is your current bootloader configuration." + - "Please review to make sure they conform to your site policies." + - "Username: {{ ubtu18cis_bootloader_grub_name }}" + - "Password Hash: {{ ubtu18cis_bootloader_hash_check.stdout }}" + when: + - "'superuser' in ubtu18cis_bootloader_grub_check.stdout" + - "'password' in ubtu18cis_bootloader_hash_check.stdout" + + - name: "1.4.2 | AUDIT | Ensure bootloader password is set. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.4.2' + when: + - "'superuser' in ubtu18cis_bootloader_grub_check.stdout" + - "'password' in ubtu18cis_bootloader_hash_check.stdout or + 'superuser' not in ubtu18cis_bootloader_grub_check.stdout or + 'password' not in ubtu18cis_bootloader_hash_check.stdout" + when: + - ubtu18cis_rule_1_4_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.4.2 + - grub + +- name: "1.4.3 | PATCH | Ensure permissions on bootloader config are configured" + block: + - name: "1.4.3 | AUDIT | Ensure permissions on bootloader config are configured | Check for Grub file" + ansible.builtin.stat: + path: /boot/grub/grub.cfg + register: ubtu18cis_1_4_3_grub_cfg_status + + - name: "1.4.3 | PATCH | Ensure permissions on bootloader config are configured | Set permissions" + ansible.builtin.file: + path: /boot/grub/grub.cfg + owner: root + group: root + mode: 0400 + when: + - ubtu18cis_1_4_3_grub_cfg_status.stat.exists + when: + - ubtu18cis_rule_1_4_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.4.3 + - grub + +- name: "1.4.4 | PATCH | Ensure authentication required for single user mode" + ansible.builtin.user: + name: root + password: "{{ ubtu18cis_root_pw }}" + when: + - ubtu18cis_rule_1_4_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.4.4 + - passwd 
diff --git a/tasks/section_1/cis_1.5.x.yml b/tasks/section_1/cis_1.5.x.yml new file mode 100644 index 0000000..a80a1dd --- /dev/null +++ b/tasks/section_1/cis_1.5.x.yml @@ -0,0 +1,100 @@ +--- +- name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled" + block: + - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled | Find status of XD/NX" + ansible.builtin.shell: "journalctl | grep 'protection: active'" + changed_when: false + failed_when: false + register: ubtu18cis_1_5_1_xdnx_status + + - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled | Alert if XD/NX is not enabled" + ansible.builtin.debug: + msg: + - "Warning!! You do not have XD/NX (Execute Disable/No Execute) enabled" + - "To conform to CIS standards this needs to be enabled" + when: "'active' not in ubtu18cis_1_5_1_xdnx_status.stdout" + + - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '1.5.1' + when: + - ubtu18cis_rule_1_5_1 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.5.1 + - xd/nx + +- name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled" + block: + - name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set ASLR settings" + ansible.builtin.lineinfile: + path: /etc/sysctl.conf + regexp: '^kernel.randomize_va_space' + line: 'kernel.randomize_va_space = 2' + + - name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set active kernel parameter" + ansible.posix.sysctl: + name: kernel.randomize_va_space + value: '2' + when: + - ubtu18cis_rule_1_5_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.5.2 + - aslr + +- name: "1.5.3 | PATCH | Ensure prelink is disabled" + block: + - name: "1.5.3 | PATCH | Ensure prelink is disabled | Restore binaries to normal" + ansible.builtin.shell: prelink -ua + changed_when: false + failed_when: 
false + + - name: "1.5.3 | PATCH | Ensure prelink is disabled | Remove prelink package" + ansible.builtin.package: + name: prelink + state: absent + when: + - ubtu18cis_rule_1_5_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.5.3 + - prelink + +- name: "1.5.4 | PATCH | Ensure core dumps are restricted" + block: + - name: "1.5.4 | PATCH | Ensure core dumps are restricted" + ansible.builtin.lineinfile: + path: /etc/security/limits.conf + regexp: '^#?\\*.*core' + line: '* hard core 0' + insertbefore: '^# End of file' + + - name: "1.5.4 | PATCH | Ensure core dumps are restricted" + ansible.posix.sysctl: + name: fs.suid_dumpable + value: '0' + state: present + reload: true + sysctl_set: true + ignoreerrors: true + when: + - ubtu18cis_rule_1_5_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.5.4 + - coredump diff --git a/tasks/section_1/cis_1.6.x.yml b/tasks/section_1/cis_1.6.x.yml new file mode 100644 index 0000000..8f019fe --- /dev/null +++ b/tasks/section_1/cis_1.6.x.yml @@ -0,0 +1,116 @@ +--- +- name: "1.6.1.1 | PATCH | Ensure AppArmor is installed" + ansible.builtin.package: + name: ['apparmor', 'apparmor-utils'] + state: present + when: + - ubtu18cis_rule_1_6_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.6.1.1 + - apparmor + +- name: "1.6.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration" + block: + - name: "1.6.1.2 | AUDIT | Ensure AppArmor is enabled in the bootloader configuration | Get current settings" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + register: ubtu18cis_1_6_1_2_cmdline_settings + + - name: "1.6.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set apparmor settings if none exist" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^GRUB_CMDLINE_LINUX' + line: 
'GRUB_CMDLINE_LINUX="apparmor=1 security=apparmor {{ ubtu18cis_1_6_1_2_cmdline_settings.stdout }}"' + insertafter: '^GRUB_' + when: + - "'apparmor' not in ubtu18cis_1_6_1_2_cmdline_settings.stdout" + - "'security' not in ubtu18cis_1_6_1_2_cmdline_settings.stdout" + notify: grub update + + - name: "1.6.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set apparmor settings if none exist | Replace apparmor settings when exists" + ansible.builtin.replace: + path: /etc/default/grub + regexp: "{{ item.regexp }}" + replace: "{{ item.replace }}" + with_items: + - { regexp: 'apparmor=\S+', replace: 'apparmor=1' } + - { regexp: 'security=\S+', replace: 'security=apparmor' } + when: + - "'apparmor' in ubtu18cis_1_6_1_2_cmdline_settings.stdout" + - "'security' in ubtu18cis_1_6_1_2_cmdline_settings.stdout" + notify: grub update + when: + - ubtu18cis_rule_1_6_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.6.1.2 + - apparmor + +# Any unconfined processes may need to have a profile created or activated for them and then be restarted So manual task +- name: "1.6.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode" + block: + - name: "1.6.1.3 | AUDIT | Ensure all AppArmor Profiles are in enforce or complain mode | capture state" + ansible.builtin.shell: apparmor_status | grep "processes.*unconfined" | awk '{ print $1 }' + changed_when: false + failed_when: false + register: ubtu18cis_rule_1_6_1_3_apparmor_unconfined + + - name: "1.6.1.3 | AUDIT | Ensure all AppArmor Profiles are in enforce or complain mode | Warning" + ansible.builtin.debug: + msg: "Warning!! 
AppArmor mode needs to be confirmed | Any unconfined processes may need to have a profile created" + when: ubtu18cis_rule_1_6_1_3_apparmor_unconfined.stdout != '0' + + - name: "1.6.1.3 | AUDIT | Ensure all AppArmor Profiles are in enforce or complain mode | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + when: ubtu18cis_rule_1_6_1_3_apparmor_unconfined.stdout != '0' + vars: + warn_control_id: '1.6.1.3' + when: + - ubtu18cis_rule_1_6_1_3 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_1.6.1.3 + - apparmor + +# These tasks can be forced to run but some apps may not have profiles associated so this will need to be manual task +- name: "1.6.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing" + block: + - name: "1.6.1.4 | AUDIT | Ensure all AppArmor Profiles are enforcing | capture state" + ansible.builtin.shell: apparmor_status | grep -E "processes.*complain" | awk '{ print $1 }' + changed_when: false + failed_when: false + register: ubtu18cis_rule_1_6_1_4_apparmor_enforced + + - name: "1.6.1.4 | AUDIT | Ensure all AppArmor Profiles are enforcing | Warning" + ansible.builtin.debug: + msg: "Warning!! 
AppArmor mode is not in enforcing | Any unconfined processes may need to have a profile created" + when: ubtu18cis_rule_1_6_1_4_apparmor_enforced.stdout != '0' + + - name: "1.6.1.4 | AUDIT | Ensure all AppArmor Profiles are enforcing | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + when: ubtu18cis_rule_1_6_1_4_apparmor_enforced.stdout != '0' + vars: + warn_control_id: '1.6.1.4' + when: + - ubtu18cis_rule_1_6_1_4 + tags: + - level2-server + - level2-workstation + - automated + - scored + - patch + - rule_1.6.1.4 + - apparmor diff --git a/tasks/section_1/cis_1.7.x.yml b/tasks/section_1/cis_1.7.x.yml new file mode 100644 index 0000000..a491d2d --- /dev/null +++ b/tasks/section_1/cis_1.7.x.yml @@ -0,0 +1,93 @@ +--- +- name: "1.7.1 | PATCH | Ensure message of the day is configured properly" + ansible.builtin.template: + src: etc/motd.j2 + dest: /etc/motd + when: + - ubtu18cis_rule_1_7_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.1 + - motd + +- name: "1.7.2 | PATCH | Ensure permissions on /etc/issue.net are configured" + ansible.builtin.file: + path: /etc/issue.net + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_1_7_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.2 + - permissions + - banner + +- name: "1.7.3 | PATCH | Ensure permissions on /etc/issue are configured" + ansible.builtin.file: + path: /etc/issue + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_1_7_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.3 + - permissions + - banner + +- name: "1.7.4 | PATCH | Ensure permissions on /etc/motd are configured" + ansible.builtin.file: + path: /etc/motd + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_1_7_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.4 + - permissions + - motd + +- name: "1.7.5 | PATCH | Ensure remote login 
warning banner is configured properly" + ansible.builtin.template: + src: etc/issue.net.j2 + dest: /etc/issue.net + when: + - ubtu18cis_rule_1_7_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.5 + - banner + +- name: "1.7.6 | PATCH | Ensure local login warning banner is configured properly" + ansible.builtin.template: + src: etc/issue.j2 + dest: /etc/issue + when: + - ubtu18cis_rule_1_7_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.7.6 + - banner diff --git a/tasks/section_1/cis_1.8.x.yml b/tasks/section_1/cis_1.8.x.yml new file mode 100644 index 0000000..5234847 --- /dev/null +++ b/tasks/section_1/cis_1.8.x.yml @@ -0,0 +1,77 @@ +--- +- name: "1.8.1 | PATCH | Ensure GNOME Display Manager is removed" + ansible.builtin.package: + name: gdm3 + state: absent + when: + - ubtu18cis_rule_1_8_1 + - not ubtu18cis_desktop_required + tags: + - level2-server + - manual + - patch + - rule_1.8.1 + - gnome + +- name: "1.8.2 | PATCH | Ensure GDM login banner is configured" + ansible.builtin.lineinfile: + path: /etc/gdm3/greeter.dconf-defaults + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + create: true + owner: root + group: root + mode: 0644 + notify: reload gdm + with_items: + - { regexp: '\[org\/gnome\/login-screen\]', line: '[org/gnome/login-screen]', insertafter: EOF } + - { regexp: 'banner-message-enable', line: 'banner-message-enable=true', insertafter: '\[org\/gnome\/login-screen\]'} + - { regexp: 'banner-message-text', line: 'banner-message-text={{ ubtu18cis_warning_banner }}', insertafter: 'banner-message-enable' } + when: + - ubtu18cis_rule_1_8_2 + - ubtu18cis_desktop_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.8.2 + - banner + +- name: "1.8.3 | PATCH | Ensure disable-user-list is enabled" + ansible.builtin.lineinfile: + path: /etc/gdm3/greeter.dconf-defaul + regexp: '^disable-user-list=' + 
line: 'disable-user-list=true' + insertafter: 'banner-message-text=' + create: true + owner: root + group: root + mode: 0644 + notify: reload gdm + when: + - ubtu18cis_rule_1_8_3 + - ubtu18cis_desktop_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.8.3 + - gdm3 + +- name: "1.8.4 | PATCH | Ensure XDCMP is not enabled" + ansible.builtin.lineinfile: + path: /etc/gdm3/custom.conf + regexp: '^Enable=true' + state: absent + when: + - ubtu18cis_rule_1_8_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.8.4 + - xdcmp diff --git a/tasks/section_1/cis_1.9.yml b/tasks/section_1/cis_1.9.yml new file mode 100644 index 0000000..9299eb1 --- /dev/null +++ b/tasks/section_1/cis_1.9.yml @@ -0,0 +1,14 @@ +--- +- name: "1.9 | PATCH | Ensure updates, patches, and additional security software are installed" + ansible.builtin.package: + name: "*" + state: latest + when: + - ubtu18cis_rule_1_9 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_1.9 + - patching diff --git a/tasks/section_1/main.yml b/tasks/section_1/main.yml new file mode 100644 index 0000000..1842296 --- /dev/null +++ b/tasks/section_1/main.yml @@ -0,0 +1,36 @@ +--- +- name: "SECTION | 1.1 | Disable Unused Filesystems" + ansible.builtin.import_tasks: + file: cis_1.1.x.yml + +- name: "SECTION | 1.2 | Configure Software Updates" + ansible.builtin.import_tasks: + file: cis_1.2.x.yml + +- name: "SECTION | 1.3. 
| Filesystem Integrity Checking" + ansible.builtin.import_tasks: + file: cis_1.3.x.yml + +- name: "SECTION | 1.4 | Secure Boot Settings" + ansible.builtin.import_tasks: + file: cis_1.4.x.yml + +- name: "SECTION | 1.5 | Additional Process Hardening" + ansible.builtin.import_tasks: + file: cis_1.5.x.yml + +- name: "SECTION | 1.6 | Mandatory Access Control" + ansible.builtin.import_tasks: + file: cis_1.6.x.yml + +- name: "SECTION | 1.7 | Command Line Warning Banners" + ansible.builtin.import_tasks: + file: cis_1.7.x.yml + +- name: "SECTION | 1.8 | GNOME Display Manager" + ansible.builtin.import_tasks: + file: cis_1.8.x.yml + +- name: "SECTION | 1.9 | Ensure updates, patches, and additional security software are installed" + ansible.builtin.import_tasks: + file: cis_1.9.yml diff --git a/tasks/section_2/cis_2.1.x.yml b/tasks/section_2/cis_2.1.x.yml new file mode 100644 index 0000000..86c08be --- /dev/null +++ b/tasks/section_2/cis_2.1.x.yml @@ -0,0 +1,409 @@ +--- +- name: "2.1.1.1 | PATCH | Ensure time synchronization is in use" + ansible.builtin.package: + name: "{{ ubtu18cis_time_sync_tool }}" + state: present + when: + - ubtu18cis_rule_2_1_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.1.1 + - chrony + +- name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured" + block: + - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set configuration for systemd-timesyncd" + ansible.builtin.lineinfile: + path: /etc/systemd/timesyncd.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + with_items: + - { regexp: '^\[Time\]', line: '[Time]', insertafter: EOF } + - { regexp: '^#NTP|^NTP', line: 'NTP=0.ubuntu.pool.ntp.org 1.ubuntu.pool.ntp.org 2.ubuntu.pool.ntp.org', insertafter: '\[Time\]' } + - { regexp: '^#FallbackNTP|^FallbackNTP', line: 'FallbackNTP=ntp.ubuntu.com 3.ubuntu.pool.ntp.org', insertafter: '\[Time\]' } + - { regexp: 
'^#RootDistanceMaxSec|^RootDistanceMaxSec', line: 'RootDistanceMaxSec=1', insertafter: '\[Time\]'} + + - name: "2.1.1.2 | AUDIT | Ensure systemd-timesyncd is configured | Capture NTP state" + ansible.builtin.shell: "timedatectl status | grep NTP | cut -d':' -f2 | sed 's/^[ \\t]*//;s/[ \\t]*$//'" + changed_when: false + failed_when: false + register: ubtu18cis_2_1_1_2_ntp_status + + - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set timedatectl to ntp" + ansible.builtin.shell: timedatectl set-ntp true + changed_when: true + when: "'inactive' in ubtu18cis_2_1_1_2_ntp_status.stdout" + when: + - ubtu18cis_rule_2_1_1_2 + tags: + - level1-server + - level1-workstation + - notscored + - patch + - rule_2.1.1.2 + - systemd-timesyncd + +- name: "2.1.1.3 | PATCH | Ensure chrony is configured" + block: + - name: "2.1.1.3 | AUDIT | Ensure chrony is configured | Check for chrony user" + ansible.builtin.shell: grep chrony /etc/passwd + changed_when: false + failed_when: false + register: ubtu18cis_2_1_1_3_chrony_user_status + + - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Set chrony.conf file" + ansible.builtin.template: + src: chrony.conf.j2 + dest: /etc/chrony/chrony.conf + owner: root + group: root + mode: 0644 + + - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Create chrony user" + ansible.builtin.user: + name: chrony + shell: /usr/sbin/nologin + system: true + when: ubtu18cis_2_1_1_3_chrony_user_status.stdout | length > 0 + + - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Set option to use chrony user" + ansible.builtin.lineinfile: + path: /etc/default/chrony + regexp: '^DAEMON_OPTS' + line: 'DAEMON_OPTS="-u chrony"' + when: + - ubtu18cis_rule_2_1_1_3 + - ubtu18cis_time_sync_tool == "chrony" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.1.3 + - chrony + +- name: "2.1.1.4 | PATCH | Ensure ntp is configured" + block: + - name: "2.1.1.4 | PATCH | Ensure ntp is configured 
| Set ntp.conf settings" + ansible.builtin.template: + src: ntp.conf.j2 + dest: /etc/ntp.conf + owner: root + group: root + mode: 0644 + + - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Modify sysconfig/ntpd" + ansible.builtin.lineinfile: + path: /etc/sysconfig/ntpd + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + create: true + with_items: + - { regexp: '^OPTIONS', line: 'OPTIONS="-u ntp:ntp"'} + - { regexp: '^NTPD_OPTIONS', line: 'NTPD_OPTIONS="-u ntp:ntp"' } + + - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Modify /etc/init.d/ntp" + ansible.builtin.lineinfile: + path: /etc/init.d/ntp + regexp: '^RUNAUSER' + line: 'RUNAUSER=ntp' + when: + - ubtu18cis_rule_2_1_1_4 + - ubtu18cis_time_sync_tool == "ntp" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.1.4 + - ntp + +- name: "2.1.2 | PATCH | Ensure X Window System is not installed" + ansible.builtin.package: + name: xserver-xorg* + state: absent + when: + - ubtu18cis_rule_2_1_2 + - not ubtu18cis_desktop_required + tags: + - level1-server + - automated + - patch + - rule_2.1.2 + - xwindows + +- name: "2.1.3 | PATCH | Ensure Avahi Server is not installed" + block: + - name: "2.1.3 | PATCH | Ensure Avahi Server is not installed | Stop and disable Avahi service" + ansible.builtin.service: + name: avahi-daemon + state: stopped + enabled: false + when: avahi_service_status.stdout == "loaded" + + - name: "2.1.3 | PATCH | Ensure Avahi Server is not installed | Uninstall Avahi service" + ansible.builtin.package: + name: avahi-daemon + state: absent + when: + - ubtu18cis_rule_2_1_3 + - not ubtu18cis_avahi_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.3 + - avahi + - services + +- name: "2.1.4 | PATCH | Ensure CUPS is not installed" + ansible.builtin.package: + name: cups + state: absent + when: + - ubtu18cis_rule_2_1_4 + - not ubtu18cis_cups_server + tags: + - level1-server + - level2-workstation + - automated + - patch 
+ - rule_2.1.4 + - cups + - services + +- name: "2.1.5 | PATCH | Ensure DHCP Server is not installed" + ansible.builtin.package: + name: isc-dhcp-server + state: absent + when: + - ubtu18cis_rule_2_1_5 + - not ubtu18cis_dhcp_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.5 + - dhcp + - services + +- name: "2.1.6 | PATCH | Ensure LDAP server is not installed" + ansible.builtin.package: + name: slapd + state: absent + when: + - ubtu18cis_rule_2_1_6 + - not ubtu18cis_ldap_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.6 + - ldap + - services + +- name: "2.1.7 | PATCH | Ensure NFS is not installed" + ansible.builtin.package: + name: nfs-kernel-server + state: absent + when: + - ubtu18cis_rule_2_1_7 + - not ubtu18cis_nfs_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.7 + - nfs + - services + +- name: "2.1.8 | PATCH | Ensure DNS Server is not installed" + ansible.builtin.package: + name: bind9 + state: absent + when: + - ubtu18cis_rule_2_1_8 + - not ubtu18cis_dns_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.8 + - dns + - service + +- name: "2.1.9 | PATCH | Ensure FTP Server is not installed" + ansible.builtin.package: + name: vsftpd + state: absent + when: + - ubtu18cis_rule_2_1_9 + - not ubtu18cis_vsftpd_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.9 + - ftp + - service + +- name: "2.1.10 | PATCH | Ensure HTTP server is not installed" + ansible.builtin.package: + name: apache2 + state: absent + when: + - ubtu18cis_rule_2_1_10 + - not ubtu18cis_httpd_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.10 + - httpd + - service + +- name: "2.1.11 | PATCH | Ensure IMAP and POP3 server are not installed" + ansible.builtin.package: + name: ['dovecot-imapd', 'dovecot-pop3d'] + state: absent + when: + - 
ubtu18cis_rule_2_1_11 + - not ubtu18cis_dovecot_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.11 + - dovecot + - service + +- name: "2.1.12 | PATCH | Ensure Samba is not installed" + ansible.builtin.package: + name: samba + state: absent + when: + - ubtu18cis_rule_2_1_12 + - not ubtu18cis_smb_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.12 + - samba + - service + +- name: "2.1.13 | PATCH | Ensure HTTP Proxy Server is not installed" + ansible.builtin.package: + name: squid + state: absent + when: + - ubtu18cis_rule_2_1_13 + - not ubtu18cis_squid_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.13 + - http_proxy + - service + +- name: "2.1.14 | PATCH | Ensure SNMP Server is not installed" + ansible.builtin.package: + name: snmpd + state: absent + when: + - ubtu18cis_rule_2_1_14 + - not ubtu18cis_snmp_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.14 + - snmp + - service + +- name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode" + block: + - name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode | For postfix" + ansible.builtin.lineinfile: + path: /etc/postfix/main.cf + regexp: '^(#)?inet_interfaces' + line: 'inet_interfaces = loopback-only' + notify: restart postfix + when: "'postfix' in ansible_facts.packages" + + - name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode | for exim4" + ansible.builtin.lineinfile: + path: /etc/exim4/update-exim4.conf.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + notify: restart exim4 + with_items: + - { regexp: '^dc_eximconfig_configtype=', line: dc_eximconfig_configtype='local' } + - { regexp: '^dc_local_interfaces=', line: "dc_local_interfaces='127.0.0.1 ; ::1'" } + - { regexp: '^dc_readhost=', line: dc_readhost='' } + 
- { regexp: '^dc_relay_domains=', line: dc_relay_domains='' } + - { regexp: '^dc_minimaldns=', line: dc_minimaldns='false' } + - { regexp: '^dc_relay_nets=', line: dc_relay_nets='' } + - { regexp: '^dc_smarthost=', line: dc_smarthost='' } + - { regexp: '^dc_use_split_config=', line: dc_use_split_config='false' } + - { regexp: '^dc_hide_mailname=', line: dc_hide_mailname='' } + - { regexp: '^dc_mailname_in_oh=', line: dc_mailname_in_oh='true' } + - { regexp: '^dc_localdelivery=', line: dc_localdelivery='mail_spool' } + when: "'exim4' in ansible_facts.packages" + when: + - ubtu18cis_rule_2_1_15 + - ubtu18cis_mail_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.15 + - postfix + +- name: "2.1.16 | PATCH | Ensure rsync service is not installed" + ansible.builtin.package: + name: rsync + state: absent + purge: true + when: + - ubtu18cis_rule_2_1_16 + - not ubtu18cis_rsync_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.16 + - rsync + +- name: "2.1.17 | PATCH | Ensure NIS Server is not enabled" + ansible.builtin.package: + name: nis + state: absent + when: + - ubtu18cis_rule_2_1_17 + - not ubtu18cis_nis_server + tags: + - level1-server + - level1-workstation + - automated + - rule_2.1.17 + - nis + - service diff --git a/tasks/section_2/cis_2.2.x.yml b/tasks/section_2/cis_2.2.x.yml new file mode 100644 index 0000000..abeada5 --- /dev/null +++ b/tasks/section_2/cis_2.2.x.yml @@ -0,0 +1,91 @@ +--- +- name: "2.2.1 | PATCH | Ensure NIS Client is not installed" + ansible.builtin.apt: + name: nis + state: absent + when: + - ubtu18cis_rule_2_2_1 + - not ubtu18cis_nis_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.1 + - nis + +- name: "2.2.2 | PATCH | Ensure rsh client is not installed" + ansible.builtin.apt: + name: rsh-client + state: absent + when: + - ubtu18cis_rule_2_2_2 + - not ubtu18cis_rsh_required + tags: + - level1-server + - 
level1-workstation + - automated + - patch + - rule_2.2.2 + - rsh + +- name: "2.2.3 | PATCH | Ensure talk client is not installed" + ansible.builtin.apt: + name: talk + state: absent + when: + - ubtu18cis_rule_2_2_3 + - not ubtu18cis_talk_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.3 + - talk + +- name: "2.2.4 | PATCH | Ensure telnet client is not installed" + ansible.builtin.apt: + name: telnet + state: absent + when: + - ubtu18cis_rule_2_2_4 + - not ubtu18cis_telnet_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.4 + - telnet + +- name: "2.2.5 | PATCH | Ensure LDAP client is not installed" + ansible.builtin.apt: + name: ldap-utils + state: absent + when: + - ubtu18cis_rule_2_2_5 + - not ubtu18cis_ldap_clients_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.5 + - ldap + +- name: "2.2.6 | PATCH | Ensure RPC is not installed" + ansible.builtin.apt: + name: rpcbind + state: absent + when: + - ubtu18cis_rule_2_2_6 + - not ubtu18cis_rpc_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.7 + - rpc + - services diff --git a/tasks/section_2/cis_2.3.yml b/tasks/section_2/cis_2.3.yml new file mode 100644 index 0000000..f2c1d08 --- /dev/null +++ b/tasks/section_2/cis_2.3.yml @@ -0,0 +1,29 @@ +--- +- name: "2.3 | AUDIT | Ensure nonessential services are removed or masked" + block: + - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Get list of all services" + ansible.builtin.shell: lsof -i -P -n | grep -v "(ESTABLISHED)" + changed_when: false + failed_when: false + register: ubtu18cis_2_3_services + + - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Display services" + ansible.builtin.debug: + msg: + - "Warning!! Below are the list of services. 
Please make sure all are required and remove any non-required services" + - "{{ ubtu18cis_2_3_services.stdout_lines }}" + + - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '2.3' + when: + - ubtu18cis_rule_2_3 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_2.3 + - nonessential_services diff --git a/tasks/section_2/main.yml b/tasks/section_2/main.yml new file mode 100644 index 0000000..d66b7af --- /dev/null +++ b/tasks/section_2/main.yml @@ -0,0 +1,12 @@ +--- +- name: "SECTION | 2.1 | Special Purpose Services" + ansible.builtin.import_tasks: + file: cis_2.1.x.yml + +- name: "SECTION | 2.2 | Service Clients" + ansible.builtin.import_tasks: + file: cis_2.2.x.yml + +- name: "SECTION | 2.3 | Ensure nonessential services are removed or masked" + ansible.builtin.import_tasks: + file: cis_2.3.yml diff --git a/tasks/section_3/cis_3.1.x.yml b/tasks/section_3/cis_3.1.x.yml new file mode 100644 index 0000000..03fab8c --- /dev/null +++ b/tasks/section_3/cis_3.1.x.yml @@ -0,0 +1,49 @@ +--- +- name: "3.1.1 | PATCH | Disable IPv6" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^(GRUB_CMDLINE_LINUX=.*(?!.*ipv6\.disable=1)\"[^\"]+)(\".*)' + line: '\1 ipv6.disable=1\2' + backrefs: true + notify: grub update + when: + - ubtu18cis_rule_3_1_1 + - not ubtu18cis_ipv6_required + tags: + - level2-server + - level2-workstation + - manual + - patch + - rule_3.1.1 + - ipv6 + +- name: "3.1.2 | PATCH | Ensure wireless interfaces are disabled" + block: + - name: "3.1.2 | AUDIT | Ensure wireless interfaces are disabled | Check if nmcli command is available" + ansible.builtin.shell: dpkg -s network-manager + changed_when: false + failed_when: false + check_mode: false + register: ubtu18cis_nmcli_available + + - name: "3.1.2 | AUDIT | Ensure wireless interfaces are disabled | Check if wifi is enabled" + ansible.builtin.shell: 
nmcli radio wifi + register: ubtu18cis_wifi_enabled + check_mode: false + changed_when: ubtu18cis_wifi_enabled.stdout != "disabled" + when: + - ubtu18cis_nmcli_available is defined + - ubtu18cis_nmcli_available.rc == 0 + + - name: "3.1.2 | PATCH | Ensure wireless interfaces are disabled | Disable wifi if enabled" + ansible.builtin.shell: nmcli radio wifi off + when: ubtu18cis_wifi_enabled is changed # noqa: no-handler + when: + - ubtu18cis_rule_3_1_2 + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_3.1.2 + - wireless diff --git a/tasks/section_3/cis_3.2.x.yml b/tasks/section_3/cis_3.2.x.yml new file mode 100644 index 0000000..08a33ad --- /dev/null +++ b/tasks/section_3/cis_3.2.x.yml @@ -0,0 +1,60 @@ +--- +- name: "3.2.1 | PATCH | Ensure packet redirect sending is disabled" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.send_redirects + - net.ipv4.conf.default.send_redirects + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_2_1 + - not ubtu18cis_is_router + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.2.1 + - packet_redirect + - sysctl + +- name: "3.2.2 | PATCH | Ensure IP forwarding is disabled" + block: + - name: "3.2.2 | PATCH | Ensure IP forwarding is disabled | IPv4 settings" + ansible.posix.sysctl: + name: net.ipv4.ip_forward + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + notify: + - sysctl flush ipv4 route table + + - name: "3.2.2 | PATCH | Ensure IP forwarding is disabled | IPv6 settings." 
+ ansible.posix.sysctl: + name: net.ipv6.conf.all.forwarding + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + notify: + - sysctl flush ipv6 route table + when: ubtu18cis_ipv6_required + when: + - ubtu18cis_rule_3_2_2 + - not ubtu18cis_is_router + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.2.2 + - ip_forwarding + - sysctl diff --git a/tasks/section_3/cis_3.3.x.yml b/tasks/section_3/cis_3.3.x.yml new file mode 100644 index 0000000..67ec862 --- /dev/null +++ b/tasks/section_3/cis_3.3.x.yml @@ -0,0 +1,233 @@ +--- +- name: "3.3.1 | PATCH | Ensure source routed packets are not accepted" + block: + - name: "3.3.1 | PATCH | Ensure source routed packets are not accepted | IPv4 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.accept_source_route + - net.ipv4.conf.default.accept_source_route + notify: sysctl flush ipv4 route table + + - name: "3.3.1 | PATCH | Ensure source routed packets are not accepted | IPv6 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv6.conf.all.accept_source_route + - net.ipv6.conf.default.accept_source_route + notify: sysctl flush ipv6 route table + when: ubtu18cis_ipv6_required + when: + - ubtu18cis_rule_3_3_1 + - not ubtu18cis_is_router + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.1 + - routed_packets + - sysctl + +- name: "3.3.2 | PATCH | Ensure ICMP redirects are not accepted" + block: + - name: "3.3.2 | PATCH | Ensure ICMP redirects are not accepted | IPv4 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.accept_redirects + - net.ipv4.conf.default.accept_redirects + 
notify: sysctl flush ipv4 route table + + - name: "3.3.2 | PATCH | Ensure ICMP redirects are not accepted | IPv6 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv6.conf.all.accept_redirects + - net.ipv6.conf.default.accept_redirects + notify: sysctl flush ipv6 route table + when: ubtu18cis_ipv6_required + when: + - ubtu18cis_rule_3_3_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.2 + - icmp + - sysctl + +- name: "3.3.3 | PATCH | Ensure secure ICMP redirects are not accepted" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.secure_redirects + - net.ipv4.conf.default.secure_redirects + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.3 + - icmp + - sysctl + +- name: "3.3.4 | PATCH | Ensure suspicious packets are logged" + ansible.posix.sysctl: + name: "{{ item }}" + value: '1' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.log_martians + - net.ipv4.conf.default.log_martians + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.4 + - suspicious_packets + - sysctl + +- name: "3.3.5 | PATCH | Ensure broadcast ICMP requests are ignored" + ansible.posix.sysctl: + name: net.ipv4.icmp_echo_ignore_broadcasts + value: '1' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.5 + - icmp + - sysctl + +- name: "3.3.6 | PATCH | Ensure bogus ICMP responses are ignored" 
+ ansible.posix.sysctl: + name: net.ipv4.icmp_ignore_bogus_error_responses + value: '1' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.6 + - icmp + - sysctl + +- name: "3.3.7 | PATCH | Ensure Reverse Path Filtering is enabled" + ansible.posix.sysctl: + name: "{{ item }}" + value: '1' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv4.conf.all.rp_filter + - net.ipv4.conf.default.rp_filter + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.7 + - reverse_path_filtering + - sysctl + +- name: "3.3.8 | PATCH | Ensure TCP SYN Cookies is enabled" + ansible.posix.sysctl: + name: net.ipv4.tcp_syncookies + value: '1' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + notify: sysctl flush ipv4 route table + when: + - ubtu18cis_rule_3_3_8 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.8 + - tcp_syn_cookies + - sysctl + +- name: "3.3.9 | PATCH | Ensure IPv6 router advertisements are not accepted" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + state: present + reload: true + ignoreerrors: true + with_items: + - net.ipv6.conf.all.accept_ra + - net.ipv6.conf.default.accept_ra + notify: sysctl flush ipv6 route table + when: + - ubtu18cis_rule_3_3_9 + - ubtu18cis_ipv6_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.3.9 + - ipv6 + - router_advertisements + - sysctl diff --git a/tasks/section_3/cis_3.4.x.yml b/tasks/section_3/cis_3.4.x.yml new file mode 100644 index 0000000..5e81f58 --- /dev/null +++ b/tasks/section_3/cis_3.4.x.yml @@ -0,0 +1,64 @@ +--- +- name: "3.4.1 | PATCH | Ensure DCCP is disabled" + 
ansible.builtin.lineinfile: + path: /etc/modprobe.d/dccp.conf + regexp: '^(#)?install dccp(\\s|$)' + line: 'install dccp /bin/true' + create: true + when: + - ubtu18cis_rule_3_4_1 + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_3.4.1 + - DCCP + +- name: "3.4.2 | PATCH | Ensure SCTP is disabled" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/sctp.conf + regexp: "^(#)?install sctp(\\s|$)" + line: 'install sctp /bin/true' + create: true + when: + - ubtu18cis_rule_3_4_2 + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_3.4.2 + - sctp + +- name: "3.4.3 | PATCH | Ensure RDS is disabled" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/rds.conf + regexp: '^(#)?install rds(\\s|$)' + line: 'install rds /bin/true' + create: true + when: + - ubtu18cis_rule_3_4_3 + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_3.4.3 + - rds + +- name: "3.4.4 | PATCH | Ensure TIPC is disabled" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/tipc.conf + regexp: '^(#)?install tipc(\\s|$)' + line: 'install tipc /bin/true' + create: true + when: + - ubtu18cis_rule_3_4_4 + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_3.4.4 + - tipc diff --git a/tasks/section_3/cis_3.5.x.yml b/tasks/section_3/cis_3.5.x.yml new file mode 100644 index 0000000..13a5cdf --- /dev/null +++ b/tasks/section_3/cis_3.5.x.yml @@ -0,0 +1,894 @@ +--- +- name: "3.5.1.1 | PATCH | Ensure ufw is installed" + block: + - name: "3.5.1.1 | PATCH | Ensure ufw is installed | Install firewall package" + ansible.builtin.package: + name: ufw + state: present + + - name: "3.5.1.1 | PATCH | Ensure ufw is installed | Adjust sysctl.conf for UFW" + ansible.builtin.lineinfile: + path: /etc/default/ufw + regexp: '^IPT_SYSCTL=' + line: 'IPT_SYSCTL=/etc/sysctl.conf' + when: ubtu18cis_ufw_system_sysctlconf + when: + - ubtu18cis_rule_3_5_1_1 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - 
level1-workstation + - automated + - patch + - rule_3.5.1.1 + - apt + - firewall + +- name: "3.5.1.2 | PATCH | Ensure iptables-persistent is not installed with ufw" + ansible.builtin.package: + name: iptables-persistent + state: absent + when: + - ubtu18cis_rule_3_5_1_2 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.1.2 + - apt + - firewall + +# Adding the allow OpenSSH rule while enabling ufw to allow ansible to run after enabling +- name: "3.5.1.3 | PATCH | Ensure ufw service is enabled." + community.general.ufw: + rule: allow + name: OpenSSH + state: enabled + when: + - ubtu18cis_rule_3_5_1_3 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.1.3 + - ufw + +- name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured." + block: + - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set allow ufw rules." + community.general.ufw: + rule: allow + direction: in + interface: lo + notify: reload ufw + + - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv4" + community.general.ufw: + rule: deny + direction: in + from_ip: 127.0.0.0/8 + notify: reload ufw + + - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv6" + community.general.ufw: + rule: deny + direction: in + from_ip: "::1" + notify: reload ufw + when: ubtu18cis_ipv6_required + when: + - ubtu18cis_rule_3_5_1_4 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.1.4 + - ufw + +- name: "3.5.1.5 | PATCH | Ensure outbound connections are configured" + block: + - name: "3.5.1.5 | PATCH | Ensure outbound connections are configured | Custom ports" + community.general.ufw: + rule: allow + direction: out + to_port: '{{ item }}' + with_items: + - "{{ ubtu18cis_ufw_allow_out_ports }}" + notify: reload ufw + 
when: ubtu18cis_ufw_allow_out_ports != "all" + + - name: "3.5.1.5 | PATCH | Ensure outbound connections are configured | Allow all" + community.general.ufw: + default: allow + direction: outgoing + notify: reload ufw + when: "'all' in ubtu18cis_ufw_allow_out_ports" + when: + - ubtu18cis_rule_3_5_1_5 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_3.5.1.5 + - ufw + +- name: "3.5.1.6 | AUDIT | Ensure firewall rules exist for all open ports" + block: + - name: "3.5.1.6 | AUDIT | Ensure firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -4tuln + changed_when: false + failed_when: false + register: ubtu18cis_3_5_1_6_open_listen_ports + + - name: "3.5.1.6 | AUDIT | Ensure firewall rules exist for all open ports | Get list of firewall rules" + ansible.builtin.shell: ufw status + changed_when: false + failed_when: false + register: ubtu18cis_3_5_1_6_firewall_rules + + - name: "3.5.1.6 | AUDIT | Ensure firewall rules exist for all open ports | Message out settings" + ansible.builtin.debug: + msg: + - "Warning!! Below are the listening ports and firewall rules" + - "Please create firewall rule for any open ports if not already done" + - "*****---Open Listen Ports---*****" + - "{{ ubtu18cis_3_5_1_6_open_listen_ports.stdout_lines }}" + - "*****---Firewall Rules---*****" + - "{{ ubtu18cis_3_5_1_6_firewall_rules.stdout_lines }}" + + - name: "3.5.1.6 | AUDIT | Ensure firewall rules exist for all open ports | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.1.6' + when: + - ubtu18cis_rule_3_5_1_6 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_3.5.1.6 + - ufw + +- name: "3.5.1.7 | PATCH | Ensure ufw default deny firewall policy" + community.general.ufw: + default: deny + direction: "{{ item }}" + notify: reload ufw + with_items: + - incoming + - outgoing + - routed + when: + - ubtu18cis_rule_3_5_1_7 + - ubtu18cis_firewall_package == "ufw" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.1.7 + - ufw + +# # --------------- +# # --------------- +# # NFTables is unsupported with this role. However I have the actions commented out as a guide +# # --------------- +# # --------------- +- name: "3.5.2.1 | AUDIT | Ensure nftables is installed." + block: + - name: "3.5.2.1 | AUDIT | Ensure nftables is installed. | Warning." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW or iptables" + + - name: "3.5.2.1 | AUDIT | Ensure firewall rules exist for all open ports | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.1' + # ansible.builtin.package: + # name: nftables + # state: present + when: + - ubtu18cis_rule_3_5_2_1 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.1 + - nftables + - firewall + - notimplemented + +- name: "3.5.2.2 | AUDIT | Ensure ufw is uninstalled or disabled with nftables" + block: + - name: "3.5.2.2 | AUDIT | Ensure ufw is uninstalled or disabled with nftables. | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW or iptables" + + - name: "3.5.2.2 | AUDIT | Ensure ufw is uninstalled or disabled with nftables. | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.2' + # ansible.builtin.package: + # name: ufw + # state: absent + when: + - ubtu18cis_rule_3_5_2_2 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.2 + - nftables + - firewall + - notimplemented + +- name: "3.5.2.3 | PATCH | Ensure iptables are flushed" + block: + - name: "3.5.2.3 | PATCH | Ensure iptables are flushed. | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW or iptables" + + - name: "3.5.2.3 | PATCH | Ensure iptables are flushed. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.3' + # ansible.builtin.iptables: + # flush: yes + # + when: + - ubtu18cis_rule_3_5_2_3 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_3.5.2.3 + - nftables + - notimplemented + +- name: "3.5.2.4 | PATCH | Ensure a nftables table exists" + block: + - name: "3.5.2.4 | PATCH | Ensure a nftables table exists. | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.4 | PATCH | Ensure a nftables table exists. | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.4' + # ansible.builtin.shell: "nft create table {{ ubtu18cis_nftables_table_name }}" + # changed_when: ubtu18cis_3_5_2_4_new_table.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_4_new_table + when: + - ubtu18cis_rule_3_5_2_4 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.4 + - nftables + - notimplemented + +- name: "3.5.2.5 | PATCH | Ensure nftables base chains exist" + block: + - name: "3.5.2.5 | PATCH | Ensure nftables base chains exist | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.5 | PATCH | Ensure nftables base chains exist | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.5' + # block: + # - name: "3.5.2.5 | PATCH | Ensure base chains exist | Input entry" + # ansible.builtin.shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} input { type filter hook input priority 0 \; }' + # changed_when: ubtu18cis_3_5_2_5_base_chains_input.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_5_base_chains_input + + # - name: "3.5.2.5 | PATCH | Ensure base chains exist | Forward entry" + # ansible.builtin.shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} forward { type filter hook forward priority 0 \; }' + # changed_when: ubtu18cis_3_5_2_5_base_chains_forward.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_5_base_chains_forward + + # - name: "3.5.2.5 | PATCH | Ensure base chains exist | Output entry" + # ansible.builtin.shell: 'nft create chain inet {{ ubtu18cis_nftables_table_name }} output { type filter hook output priority 0 \; }' + # changed_when: ubtu18cis_3_5_2_5_base_chains_output.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_5_base_chains_output + when: + 
- ubtu18cis_rule_3_5_2_5 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.5 + - nftables + - notimplemented + +- name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured" + block: + - name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured. | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.6' + # block: + # - name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured | Get input iif lo accept status" + # ansible.builtin.shell: nft list ruleset | awk '/hook input/,/}/' | grep 'iif "lo" accept' + # changed_when: false + # failed_when: false + # register: ubtu18cis_3_5_2_6_loopback_iif_status + + # - name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured | Get input iif lo accept status" + # ansible.builtin.shell: nft list ruleset | awk '/hook input/,/}/' | grep 'ip saddr' + # changed_when: false + # failed_when: false + # register: ubtu18cis_3_5_2_6_loopback_input_drop_status + + # - name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured | Get input iif lo accept status" + # ansible.builtin.shell: nft list ruleset | awk '/hook input/,/}/' | grep 'ip6 saddr' + # changed_when: false + # failed_when: false + # register: ubtu18cis_3_5_2_6_loopback_ipv6_drop_status + + # - name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured | Loopback iif lo accept" + # ansible.builtin.shell: 'nft add rule inet {{ ubtu18cis_nftables_table_name }} input iif lo accept' + # changed_when: ubtu18cis_3_5_3_4_loopback_iif.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_3_4_loopback_iif + # when: "'iif \"lo\" accept' not in 
ubtu18cis_3_5_3_4_loopback_iif_status.stdout" + + # - name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured | Loopback input drop" + # ansible.builtin.shell: 'nft add rule inet {{ ubtu18cis_nftables_table_name }} input ip saddr 127\.0\.0\.0\/8 counter drop' + # changed_when: ubtu18cis_3_5_2_6_loopback_input_drop.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_6_loopback_input_drop + # when: + # - "'ip saddr 127.0.0.0/8' not in ubtu18cis_3_5_3_4_loopback_input_drop_status.stdout" + # - "'drop' not in ubtu18cis_3_5_2_6_loopback_input_drop_status.stdout" + + # - name: "3.5.2.6 | PATCH | Ensure nftables loopback traffic is configured | Loopback ipv6 drop" + # ansible.builtin.shell: 'nft add rule inet {{ ubtu18cis_nftables_table_name }} input ip6 saddr ansible.builtin.lineinfile::1 counter drop' + # changed_when: ubtu18cis_3_5_2_6_loopback_ipv6_drop.rc == 0 + # failed_when: false + # register: ubtu18cis_3_5_2_6_loopback_ipv6_drop + # when: + # - "'ip6 saddr' not in ubtu18cis_3_5_2_6_loopback_ipv6_drop_status.stdout" + # - "'drop' not in ubtu18cis_3_5_2_6_loopback_ipv6_drop_status.stdout" + when: + - ubtu18cis_rule_3_5_2_6 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.6 + - nftables + - notimplemented + +- name: "3.5.2.7 | PATCH | Ensure nftables outbound and established connections are configured" + block: + - name: "3.5.2.7 | PATCH | Ensure nftables outbound and established connections are configured | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. 
Please us UFW or iptables" + + - name: "3.5.2.7 | PATCH | Ensure nftables outbound and established connections are configured | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.7' + when: + - ubtu18cis_rule_3_5_2_7 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_3.5.2.7 + - nftables + - notimplemented + +- name: "3.5.2.8 | PATCH | Ensure nftables default deny firewall policy" + block: + - name: "3.5.2.8 | PATCH | Ensure nftables default deny firewall policy | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.8 | PATCH | Ensure nftables default deny firewall policy | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.8' + when: + - ubtu18cis_rule_3_5_2_8 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.8 + - nftables + - notimplemented + +- name: "3.5.2.9 | PATCH | Ensure nftables service is enabled" + block: + - name: "3.5.2.9 | PATCH | Ensure nftables service is enabled | Warning Message." + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.9 | PATCH | Ensure nftables service is enabled | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.9' + # service: + # name: nftables + # state: started + # enabled: yes + when: + - ubtu18cis_rule_3_5_2_9 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.9 + - nftables + - notimplemented + +- name: "3.5.2.10 | PATCH | Ensure nftables rules are permanent" + block: + - name: "3.5.2.10 | PATCH | Ensure nftables rules are permanent. | Warning Message." 
+ ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please us UFW or iptables" + + - name: "3.5.2.10 | PATCH | Ensure nftables rules are permanent. | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.2.10' + when: + - ubtu18cis_rule_3_5_2_10 + - ubtu18cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.2.10 + - nftables + - notimplemented + +- name: "3.5.3.1.1 | PATCH | Ensure iptables packages are installed" + ansible.builtin.package: + name: ['iptables', 'iptables-persistent'] + state: present + when: + - ubtu18cis_rule_3_5_3_1_1 + - ubtu18cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.1.1 + - iptables + - firewall + +- name: "3.5.3.1.2 | PATCH | Ensure nftables is not installed with iptables" + ansible.builtin.package: + name: nftables + state: absent + when: + - ubtu18cis_rule_3_5_3_1_2 + - ubtu18cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.1.2 + - iptables + - firewall + +- name: "3.5.3.1.3 | PATCH | Ensure ufw is uninstalled or disabled with iptables" + ansible.builtin.package: + name: ufw + state: absent + when: + - ubtu18cis_rule_3_5_3_1_3 + - ubtu18cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.1.3 + - iptables + - firewall + +- name: "3.5.3.2.1 | PATCH | Ensure iptables default deny firewall policy" + block: + - name: "3.5.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed in" + ansible.builtin.iptables: + chain: INPUT + protocol: tcp + destination_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + + - name: "3.5.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + 
chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + + - name: "3.5.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + + - name: "3.5.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + with_items: + - INPUT + - FORWARD + - OUTPUT + when: + - ubtu18cis_rule_3_5_3_2_1 + - ubtu18cis_firewall_package == "iptables" + - not ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.2.1 + - iptables + - firewall + +- name: "3.5.3.2.2 | PATCH | Ensure iptables loopback traffic is configured" + block: + - name: "3.5.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + + - name: "3.5.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + + - name: "3.5.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + source: 127.0.0.0/8 + jump: DROP + when: + - ubtu18cis_rule_3_5_3_2_2 + - ubtu18cis_firewall_package == "iptables" + - not ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.2.2 + - iptables + - firewall + +- name: "3.5.3.2.3 | PATCH | Ensure iptables outbound and established connections are configured" + ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + with_items: + - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } + - { chain: 
OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + when: + - ubtu18cis_rule_3_5_3_2_3 + - ubtu18cis_firewall_package == "iptables" + - not ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_3.5.3.2.3 + - iptables + - firewall + +- name: "3.5.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports" + block: + - name: "3.5.3.2.4 | AUDIT | Ensure firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -4tuln + changed_when: false + failed_when: false + register: ubtu18cis_3_5_3_2_4_open_ports + + - name: "3.5.3.2.4 | AUDIT | Ensure firewall rules exist for all open ports | Get list of rules" + ansible.builtin.shell: iptables -L INPUT -v -n + changed_when: false + failed_when: false + register: ubtu18cis_3_5_3_2_4_current_rules + + - name: "3.5.3.2.4 | AUDIT | Ensure firewall rules exist for all open ports | Alert about settings" + ansible.builtin.debug: + msg: + - "Warning!! 
Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu18cis_3_5_3_2_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu18cis_3_5_3_2_4_current_rules.stdout_lines }}" + + - name: "3.5.3.2.4 | AUDIT | Ensure firewall rules exist for all open ports | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.3.2.4' + when: + - ubtu18cis_rule_3_5_3_2_4 + - ubtu18cis_firewall_package == "iptables" + - not ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.3.2.4 + - iptables + +# --------------- +# --------------- +# This is not a control however using the iptables module only writes to memery +# if a reboot occurs that means changes can revert. This task will make the +# above iptables settings permanent +# --------------- +# --------------- +- name: "Make IPTables persistent | Not a control" + block: + - name: "Make IPTables persistent | Install iptables-persistent" + ansible.builtin.package: + name: iptables-persistent + state: present + + - name: "Make IPTables persistent | Save to persistent files" + ansible.builtin.shell: bash -c "iptables-save > /etc/iptables/rules.v4" + changed_when: ubtu18cis_iptables_save.rc == 0 + failed_when: ubtu18cis_iptables_save.rc > 0 + register: ubtu18cis_iptables_save + when: + - ubtu18cis_firewall_package == "iptables" + - not ubtu18cis_iptables_v6 + - ubtu18cis_save_iptables_cis_rules + - ubtu18cis_rule_3_5_3_2_1 or + ubtu18cis_rule_3_5_3_2_2 or + ubtu18cis_rule_3_5_3_2_3 or + ubtu18cis_rule_3_5_3_2_4 + +- name: "3.5.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy" + block: + - name: "3.5.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + 
ip_version: ipv6 + + - name: "3.5.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + ip_version: ipv6 + + - name: "3.5.3.3.1| PATCH | Ensure ip6tables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + ip_version: ipv6 + with_items: + - INPUT + - FORWARD + - OUTPUT + when: + - ubtu18cis_rule_3_5_3_3_1 + - ubtu18cis_firewall_package == "iptables" + - ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.3.1 + - ip6tables + +- name: "3.5.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured" + block: + - name: "3.5.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + ip_version: ipv6 + + - name: "3.5.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + ip_version: ipv6 + + - name: "3.5.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback drop" + ansible.builtin.iptables: + action: append + chain: INPUT + source: ansible.builtin.lineinfile::1 + jump: DROP + ip_version: ipv6 + when: + - ubtu18cis_rule_3_5_3_3_2 + - ubtu18cis_firewall_package == "iptables" + - ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.5.3.3.2 + - ip6tables + +- name: "3.5.3.3.3 | PATCH | Ensure ip6tables outbound and established connections are configured" + ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + ip_version: ipv6 + with_items: + - { chain: OUTPUT, protocol: tcp, ctstate: 
'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + when: + - ubtu18cis_rule_3_5_3_3_3 + - ubtu18cis_firewall_package == "iptables" + - ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_3.5.3.3.3 + - ip6tables + +- name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports" + block: + - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -6tuln + changed_when: false + failed_when: false + register: ubtu18cis_3_5_3_3_4_open_ports + + - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of rules" + ansible.builtin.shell: ip6tables -L INPUT -v -n + changed_when: false + failed_when: false + register: ubtu18cis_3_5_3_3_4_current_rules + + - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Alert about settings" + ansible.builtin.debug: + msg: + - "Warning!! 
Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu18cis_3_5_3_3_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu18cis_3_5_3_3_4_current_rules.stdout_lines }}" + + - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '3.5.3.3.4' + when: + - ubtu18cis_rule_3_5_3_3_4 + - ubtu18cis_firewall_package == "iptables" + - ubtu18cis_iptables_v6 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_3.5.4.2.4 + - ip6tables + +# --------------- +# --------------- +# This is not a control however using the ip6tables module only writes to memery +# if a reboot occurs that means changes can revert. This task will make the +# above ip6tables settings permanent +# --------------- +# --------------- +- name: "Make IP6Tables persistent | Not a control" + block: + - name: "Make IP6Tables persistent | Install iptables-persistent" + ansible.builtin.package: + name: iptables-persistent + state: present + + - name: "Make IP6Tables persistent | Save to persistent files" + ansible.builtin.shell: bash -c "ip6tables-save > /etc/iptables/rules.v6" + changed_when: ubtu18cis_ip6tables_save.rc == 0 + failed_when: ubtu18cis_ip6tables_save.rc > 0 + register: ubtu18cis_ip6tables_save + when: + - ubtu18cis_firewall_package == "iptables" + - ubtu18cis_iptables_v6 + - ubtu18cis_save_iptables_cis_rules + - ubtu18cis_rule_3_5_3_3_1 or + ubtu18cis_rule_3_5_3_3_2 or + ubtu18cis_rule_3_5_3_3_3 or + ubtu18cis_rule_3_5_3_3_4 diff --git a/tasks/section_3/main.yml b/tasks/section_3/main.yml new file mode 100644 index 0000000..f95dc6a --- /dev/null +++ b/tasks/section_3/main.yml @@ -0,0 +1,20 @@ +--- +- name: "SECTION | 3.1 | Disable unused network protocols and devices" + ansible.builtin.import_tasks: + file: cis_3.1.x.yml + +- 
name: "SECTION | 3.2 | Network Parameters Host Only" + ansible.builtin.import_tasks: + file: cis_3.2.x.yml + +- name: "SECTION | 3.3 | Network Parameters Host and Router" + ansible.builtin.import_tasks: + file: cis_3.3.x.yml + +- name: "SECTION | 3.4 | Uncommong Network Protocols" + ansible.builtin.import_tasks: + file: cis_3.4.x.yml + +- name: "SECTION | 3.5 | Firewall Configuration" + ansible.builtin.import_tasks: + file: cis_3.5.x.yml diff --git a/tasks/section_4/cis_4.1.1.x.yml b/tasks/section_4/cis_4.1.1.x.yml new file mode 100644 index 0000000..6e15169 --- /dev/null +++ b/tasks/section_4/cis_4.1.1.x.yml @@ -0,0 +1,81 @@ +--- +- name: "4.1.1.1 | PATCH | Ensure auditd is installed" + ansible.builtin.package: + name: ['auditd', 'audispd-plugins'] + state: present + when: + - ubtu18cis_rule_4_1_1_1 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.1.1 + - auditd + +- name: "4.1.1.2 | PATCH | Ensure auditd service is enabled" + ansible.builtin.service: + name: auditd + state: started + enabled: true + when: + - ubtu18cis_rule_4_1_1_2 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.1.2 + - auditd + +- name: "4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled" + block: + - name: "4.1.1.3 | AUDIT | Ensure auditing for processes that start prior to auditd is enabled | Get GRUB_CMDLINE_LINUX" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + register: ubtu18cis_4_1_1_3_cmdline_settings + + - name: "4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Add setting if doesn't exist" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^GRUB_CMDLINE_LINUX=' + line: 'GRUB_CMDLINE_LINUX="{{ ubtu18cis_4_1_1_3_cmdline_settings.stdout }} audit=1"' + when: "'audit=' not in ubtu18cis_4_1_1_3_cmdline_settings.stdout" + notify: grub update + + - 
name: "4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Update setting if exists" + ansible.builtin.replace: + dest: /etc/default/grub + regexp: 'audit=([0-9]+)' + replace: 'audit=1' + after: '^GRUB_CMDLINE_LINUX="' + before: '"' + notify: grub update + when: "'audit=' in ubtu18cis_4_1_1_3_cmdline_settings.stdout" + when: + - ubtu18cis_rule_4_1_1_3 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4_1_1_3 + - auditd + +- name: "4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient" + ansible.builtin.replace: + dest: /etc/default/grub + regexp: '(^GRUB_CMDLINE_LINUX\s*\=\s*)(?:")(.+)(?/dev/null; done + register: priv_procs + changed_when: false + check_mode: false + + - name: "4.1.11 | PATCH | Ensure use of privileged commands is collected | Set privileged rules" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_11_privileged.rules.j2 + dest: /etc/audit/rules.d/privileged.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_11 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.11 + - auditd + +- name: "4.1.12 | PATCH | Ensure successful file system mounts are collected" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_12_audit.rules.j2 + dest: /etc/audit/rules.d/audit.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + ubtu18cis_rule_4_1_12 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.12 + - auditd + +- name: "4.1.13 | PATCH | Ensure file deletion events by users are collected" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_13_delete.rules.j2 + dest: /etc/audit/rules.d/delete.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_13 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.13 + - auditd + +- name: "4.1.14 | PATCH | Ensure 
changes to system administration scope (sudoers) is collected" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_14_scope.rules.j2 + dest: /etc/audit/rules.d/scope.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_14 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.14 + - auditd + +- name: "4.1.15 | PATCH | Ensure system administrator command executions (sudo) are collected" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_15_actions.rules.j2 + dest: /etc/audit/rules.d/actions.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_15 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.15 + - auditd + +- name: "4.1.16 | PATCH | Ensure kernel module loading and unloading is collected" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_16_modules.rules.j2 + dest: /etc/audit/rules.d/modules.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_16 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.16 + - auditd + +- name: "4.1.17 | PATCH | Ensure the audit configuration is immutable" + ansible.builtin.template: + src: audit/ubtu18cis_4_1_17_99finalize.rules.j2 + dest: /etc/audit/rules.d/99-finalize.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu18cis_rule_4_1_17 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.1.17 + - auditd diff --git a/tasks/section_4/cis_4.2.1.x.yml b/tasks/section_4/cis_4.2.1.x.yml new file mode 100644 index 0000000..1597953 --- /dev/null +++ b/tasks/section_4/cis_4.2.1.x.yml @@ -0,0 +1,158 @@ +--- +- name: "4.2.1.1 | PATCH | Ensure rsyslog is installed" + ansible.builtin.package: + name: rsyslog + state: present + when: + - ubtu18cis_rule_4_2_1_1 + tags: + - level1-server + - level1-workstation + - 
automated + - patch + - rule_4.2.1.1 + - rsyslog + - apt + +- name: "4.2.1.2 | PATCH | Ensure rsyslog Service is enabled" + ansible.builtin.service: + name: rsyslog + enabled: true + when: + - ubtu18cis_rule_4_2_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.1.2 + - rsyslog + +- name: "4.2.1.3 | PATCH | Ensure logging is configured" + block: + - name: "4.2.1.3 | PATCH | Ensure logging is configured | Find configuration file" + ansible.builtin.shell: grep -r "*.emerg" /etc/* | cut -f1 -d":" + changed_when: false + failed_when: false + register: ubtu18cis_4_2_1_3_rsyslog_config_path + + - name: "4.2.1.3 | PATCH | Ensure logging is configured | Gather rsyslog current config" + ansible.builtin.shell: "cat {{ ubtu18cis_4_2_1_3_rsyslog_config_path.stdout }}" + changed_when: false + failed_when: false + register: ubtu18cis_4_2_1_3_rsyslog_config + + - name: "4.2.1.3 | PATCH | Ensure logging is configured | Message out config" + ansible.builtin.debug: + msg: + - "Warning!! Below is the current logging configurations for rsyslog, please review" + - "{{ ubtu18cis_4_2_1_3_rsyslog_config.stdout_lines }}" + when: not ubtu18cis_rsyslog_ansible_managed + + - name: "4.2.1.3 | PATCH | Ensure logging is configured | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '4.2.1.3' + when: not ubtu18cis_rsyslog_ansible_managed + + - name: "4.2.1.3 | PATCH | Ensure logging is configured | Automated rsyslog configuration" + ansible.builtin.lineinfile: + path: "{{ ubtu18cis_4_2_1_3_rsyslog_config_path.stdout }}" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + with_items: + - { regexp: '^\*.emerg', line: '*.emerg :omusrmsg:*', insertafter: '^# Emergencies are sent to everybody logged in' } + - { regexp: '^auth,authpriv.\*', line: 'auth,authpriv.* /var/log/auth.log', insertafter: '^# First some standard log files. 
Log by facility' } + - { regexp: '^mail.\*|^#mail.\*', line: 'mail.* -/var/log/mail', insertafter: '^# First some standard log files' } + - { regexp: '^mail.info|^#mail.info', line: 'mail.info -/var/log/mail.info', insertafter: '^# Logging for the mail system' } + - { regexp: '^mail.warn|^#mail.warn', line: 'mail.warn -/var/log/mail.warn', insertafter: '^# Logging for the mail system.' } + - { regexp: '^mail.err|^#mail.err', line: 'mail.err /var/log/mail.err', insertafter: '^# Logging for the mail system.' } + - { regexp: '^news.crit|^#news.crit', line: 'news.crit -/var/log/news/news.crit', insertafter: '^# First some standard log files'} + - { regexp: '^news.err|^#news.err', line: 'news.err -/var/log/news/news.err', insertafter: '^# First some standard log files' } + - { regexp: '^news.notice|^#news.notice', line: 'news.notice -/var/log/news/news.notice', insertafter: '^# First some standard log files' } + - { regexp: '^\*.=warning;\*.=err|^#\*.=warning;\*.=err', line: '*.=warning;*.=err -/var/log/warn', insertafter: '^# First some standard log files' } + - { regexp: '^\*.crit|^#\*.crit', line: '*.crit /var/log/warn', insertafter: '^# First some standard log files' } + - { regexp: '^\*.\*;mail.none;news.none|^#\*.\*;mail.none;news.none', line: '*.*;mail.none;news.none -/var/log/messages', insertafter: '^# First some standard log files' } + - { regexp: '^local0,local1.\*|^#local0,local1.\*', line: 'local0,local1.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local2,local3.\*|^#local2,local3.\*', line: 'local2,local3.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local4,local5.\*|^#local4,local5.\*', line: 'local4,local5.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local6,local7.\*|^#local6,local7.\*', line: 'local6,local7.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + notify: restart rsyslog 
+ when: ubtu18cis_rsyslog_ansible_managed + when: + - ubtu18cis_rule_4_2_1_3 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_4.2.1.3 + - rsyslog + +- name: "4.2.1.4 | PATCH | Ensure rsyslog default file permissions configured" + ansible.builtin.lineinfile: + path: /etc/rsyslog.conf + regexp: '^\$FileCreateMode|^#\$FileCreateMode' + line: '$FileCreateMode 0640' + notify: restart rsyslog + when: + - ubtu18cis_rule_4_2_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.1.4 + - rsyslog + +- name: "4.2.1.5 | PATCH | Ensure rsyslog is configured to send logs to a remote log host" + ansible.builtin.blockinfile: + path: /etc/rsyslog.conf + block: | + ##Enable sending of logs over TCP add the following line: + *.* @@{{ ubtu18cis_remote_log_server }} + insertafter: EOF + when: + - ubtu18cis_rule_4_2_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.1.5 + - rsyslog + +- name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts" + block: + - name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When not a log host" + ansible.builtin.replace: + path: /etc/rsyslog.conf + regexp: '({{ item }})' + replace: '#\1' + with_items: + - '^(\$ModLoad)' + - '^(\$InputTCPServerRun)' + notify: restart rsyslog + when: not ubtu18cis_system_is_log_server + + - name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When a log server" + ansible.builtin.lineinfile: + path: /etc/rsyslog.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^\$ModLoad|^#\$ModLoad', line: '$ModLoad imtcp' } + - { regexp: '^\$InputTCPServerRun|^#\$InputTCPServerRun', line: '$InputTCPServerRun 514' } + notify: restart rsyslog + when: ubtu18cis_system_is_log_server + when: + - ubtu18cis_rule_4_2_1_6 + tags: + - level1-server + - level1-workstation 
+ - manual + - patch + - rule_4.2.1.6 + - rsyslog diff --git a/tasks/section_4/cis_4.2.2.x.yml b/tasks/section_4/cis_4.2.2.x.yml new file mode 100644 index 0000000..8e11dd5 --- /dev/null +++ b/tasks/section_4/cis_4.2.2.x.yml @@ -0,0 +1,51 @@ +--- +- name: "4.2.2.1 | PATCH | Ensure journald is configured to send logs to rsyslog" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^ForwardToSyslog|^#ForwardToSyslog' + line: 'ForwardToSyslog=yes' + insertafter: '\[Journal\]' + when: + - ubtu18cis_rule_4_2_2_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.2.1 + - rsyslog + - journald + +- name: "4.2.2.2 | PATCH | Ensure journald is configured to compress large log files" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^Compress|^#Compress' + line: 'Compress=yes' + insertafter: '\[Journal\]' + when: + - ubtu18cis_rule_4_2_2_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.2.2 + - rsyslog + - journald + +- name: "4.2.2.3 | PATCH | Ensure journald is configured to write logfiles to persistent disk" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^Storage|^#Storage' + line: 'Storage=persistent' + insertafter: '\[Journal\]' + when: + - ubtu18cis_rule_4_2_2_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.2.2.3 + - rsyslog + - journald diff --git a/tasks/section_4/cis_4.2.3.x.yml b/tasks/section_4/cis_4.2.3.x.yml new file mode 100644 index 0000000..2ac0474 --- /dev/null +++ b/tasks/section_4/cis_4.2.3.x.yml @@ -0,0 +1,15 @@ +--- +- name: "4.2.3 | PATCH | Ensure permissions on all logfiles are configured" + ansible.builtin.shell: find /var/log -type f -exec chmod g-wx,o-rwx "{}" + -o -type d -exec chmod g-w,o-rwx "{}" + + changed_when: ubtu18cis_4_2_3_logfile_perms_status.rc == 0 + register: ubtu18cis_4_2_3_logfile_perms_status + when: + - ubtu18cis_rule_4_2_3 + tags: + - 
level1-server + - level1-workstation + - automated + - patch + - rule_4.2.3 + - logfiles + - permissions diff --git a/tasks/section_4/cis_4.3.yml b/tasks/section_4/cis_4.3.yml new file mode 100644 index 0000000..a7ef86d --- /dev/null +++ b/tasks/section_4/cis_4.3.yml @@ -0,0 +1,25 @@ +--- +- name: "4.3 | PATCH | Ensure logrotate is configured" + block: + - name: "4.3 | PATCH | Ensure logrotate is configured | Get logrotate files" + ansible.builtin.find: + paths: /etc/logrotate.d/ + register: ubtu18cis_4_3_logrotate_files + + - name: "4.3 | PATCH | Ensure logrotate is configured | Set rotation configurations" + ansible.builtin.replace: + path: "{{ item.path }}" + regexp: '^(\s*)(daily|weekly|monthly|yearly)$' + replace: "\\1{{ ubtu18cis_logrotate }}" + with_items: + - "{{ ubtu18cis_4_3_logrotate_files.files }}" + - { path: "/etc/logrotate.conf" } + when: + - ubtu18cis_rule_4_3 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_4.3 + - logrotate diff --git a/tasks/section_4/cis_4.4.yml b/tasks/section_4/cis_4.4.yml new file mode 100644 index 0000000..6333180 --- /dev/null +++ b/tasks/section_4/cis_4.4.yml @@ -0,0 +1,15 @@ +--- +- name: "4.4 | PATCH | Ensure logrotate assigns appropriate permissions" + ansible.builtin.lineinfile: + path: /etc/logrotate.conf + regex: '^create' + line: 'create 0640 root utmp' + when: + - ubtu18cis_rule_4_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.4 + - logrotate diff --git a/tasks/section_4/main.yml b/tasks/section_4/main.yml new file mode 100644 index 0000000..d1032c2 --- /dev/null +++ b/tasks/section_4/main.yml @@ -0,0 +1,32 @@ +--- +- name: "SECTION | 4.1.1 | Configure System Accounting" + ansible.builtin.import_tasks: + file: cis_4.1.1.x.yml + +- name: "SECTION | 4.1.2 | Configure Data Retention" + ansible.builtin.import_tasks: + file: cis_4.1.2.x.yml + +- name: "SECTION | 4.1.x | Login Settings" + ansible.builtin.import_tasks: + file: cis_4.1.x.yml + +- name: 
"SECTION | 4.2.1 | Configure rsyslog" + ansible.builtin.import_tasks: + file: cis_4.2.1.x.yml + +- name: "SECTION | 4.2.2 | Configure journald" + ansible.builtin.import_tasks: + file: cis_4.2.2.x.yml + +- name: "SECTION | 4.2.3 | Ensure permissions on all logfiles are configured" + ansible.builtin.import_tasks: + file: cis_4.2.3.x.yml + +- name: "SECTION | 4.3 | Ensure logrotate is configured" + ansible.builtin.import_tasks: + file: cis_4.3.yml + +- name: "SECTION | 4.4 | Ensure assigns appropriate permissions" + ansible.builtin.import_tasks: + file: cis_4.4.yml diff --git a/tasks/section_5/cis_5.1.x.yml b/tasks/section_5/cis_5.1.x.yml new file mode 100644 index 0000000..54f16c6 --- /dev/null +++ b/tasks/section_5/cis_5.1.x.yml @@ -0,0 +1,168 @@ +--- +- name: "5.1.1 | PATCH | Ensure cron daemon is enabled" + ansible.builtin.service: + name: cron + enabled: true + when: + - ubtu18cis_rule_5_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1 + - cron + +- name: "5.1.2 | PATCH | Ensure permissions on /etc/crontab are configured" + ansible.builtin.file: + path: /etc/crontab + owner: root + group: root + mode: 0600 + when: + - ubtu18cis_rule_5_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.2 + - cron + +- name: "5.1.3 | PATCH | Ensure permissions on /etc/cron.hourly are configured" + ansible.builtin.file: + path: /etc/cron.hourly + owner: root + group: root + mode: 0700 + when: + - ubtu18cis_rule_5_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.3 + - cron + +- name: "5.1.4 | PATCH | Ensure permissions on /etc/cron.daily are configured" + ansible.builtin.file: + path: /etc/cron.daily + owner: root + group: root + mode: 0700 + when: + - ubtu18cis_rule_5_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.4 + - cron + +- name: "5.1.5 | PATCH | Ensure permissions on /etc/cron.weekly are configured" + 
 ansible.builtin.file: + path: /etc/cron.weekly + owner: root + group: root + mode: 0700 + when: + - ubtu18cis_rule_5_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.5 + - cron + +- name: "5.1.6 | PATCH | Ensure permissions on /etc/cron.monthly are configured" + ansible.builtin.file: + path: /etc/cron.monthly + owner: root + group: root + mode: 0700 + when: + - ubtu18cis_rule_5_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.6 + - cron + +- name: "5.1.7 | PATCH | Ensure permissions on /etc/cron.d are configured" + ansible.builtin.file: + path: /etc/cron.d + owner: root + group: root + mode: 0700 + when: + - ubtu18cis_rule_5_1_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.7 + - cron + +- name: "5.1.8 | PATCH | Ensure cron is restricted to authorized users" + block: + - name: "5.1.8 | PATCH | Ensure cron is restricted to authorized users | Remove deny configs" + ansible.builtin.file: + path: /etc/cron.deny + state: absent + + - name: 5.1.8 | PATCH | Ensure cron is restricted to authorized users | confirm if file exists + ansible.builtin.stat: + path: /etc/cron.allow + register: ubtu18cis_5_1_8 + + - name: "5.1.8 | PATCH | Ensure cron is restricted to authorized users | Create allow files" + ansible.builtin.file: + path: /etc/cron.allow + owner: root + group: root + mode: 0640 + state: "{{ ( ubtu18cis_5_1_8.stat.exists ) | ternary('file', 'touch') }}" + when: + - ubtu18cis_rule_5_1_8 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.8 + - cron + +- name: "5.1.9 | PATCH | Ensure at is restricted to authorized users" + block: + - name: "5.1.9 | PATCH | Ensure at is restricted to authorized users | Remove deny configs" + ansible.builtin.file: + path: /etc/at.deny + state: absent + + - name: 5.1.9 | PATCH | Ensure at is restricted to authorized users | confirm if file exists + ansible.builtin.stat: + path: 
/etc/at.allow + register: ubtu18cis_5_1_9 + + - name: "5.1.9 | PATCH | Ensure at is restricted to authorized users | Create allow files" + ansible.builtin.file: + path: /etc/at.allow + owner: root + group: root + mode: 0640 + state: "{{ ( ubtu18cis_5_1_9.stat.exists ) | ternary('file', 'touch') }}" + when: + - ubtu18cis_rule_5_1_9 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.9 + - cron diff --git a/tasks/section_5/cis_5.2.x.yml b/tasks/section_5/cis_5.2.x.yml new file mode 100644 index 0000000..84befba --- /dev/null +++ b/tasks/section_5/cis_5.2.x.yml @@ -0,0 +1,46 @@ +--- +- name: "5.2.1 | PATCH | Ensure sudo is installed" + ansible.builtin.package: + name: "{{ ubtu18cis_sudo_package }}" + state: present + when: + - ubtu18cis_rule_5_2_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.2.1 + - sudo + +- name: "5.2.2 | PATCH | Ensure sudo commands use pty" + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults use_' + line: 'Defaults use_pty' + insertafter: '^Defaults' + when: + - ubtu18cis_rule_5_2_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.2.2 + - sudo + +- name: "5.2.3 | PATCH | Ensure sudo log file exists" + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults logfile' + line: 'Defaults logfile="{{ ubtu18cis_sudo_logfile }}"' + insertafter: '^Defaults' + when: + - ubtu18cis_rule_5_2_3 + tags: + - level1-server + - level1-workstation + - scored + - patch + - rule_5.2.3 + - sudo diff --git a/tasks/section_5/cis_5.3.x.yml b/tasks/section_5/cis_5.3.x.yml new file mode 100644 index 0000000..8434ba5 --- /dev/null +++ b/tasks/section_5/cis_5.3.x.yml @@ -0,0 +1,413 @@ +--- +- name: "5.3.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" + ansible.builtin.file: + path: /etc/ssh/sshd_config + owner: root + group: root + mode: 0600 + when: + - ubtu18cis_rule_5_3_1 + tags: + - level1-server + - 
level1-workstation + - automated + - patch + - rule_5.3.1 + - ssh + +- name: "5.3.2 | PATCH | Ensure permissions on SSH private host key files are configured" + block: + - name: "5.3.2 | AUDIT | Ensure permissions on SSH private host key files are configured | Find ssh_host private keys" + ansible.builtin.find: + paths: /etc/ssh + patterns: 'ssh_host_*_key' + register: ubtu18cis_5_3_2_ssh_host_priv_keys + + - name: "5.3.2 | PATCH | Ensure permissions on SSH private host key files are configured | Set permissions" + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + group: root + mode: 0600 + with_items: + - "{{ ubtu18cis_5_3_2_ssh_host_priv_keys.files }}" + when: + - ubtu18cis_rule_5_3_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.2 + - ssh + +- name: "5.3.3 | PATCH | Ensure permissions on SSH public host key files are configured" + block: + - name: "5.3.3 | PATCH | Ensure permissions on SSH public host key files are configured | Find ssh_host public keys" + ansible.builtin.find: + paths: /etc/ssh + patterns: 'ssh_host_*_key.pub' + register: ubtu18cis_5_3_3_ssh_host_pub_keys + + - name: "5.3.3 | PATCH | Ensure permissions on SSH public host key files are configured | Set permissions" + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + group: root + mode: 0644 + with_items: + - "{{ ubtu18cis_5_3_3_ssh_host_pub_keys.files }}" + when: + - ubtu18cis_rule_5_3_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.3 + - ssh + +- name: "5.3.4 | PATCH | Ensure SSH access is limited" + block: + - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add allowed users" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^AllowUsers|^#AllowUsers' + line: 'AllowUsers {{ ubtu18cis_sshd.allow_users }}' + notify: restart sshd + when: "ubtu18cis_sshd['allow_users']|default('') != ''" + + - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add allowed groups" 
+ ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^AllowGroups|^#AllowGroups' + line: 'AllowGroups {{ ubtu18cis_sshd.allow_groups }}' + notify: restart sshd + when: "ubtu18cis_sshd['allow_groups']|default('') != ''" + + - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add deny users" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^DenyUsers|^#DenyUsers' + line: 'DenyUsers {{ ubtu18cis_sshd.deny_users }}' + notify: restart sshd + when: "ubtu18cis_sshd['deny_users']|default('') != ''" + + - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add deny groups" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^DenyGroups|^#DenyGroups' + line: 'DenyGroups {{ ubtu18cis_sshd.deny_groups }}' + notify: restart sshd + when: "ubtu18cis_sshd['deny_groups']|default('') != ''" + when: + - ubtu18cis_rule_5_3_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.4 + - ssh + +- name: "5.3.5 | PATCH | Ensure SSH LogLevel is appropriate" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^LogLevel|^#LogLevel' + line: 'LogLevel {{ ubtu18cis_sshd.log_level }}' + insertafter: '^# Logging' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.5 + - ssh + +- name: "5.3.6 | PATCH | Ensure SSH X11 forwarding is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^X11Forwarding|^#X11Forwarding' + line: 'X11Forwarding no' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_6 + tags: + - level2-server + - level1-workstation + - automated + - patch + - rule_5.3.6 + - ssh + +- name: "5.3.7 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^MaxAuthTries|^#MaxAuthTries' + line: 'MaxAuthTries {{ ubtu18cis_sshd.max_auth_tries }}' + insertafter: '^# Authentication' + notify: restart sshd 
+ when: + - ubtu18cis_rule_5_3_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.7 + - ssh + +- name: "5.3.8 | PATCH | Ensure SSH IgnoreRhosts is enabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^IgnoreRhosts|^#IgnoreRhosts' + line: 'IgnoreRhosts yes' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_8 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.8 + - ssh + +- name: "5.3.9 | PATCH | Ensure SSH HostbasedAuthentication is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^HostbasedAuthentication|^#HostbasedAuthentication' + line: 'HostbasedAuthentication no' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_9 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.9 + - ssh + +- name: "5.3.10 | PATCH | Ensure SSH root login is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^PermitRootLogin|^#PermitRootLogin' + line: 'PermitRootLogin no' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_10 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.10 + - ssh + +- name: "5.3.11 | PATCH | Ensure SSH PermitEmptyPasswords is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^PermitEmptyPasswords|^#PermitEmptyPasswords' + line: 'PermitEmptyPasswords no' + insertafter: '# To disable tunneled clear text passwords' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_11 + tags: + - level1-server + - level1-workstation + - scored + - patch + - rule_5.3.11 + - ssh + +- name: "5.3.12 | PATCH | Ensure SSH PermitUserEnvironment is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^PermitUserEnvironment|^#PermitUserEnvironment' + line: 'PermitUserEnvironment no' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_12 + tags: + - level1-server + - level1-workstation + - automated + - 
patch + - rule_5.3.12 + - ssh + +- name: "5.3.13 | PATCH | Ensure only strong Ciphers are used" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^Ciphers|^#Ciphers' + line: 'Ciphers {{ ubtu18cis_sshd.ciphers }}' + insertafter: '^# Ciphers and keying' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_13 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.13 + - ssh + +- name: "5.3.14 | PATCH | Ensure only strong MAC algorithms are used" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^MACs|^#MACs' + line: 'MACs {{ ubtu18cis_sshd.macs }}' + insertafter: '^# Ciphers and keying' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_14 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.14 + - ssh + +- name: "5.3.15 | PATCH | Ensure only strong Key Exchange algorithms are used" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^KexAlgorithms|^#KexAlgorithms' + line: 'KexAlgorithms {{ ubtu18cis_sshd.kex_algorithms }}' + insertafter: '^# Ciphers and keying' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_15 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.15 + - ssh + +- name: "5.3.16 | PATCH | Ensure SSH Idle Timeout Interval is configured" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^ClientAliveInterval|^#ClientAliveInterval', line: 'ClientAliveInterval {{ ubtu18cis_sshd.client_alive_interval }}' } + - { regexp: '^ClientAliveCountMax|^#ClientAliveCountMax', line: 'ClientAliveCountMax {{ ubtu18cis_sshd.client_alive_count_max }}' } + notify: restart sshd + when: + - ubtu18cis_rule_5_3_16 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.16 + - sshd + +- name: "5.3.17 | PATCH | Ensure SSH LoginGraceTime is set to one minute or less" + ansible.builtin.lineinfile: + 
path: /etc/ssh/sshd_config + regexp: '^LoginGraceTime|^#LoginGraceTime' + line: 'LoginGraceTime {{ ubtu18cis_sshd.login_grace_time }}' + insertafter: '^# Authentication' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_17 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.17 + - ssh + +- name: "5.3.18 | PATCH | Ensure SSH warning banner is configured" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^Banner|^#Banner' + line: Banner /etc/issue.net + insertafter: '^# no default banner path' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_18 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.18 + - ssh + +- name: "5.3.19 | PATCH | Ensure SSH PAM is enabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^UsePAM|^#UsePAM' + line: 'UsePAM yes' + insertafter: '^# and ChallengeResponseAuthentication' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_19 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.19 + - ssh + - pam + +- name: "5.3.20 | PATCH | Ensure SSH AllowTcpForwarding is disabled" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^AllowTcpForwarding|^#AllowTcpForwarding' + line: 'AllowTcpForwarding no' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_20 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.3.20 + - ssh + +- name: "5.3.21 | PATCH | Ensure SSH MaxStartups is configured" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^MaxStartups|^#MaxStartups' + line: 'MaxStartups 10:30:60' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_21 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.21 + - ssh + +- name: "5.3.22 | PATCH | Ensure SSH MaxSessions is limited" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^MaxSessions|^#MaxSessions' + line: 'MaxSessions {{ 
ubtu18cis_sshd.max_sessions }}' + insertafter: '^# Authentication' + notify: restart sshd + when: + - ubtu18cis_rule_5_3_22 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.3.22 + - ssh diff --git a/tasks/section_5/cis_5.4.x.yml b/tasks/section_5/cis_5.4.x.yml new file mode 100644 index 0000000..09f9823 --- /dev/null +++ b/tasks/section_5/cis_5.4.x.yml @@ -0,0 +1,184 @@ +--- +- name: "5.4.1 | PATCH | Ensure password creation requirements are configured" + block: + - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Install pam_pwquality module" + ansible.builtin.package: + name: libpam-pwquality + state: present + + - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Add minlen" + ansible.builtin.lineinfile: + path: /etc/security/pwquality.conf + regexp: '^minlen|^# minlen' + line: minlen = 14 + + - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Add minclass" + ansible.builtin.lineinfile: + path: /etc/security/pwquality.conf + regexp: '^minclass|^# minclass' + line: 'minclass = 4' + + - name: "5.4.1 | AUDIT | Ensure password creation requirements are configured | Confirm pwquality module in common-password" + ansible.builtin.shell: grep 'password.*requisite.*pam_pwquality.so' /etc/pam.d/common-password + changed_when: false + failed_when: false + register: ubtu18cis_5_4_1_pam_pwquality_state + + - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality exists" + community.general.pamd: + name: common-password + type: password + control: requisite + module_path: pam_pwquality.so + module_arguments: 'retry=3' + state: args_present + when: ubtu18cis_5_4_1_pam_pwquality_state.stdout | length > 0 + + - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality does not exist" + community.general.pamd: + name: common-password + type: password + control: 
required + module_path: pam_permit.so + new_type: password + new_control: requisite + new_module_path: pam_pwquality.so + module_arguments: 'retry=3' + state: after + when: ubtu18cis_5_4_1_pam_pwquality_state.stdout | length == 0 + when: + - ubtu18cis_rule_5_4_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.4.1 + - pam + +# ------------- +# ------------- +# There is a bug in pam_tally2.so where the use of the audit keyword may log credentials in the case of user error during authentication. +# To work around this bug the CIS documentation has you setting pam_tally2 to the account section. +# Once bug is fixed please set pam_tally2 to the auth sections. We have those commented out in the task +# ------------- +# ------------- + +# ------------- +# ------------- +# figure out why pam_deny kills vagrant user +# ------------- +# ------------- +- name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured" + block: + - name: "5.4.2 | AUDIT | Ensure lockout for failed password attempts is configured | Confirm pam_tally2.so module in common-password" + ansible.builtin.shell: grep 'auth.*required.*pam_tally2.so' /etc/pam.d/common-auth + changed_when: false + failed_when: false + register: ubtu18cis_5_4_2_pam_tally2_state + + - name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_tally2.so settings if exists" + community.general.pamd: + name: common-auth + type: auth + control: required + module_path: pam_tally2.so + module_arguments: 'onerr=fail + audit + silent + deny=5 + unlock_time=900' + when: ubtu18cis_5_4_2_pam_tally2_state.stdout | length > 0 + + - name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_tally2.so settings if does not exist" + ansible.builtin.lineinfile: + path: /etc/pam.d/common-auth + line: 'auth required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900' + insertafter: '^# end of pam-auth-update config' + 
when: ubtu18cis_5_4_2_pam_tally2_state.stdout | length == 0 + + - name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured | Set pam_deny.so and pam_tally2.so" + ansible.builtin.lineinfile: + path: /etc/pam.d/common-account + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: '^# end of pam-auth-update config' + with_items: + - { regexp: '^account.*required.*pam_tally2.so', line: 'account required pam_tally2.so' } + when: + - ubtu18cis_rule_5_4_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.4.2 + - pamd + +- name: "5.4.3 | PATCH | Ensure password reuse is limited" + block: + - name: "5.4.3 | AUDIT | Ensure password reuse is limited | Confirm pam_pwhistory.so in common-password" + ansible.builtin.shell: grep 'password.*required.*pam_pwhistory.so' /etc/pam.d/common-password + changed_when: false + failed_when: false + register: ubtu18cis_5_4_3_pam_pwhistory_state + + - name: "5.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory exists" + community.general.pamd: + name: common-password + type: password + control: required + module_path: pam_pwhistory.so + module_arguments: 'remember={{ ubtu18cis_pamd_pwhistory_remember }}' + state: args_present + when: ubtu18cis_5_4_3_pam_pwhistory_state.stdout | length > 0 + + - name: "5.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory does not exist" + ansible.builtin.lineinfile: + path: /etc/pam.d/common-password + line: 'password required pam_pwhistory.so remember={{ ubtu18cis_pamd_pwhistory_remember }}' + insertbefore: 'pam_unix.so' + when: ubtu18cis_5_4_3_pam_pwhistory_state.stdout | length == 0 + when: + - ubtu18cis_rule_5_4_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.4.3 + - pamd + +- name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512" + block: + - name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512 | 
Confirm pam_unix.so" + ansible.builtin.shell: grep -E '^\s*password\s+(\S+\s+)+pam_unix\.so\s+(\S+\s+)*sha512\s*(\S+\s*)*(\s+#.*)?$' /etc/pam.d/common-password + changed_when: false + failed_when: false + register: ubtu18cis_5_4_4_pam_unix_state + + - name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so exists" + community.general.pamd: + name: common-password + type: password + control: '[success=1 default=ignore]' + module_path: pam_unix.so + module_arguments: sha512 + state: args_present + when: ubtu18cis_5_4_4_pam_unix_state.stdout | length > 0 + + - name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so does not exist" + ansible.builtin.lineinfile: + path: /etc/pam.d/common-password + line: 'password [success=1 default=ignore] pam_unix.so sha512' + insertbefore: '^# end of pam-auth-update config' + when: ubtu18cis_5_4_4_pam_unix_state.stdout | length == 0 + when: + - ubtu18cis_rule_5_4_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.4.4 + - pamd diff --git a/tasks/section_5/cis_5.5.1.x.yml b/tasks/section_5/cis_5.5.1.x.yml new file mode 100644 index 0000000..62bfc3f --- /dev/null +++ b/tasks/section_5/cis_5.5.1.x.yml @@ -0,0 +1,146 @@ +--- +- name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured" + block: + - name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set /etc/login.defs PASS_MIN_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MIN_DAYS|^#PASS_MIN_DAYS' + line: 'PASS_MIN_DAYS {{ ubtu18cis_pass.min_days }}' + + - name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set existing users PASS_MIN_DAYS" + ansible.builtin.shell: chage --mindays {{ ubtu18cis_pass.min_days }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: 
ubtu18cis_disruption_high + when: + - ubtu18cis_rule_5_5_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.1.1 + - user + - login + +- name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less" + block: + - name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set /etc/login.defs PASS_MAX_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MAX_DAYS|^#PASS_MAX_DAYS' + line: 'PASS_MAX_DAYS {{ ubtu18cis_pass.max_days }}' + insertafter: '# Password aging controls' + + - name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set existing users PASS_MAX_DAYS" + ansible.builtin.shell: chage --maxdays {{ ubtu18cis_pass.max_days }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: ubtu18cis_disruption_high + when: + - ubtu18cis_rule_5_5_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.1.2 + - user + - login + +- name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more" + block: + - name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set /etc/login.defs PASS_WARN_AGE" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_WARN_AGE|^#PASS_WARN_AGE' + line: 'PASS_WARN_AGE {{ ubtu18cis_pass.warn_age }}' + + - name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set existing users PASS_WARN_AGE" + ansible.builtin.shell: chage --warndays {{ ubtu18cis_pass.warn_age }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: ubtu18cis_disruption_high + when: + - ubtu18cis_rule_5_5_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.1.3 + - user + - login + +- name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 
days or less" + block: + - name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for new users" + ansible.builtin.shell: useradd -D -f {{ ubtu18cis_pass.inactive }} + failed_when: false + + - name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for existing users" + ansible.builtin.shell: chage --inactive {{ ubtu18cis_pass.inactive }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu18cis_passwd| selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: ubtu18cis_disruption_high + when: + - ubtu18cis_rule_5_5_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.1.4 + - user + - login + +- name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past" + block: + - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Get current date in Unix Time" + ansible.builtin.shell: echo $(($(date --utc --date "$1" +%s)/86400)) + changed_when: false + failed_when: false + register: ubtu18cis_5_5_1_5_current_time + + - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Get list of users with last changed PW date in future" + ansible.builtin.shell: "cat /etc/shadow | awk -F: '{if($3>{{ ubtu18cis_5_5_1_5_current_time.stdout }})print$1}'" + changed_when: false + failed_when: false + register: ubtu18cis_5_5_1_5_user_list + + - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn about users" + ansible.builtin.debug: + msg: + - "Warning!! The following accounts have the last PW change date in the future" + - "{{ ubtu18cis_5_5_1_5_user_list.stdout_lines }}" + when: ubtu18cis_5_5_1_5_user_list.stdout | length > 0 + + - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '5.5.1.5' + when: ubtu18cis_5_5_1_5_user_list.stdout | length > 0 + + - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Lock accounts with future PW changed dates" + ansible.builtin.shell: passwd --expire {{ item }} + failed_when: false + with_items: + - "{{ ubtu18cis_5_5_1_5_user_list.stdout_lines }}" + when: + - ubtu18cis_disruption_high + - ubtu18cis_5_5_1_5_user_list.stdout | length > 0 + when: + - ubtu18cis_rule_5_5_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.1.5 + - user + - login diff --git a/tasks/section_5/cis_5.5.x.yml b/tasks/section_5/cis_5.5.x.yml new file mode 100644 index 0000000..d1f560e --- /dev/null +++ b/tasks/section_5/cis_5.5.x.yml @@ -0,0 +1,101 @@ +--- +- name: "5.5.2 | PATCH | Ensure system accounts are secured" + block: + - name: "5.5.2 | PATCH | Ensure system accounts are secured | Set system accounts to nologin" + ansible.builtin.user: + name: "{{ item }}" + shell: /sbin/nologin + with_items: + - "{{ ubtu18cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" + when: + - item != "root" + - item != "sync" + - item != "shutdown" + - item != "halt" + + - name: "5.5.2 | PATCH | Ensure system accounts are secured | Lock non-root system accounts" + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + with_items: + - "{{ ubtu18cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" + when: + - item != "root" + when: + - ubtu18cis_rule_5_5_2 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.2 + - user + - system + +- name: "5.5.3 | PATCH | Ensure default group for the root account is GID 0" + block: + - name: "5.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root group to GID 0" + ansible.builtin.group: + name: root + gid: 0 + + - name: 
"5.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root user to root group" + ansible.builtin.user: + name: root + group: root + when: + - ubtu18cis_rule_5_5_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.3 + - user + - system + +- name: "5.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" + ansible.builtin.lineinfile: + path: "{{ item }}" + regexp: '^umask ' + line: "umask {{ ubtu18cis_bash_umask }}" + with_items: + - /etc/bash.bashrc + - /etc/profile + when: + - ubtu18cis_rule_5_5_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.4 + - user + +- name: "5.5.5 | PATCH | Ensure default user shell timeout is 900 seconds or less" + ansible.builtin.blockinfile: + create: true + mode: 0644 + dest: "{{ item.dest }}" + state: "{{ item.state }}" + marker: "# {mark} ANSIBLE MANAGED" + block: | + # Set session timeout - CIS ID 5.5.5 + # only set TMOUT if it isn't set yet to avoid a shell error + : TMOUT={{ ubtu18cis_shell_session_timeout.timeout }} + readonly TMOUT + export TMOUT + with_items: + - { dest: "{{ ubtu18cis_shell_session_timeout.file }}", state: present } + - { dest: /etc/profile, state: "{{ (ubtu18cis_shell_session_timeout.file == '/etc/profile') | ternary('present', 'absent') }}" } + - { dest: /etc/bash.bashrc, state: present } + when: + - ubtu18cis_rule_5_5_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.5.5 + - user diff --git a/tasks/section_5/cis_5.6.yml b/tasks/section_5/cis_5.6.yml new file mode 100644 index 0000000..61c5257 --- /dev/null +++ b/tasks/section_5/cis_5.6.yml @@ -0,0 +1,30 @@ +--- +- name: "5.6 | AUDIT | Ensure root login is restricted to system console" + block: + - name: "5.6 | AUDIT | Ensure root login is restricted to system console | Get list of all terminals" + ansible.builtin.shell: cat /etc/securetty + changed_when: false + failed_when: false + register: 
ubtu18cis_5_6_terminal_list + + - name: "5.6 | AUDIT | Ensure root login is restricted to system console | Message out list" + ansible.builtin.debug: + msg: + - "Warning!! Below is the list of consoles with root login access" + - "Please review for any consoles that are not in a physically secure location" + - "{{ ubtu18cis_5_6_terminal_list.stdout_lines }}" + + - name: "5.6 | AUDIT | Ensure root login is restricted to system console | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '5.6' + when: + - ubtu18cis_rule_5_6 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_5.6 + - user diff --git a/tasks/section_5/cis_5.7.yml b/tasks/section_5/cis_5.7.yml new file mode 100644 index 0000000..da1e6cc --- /dev/null +++ b/tasks/section_5/cis_5.7.yml @@ -0,0 +1,39 @@ +--- +- name: "5.7 | PATCH | Ensure access to the su command is restricted" + block: + - name: "5.7 | PATCH | Ensure access to the su command is restricted | Check for pam_wheel.so module" + ansible.builtin.shell: grep '^auth.*required.*pam_wheel' /etc/pam.d/su + changed_when: false + failed_when: false + register: ubtu18cis_5_7_pam_wheel_status + + - name: "5.7 | PATCH | Ensure access to the su command is restricted | Create empty sugroup" + ansible.builtin.group: + name: "{{ ubtu18cis_su_group }}" + + - name: "5.7 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if exists" + community.general.pamd: + name: su + type: auth + control: required + module_path: pam_wheel.so + module_arguments: 'use_uid group={{ ubtu18cis_su_group }}' + when: ubtu18cis_5_7_pam_wheel_status.stdout | length > 0 + + - name: "5.7 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if does not exist" + ansible.builtin.lineinfile: + path: /etc/pam.d/su + regex: (#)?auth\s+required\s+pam_wheel\.so\n|(#)?auth\s+required\s+pam_wheel\.so(.*?)use_uid + line: 'auth required pam_wheel.so use_uid group={{ ubtu18cis_su_group 
 }}' + create: true + insertafter: '(#)?auth\s+sufficient\s+pam_rootok' + when: ubtu18cis_5_7_pam_wheel_status.stdout | length == 0 + when: + - ubtu18cis_rule_5_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.7 + - user diff --git a/tasks/section_5/main.yml b/tasks/section_5/main.yml new file mode 100644 index 0000000..cdd97ba --- /dev/null +++ b/tasks/section_5/main.yml @@ -0,0 +1,32 @@ +--- +- name: "SECTION | 5.1 | Configure time-based job schedulers" + ansible.builtin.import_tasks: + file: cis_5.1.x.yml + +- name: "SECTION | 5.2 | Configure sudo" + ansible.builtin.import_tasks: + file: cis_5.2.x.yml + +- name: "SECTION | 5.3 | Configure SSH Server" + ansible.builtin.import_tasks: + file: cis_5.3.x.yml + +- name: "SECTION | 5.4.x | User PAM" + ansible.builtin.import_tasks: + file: cis_5.4.x.yml + +- name: "SECTION | 5.5.1.x | User Accounts and Environment part 1" + ansible.builtin.import_tasks: + file: cis_5.5.1.x.yml + +- name: "SECTION | 5.5.x | User Accounts and Environment part 2" + ansible.builtin.import_tasks: + file: cis_5.5.x.yml + +- name: "SECTION | 5.6 | Ensure root login is restricted to system console" + ansible.builtin.import_tasks: + file: cis_5.6.yml + +- name: "SECTION | 5.7 | Ensure access to the su command is restricted" + ansible.builtin.import_tasks: + file: cis_5.7.yml diff --git a/tasks/section_6/cis_6.1.x.yml b/tasks/section_6/cis_6.1.x.yml new file mode 100644 index 0000000..58805cd --- /dev/null +++ b/tasks/section_6/cis_6.1.x.yml @@ -0,0 +1,383 @@ +--- +- name: "6.1.1 | AUDIT | Audit system file permissions" + block: + - name: "6.1.1 | AUDIT | Audit system file permissions | Register package list" + ansible.builtin.shell: dpkg-query -W -f='${binary:Package}\n' + changed_when: false + failed_when: false + register: ubtu18cis_6_1_1_packages + when: + - not ubtu18cis_manual_audit_dpkg + + - name: "6.1.1 | AUDIT | Audit system file permissions | Audit the packages" + ansible.builtin.shell: "dpkg --verify 
{{ item }}" + changed_when: false + failed_when: false + register: ubtu18cis_6_1_1_packages_audited + with_items: "{{ ubtu18cis_6_1_1_packages.stdout_lines }}" + when: + - not ubtu18cis_manual_audit_dpkg + + - name: "6.1.1 | AUDIT | Audit system file permissions | Merge Package Name And Stdout." + ansible.builtin.set_fact: + ubtu18cis_6_1_1_packages_issues: "{{ merged|default({}) | combine( {item.item: item.stdout} ) }}" + with_items: + - "{{ ubtu18cis_6_1_1_packages_audited.results }}" + when: + - not ubtu18cis_manual_audit_dpkg + - ubtu18cis_6_1_1_packages is defined + - item.stdout | length > 0 + + - name: "6.1.1 | AUDIT | Audit system file permissions | Audited Check Results." + ansible.builtin.debug: + msg: + - "Warning!! The packages below may not have the correct file permissions." + - "Please review the list below and make sure they conform to site policies." + - "{{ ubtu18cis_6_1_1_packages_issues }}" + when: + - not ubtu18cis_manual_audit_dpkg + + - name: "6.1.1 | AUDIT | Audit system file permissions | Message out packages results for review" + ansible.builtin.debug: + msg: + - "Warning!! Please validate system file permissions." + - "You can run dpkg --verify and if nothing is returned the package is installed correctly" + when: ubtu18cis_manual_audit_dpkg + + - name: "6.1.1 | AUDIT | Audit system file permissions | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.1.1' + when: + - ubtu18cis_rule_6_1_1 + tags: + - level2-server + - level2-workstation + - manual + - audit + - rule_6.1.1 + - permissions + +- name: "6.1.2 | PATCH | Ensure permissions on /etc/passwd are configured" + ansible.builtin.file: + path: /etc/passwd + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_6_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.2 + - permissions + +- name: "6.1.3 | PATCH | Ensure permissions on /etc/passwd- are configured" + ansible.builtin.file: + path: /etc/passwd- + owner: root + group: root + mode: 0600 + when: + - ubtu18cis_rule_6_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.3 + - permissions + +- name: "6.1.4 | PATCH | Ensure permissions on /etc/group are configured" + ansible.builtin.file: + path: /etc/group + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_6_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.4 + - permissions + +- name: "6.1.5 | PATCH | Ensure permissions on /etc/group- are configured" + ansible.builtin.file: + path: /etc/group- + owner: root + group: root + mode: 0644 + when: + - ubtu18cis_rule_6_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.5 + - permissions + +- name: "6.1.6 | PATCH | Ensure permissions on /etc/shadow are configured" + ansible.builtin.file: + path: /etc/shadow + owner: root + group: shadow + mode: 0640 + when: + - ubtu18cis_rule_6_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.6 + - permissions + +- name: "6.1.7 | PATCH | Ensure permissions on /etc/shadow- are configured" + ansible.builtin.file: + path: /etc/shadow- + owner: root + group: shadow + mode: 0600 + when: + - ubtu18cis_rule_6_1_7 + tags: + - level1-server + - level1-workstation + - automated + - 
patch + - rule_6.1.7 + - permissions + +- name: "6.1.8 | PATCH | Ensure permissions on /etc/gshadow are configured" + ansible.builtin.file: + path: /etc/gshadow + owner: root + group: shadow + mode: 0640 + when: + - ubtu18cis_rule_6_1_8 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.8 + - permissions + +- name: "6.1.9 | PATCH | Ensure permissions on /etc/gshadow- are configured" + ansible.builtin.file: + path: /etc/gshadow- + owner: root + group: shadow + mode: 0640 + when: + - ubtu18cis_rule_6_1_9 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.9 + - permissions + +- name: "6.1.10 | PATCH | Ensure no world writable files exist" + block: + - name: "6.1.10 | PATCH | Ensure no world writable files exist | Get list of world-writable files" + ansible.builtin.shell: find {{ item.mount }} -xdev -type f -perm -0002 + changed_when: false + failed_when: false + register: ubtu18cis_6_1_10_wwf + with_items: + - "{{ ansible_mounts }}" + + - name: "6.1.10 | PATCH | Ensure no world writable files exist | Adjust world-writable files if they exist" + ansible.builtin.file: + path: "{{ item }}" + mode: o-w + with_items: + - "{{ ubtu18cis_6_1_10_wwf.results | map(attribute='stdout_lines') | flatten }}" + when: ubtu18cis_no_world_write_adjust + when: + - ubtu18cis_rule_6_1_10 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.10 + - permissions + +- name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist" + block: + - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Finding all unowned files or directories" + ansible.builtin.shell: find "{{ item.mount }}" -xdev -nouser + changed_when: false + failed_when: false + check_mode: false + register: ubtu18cis_6_1_11_audit + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.mount }}" + when: item['device'].startswith('/dev') and not 'bind' in item['options'] + + - name: "6.1.11 | AUDIT 
| Ensure no unowned files or directories exist | set fact" + ansible.builtin.set_fact: + ubtu18cis_6_1_11_unowned_files_found: true + loop: "{{ ubtu18cis_6_1_11_audit.results }}" + when: + - item.stdout is defined + - item.stdout | length > 0 + + - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Displaying any unowned files or directories" + ansible.builtin.debug: + msg: "Warning!! Missing owner on items in {{ ubtu18cis_6_1_11_audit | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa: jinja[invalid] + when: ubtu18cis_6_1_11_unowned_files_found + + - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | warning" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.1.11' + when: ubtu18cis_6_1_11_unowned_files_found + vars: + ubtu18cis_6_1_11_unowned_files_found: false + when: + - ubtu18cis_rule_6_1_11 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.11 + - permissions + +- name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist" + block: + - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | Finding all ungrouped files or directories" + ansible.builtin.shell: find "{{ item.mount }}" -xdev -nogroup + check_mode: false + failed_when: false + changed_when: false + register: ubtu18cis_6_1_12_audit + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.mount }}" + when: item['device'].startswith('/dev') and not 'bind' in item['options'] + + - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | set fact" + ansible.builtin.set_fact: + ubtu18cis_6_1_12_ungrouped_files_found: true + loop: "{{ ubtu18cis_6_1_12_audit.results }}" + when: + - item.stdout is defined + - item.stdout | length > 0 + + - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | Displaying any unowned files or directories" + ansible.builtin.debug: + msg: "Warning!! 
 Missing group on items in {{ ubtu18cis_6_1_12_audit | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa: jinja[invalid] + when: ubtu18cis_6_1_12_ungrouped_files_found + + - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | warning" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.1.12' + when: ubtu18cis_6_1_12_ungrouped_files_found + vars: + ubtu18cis_6_1_12_ungrouped_files_found: false + when: + - ubtu18cis_rule_6_1_12 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.12 + - permissions + +- name: "6.1.13 | AUDIT | Audit SUID executables" + block: + - name: "6.1.13 | AUDIT | Audit SUID executables | Find all SUID executables" + ansible.builtin.shell: df {{ item.mount }} -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -4000 + changed_when: false + failed_when: false + register: ubtu18cis_6_1_13_suid_perms + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.mount }}" + + - name: "6.1.13 | AUDIT | Audit SUID executables | set fact SUID executables" + ansible.builtin.set_fact: + ubtu18cis_6_1_13_suid_found: true + loop: "{{ ubtu18cis_6_1_13_suid_perms.results }}" + when: item.stdout | length > 0 + + - name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist" + ansible.builtin.debug: + msg: "Warning!! 
SUID on items in {{ ubtu18cis_6_1_13_suid_perms | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa: jinja[invalid] + when: ubtu18cis_6_1_13_suid_found + + - name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist | warning" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.1.13' + when: ubtu18cis_6_1_13_suid_found + vars: + ubtu18cis_6_1_13_suid_found: false + when: + - ubtu18cis_rule_6_1_13 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_6.1.13 + - permissions + +- name: "6.1.14 | AUDIT | Audit SGID executables" + block: + - name: "6.1.14 | AUDIT | Audit SGID executables | Find all SGID executables" + ansible.builtin.shell: df {{ item.mount }} -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -2000 + changed_when: false + failed_when: false + register: ubtu18cis_6_1_14_sgid_perms + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.mount }}" + + - name: "6.1.14 | AUDIT | Audit SGID executables | Set fact SGID executables" + ansible.builtin.set_fact: + ubtu18cis_6_1_14_sgid_found: true + loop: "{{ ubtu18cis_6_1_14_sgid_perms.results }}" + when: item.stdout | length > 0 + + - name: "6.1.14 | AUDIT | Audit SGID executables | Alert SGID executables exist" + ansible.builtin.debug: + msg: "Warning!! 
SGID on items in {{ ubtu18cis_6_1_14_sgid_perms | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa: jinja[invalid] + when: ubtu18cis_6_1_14_sgid_found + + - name: "6.1.14 | AUDIT | Audit SGID executables| warning" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.1.14' + when: ubtu18cis_6_1_14_sgid_found + vars: + ubtu18cis_6_1_14_sgid_found: false + when: + - ubtu18cis_rule_6_1_14 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_6.1.14 + - permissions diff --git a/tasks/section_6/cis_6.2.x.yml b/tasks/section_6/cis_6.2.x.yml new file mode 100644 index 0000000..b5fd197 --- /dev/null +++ b/tasks/section_6/cis_6.2.x.yml @@ -0,0 +1,620 @@ +--- +- name: "6.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords" + block: + - name: "6.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords | Get users not using shadowed passwords" + ansible.builtin.shell: awk -F':' '($2 != "x" ) { print $1}' /etc/passwd + changed_when: false + failed_when: false + register: ubtu18cis_6_2_1_nonshadowed_users + + - name: "6.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords | Alert on findings" + ansible.builtin.debug: + msg: + - "Warning!! You have users that are not using a shadowed password. Please convert the below accounts to use a shadowed password" + - "{{ ubtu18cis_6_2_1_nonshadowed_users.stdout_lines }}" + when: + - ubtu18cis_6_2_1_nonshadowed_users.stdout | length > 0 + + - name: "6.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords | Warn Count." 
 + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.1' + when: + - ubtu18cis_6_2_1_nonshadowed_users.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_1 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.1 + - user_accounts + +- name: "6.2.2 | PATCH | Ensure password fields are not empty" + block: + - name: "6.2.2 | AUDIT | Ensure password fields are not empty | Find users with no password" + ansible.builtin.shell: awk -F":" '($2 == "" ) { print $1 }' /etc/shadow + changed_when: false + failed_when: false + check_mode: false + register: ubtu18cis_6_2_2_empty_password_acct + + - name: "6.2.2 | PATCH | Ensure password fields are not empty | Lock users with empty password" + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + with_items: + - "{{ ubtu18cis_6_2_2_empty_password_acct.stdout_lines }}" + when: ubtu18cis_6_2_2_empty_password_acct.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.2 + - user + - permissions + +- name: "6.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group" + block: + - name: "6.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Check /etc/passwd entries" + ansible.builtin.shell: pwck -r | grep 'no group' | awk '{ gsub("[:\47]",""); print $2}' + changed_when: false + failed_when: false + register: ubtu18cis_6_2_3_passwd_gid_check + + - name: "6.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Print message that all groups match between passwd and group files" + ansible.builtin.debug: + msg: "Good News! 
 There are no users that have non-existent GIDs (Groups)" + when: ubtu18cis_6_2_3_passwd_gid_check.stdout | length == 0 + + - name: "6.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Print warning about users with invalid GIDs missing GID entries in /etc/group" + ansible.builtin.debug: + msg: "Warning!! The following users have non-existent GIDs (Groups): {{ ubtu18cis_6_2_3_passwd_gid_check.stdout_lines | join (', ') }}" + when: ubtu18cis_6_2_3_passwd_gid_check.stdout | length > 0 + + - name: "6.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.3' + when: ubtu18cis_6_2_3_passwd_gid_check.stdout | length > 0 + + when: + - ubtu18cis_rule_6_2_3 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.3 + - groups + +- name: "6.2.4 | PATCH | Ensure all users' home directories exist" + block: + - name: capture audit task for missing homedirs + block: &u18s_homedir_audit + - name: "6.2.4 | PATCH | Ensure all users' home directories exist | Find users missing home directories" + ansible.builtin.shell: pwck -r | grep -P {{ ld_regex | quote }} + check_mode: false + register: ubtu18cis_users_missing_home + changed_when: ubtu18cis_6_2_4_audit | length > 0 + # failed_when: 0: success, 1: no grep match, 2: pwck found something + failed_when: ubtu18cis_users_missing_home.rc not in [0,1,2] + + ### NOTE: due to https://github.com/ansible/ansible/issues/24862 This is a shell command, and is quite frankly less than ideal. 
+ - name: "6.2.4 | PATCH | Ensure all users' home directories exist| Creates home directories" + ansible.builtin.shell: "mkhomedir_helper {{ item }}" + # check_mode: "{{ ubtu18cis_disruptive_check_mode }}" + with_items: "{{ ubtu18cis_6_2_4_audit | map(attribute='id') | list }}" + when: + - ubtu18cis_users_missing_home is changed + - ubtu18cis_disruption_high + + ### NOTE: Now we need to address that SELINUX will not let mkhomedir_helper create home directories for UUID < 500, so the ftp user will still show up in a pwck. Not sure this is needed, I need to confirm if that user is removed in an earlier task. + ### ^ Likely doesn't matter as 6.2.7 defines "local interactive users" as those w/ uid 1000-4999 + - name: replay audit task + block: *u18s_homedir_audit + + # CAUTION: debug loops don't show changed since 2.4: + # Fix: https://github.com/ansible/ansible/pull/59958 + - name: "6.2.4 | PATCH | Ensure all users' home directories exist | Alert about correcting owner and group" + ansible.builtin.debug: + msg: "You will need to mkdir -p {{ item }} and chown properly to the correct owner and group." 
+ with_items: "{{ ubtu18cis_6_2_4_audit | map(attribute='dir') | list }}" + changed_when: ubtu18cis_audit_complex + when: + - ubtu18cis_users_missing_home is changed # noqa: no-handler + vars: + ld_regex: >- + ^user '(?P.*)': directory '(?P.*)' does not exist$ + ld_users: "{{ ubtu18cis_users_missing_home.stdout_lines | map('regex_replace', ld_regex, '\\g') | list }}" + ubtu18cis_6_2_4_audit: "{{ ubtu18cis_passwd | selectattr('uid', '>=', 1000) | selectattr('id', 'in', ld_users) | list }}" + when: + - ubtu18cis_rule_6_2_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.4 + - user + +- name: "6.2.5 | PATCH | Ensure users own their home directories" + ansible.builtin.file: + path: "{{ item.dir }}" + owner: "{{ item.id }}" + state: directory + with_items: "{{ ubtu18cis_passwd }}" + loop_control: + label: "{{ ubtu18cis_passwd_label }}" + when: + - ubtu18cis_rule_6_2_5 + - item.uid >= ubtu18cis_int_gid + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.5 + - user + +- name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive" + block: + - name: "6.2.6 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Stat home directories" + ansible.builtin.stat: + path: "{{ item }}" + with_items: "{{ ubtu18cis_passwd | selectattr('uid', '>=', 1000) | selectattr('uid', '!=', 65534) | map(attribute='dir') | list }}" + register: ubtu18cis_6_2_6_audit + + - name: "6.2.6 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Find home directories more 750" + ansible.builtin.shell: find -H {{ item.0 | quote }} -not -type l -perm /027 + register: ubtu18cis_6_2_6_patch_audit + changed_when: ubtu18cis_6_2_6_patch_audit.stdout | length > 0 + when: + - item.1.exists + with_together: + - "{{ ubtu18cis_6_2_6_audit.results | map(attribute='item') | list }}" + - "{{ ubtu18cis_6_2_6_audit.results | map(attribute='stat') | list }}" + 
loop_control: + label: "{{ item.0 }}" + + - name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set home perms" + ansible.builtin.file: + path: "{{ item.0 }}" + recurse: true + mode: a-st,g-w,o-rwx + register: ubtu18cis_6_2_6_patch + when: + - ubtu18cis_disruption_high + - item.1.exists + with_together: + - "{{ ubtu18cis_6_2_6_audit.results | map(attribute='item') | list }}" + - "{{ ubtu18cis_6_2_6_audit.results | map(attribute='stat') | list }}" + loop_control: + label: "{{ item.0 }}" + + # set default ACLs so the homedir has an effective umask of 0027 + - name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set ACL's" + ansible.posix.acl: + path: "{{ item.0 }}" + default: true + state: present + recursive: true + etype: "{{ item.1.etype }}" + permissions: "{{ item.1.mode }}" + when: not ubtu18cis_system_is_container + with_nested: + - "{{ (ansible_check_mode | ternary(ubtu18cis_6_2_6_patch_audit, ubtu18cis_6_2_6_patch)).results | + rejectattr('skipped', 'defined') | map(attribute='item') | map('first') | list }}" + - + - etype: group + mode: rx + - etype: other + mode: '0' + when: + - ubtu18cis_rule_6_2_6 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.6 + - user + +- name: "6.2.7 | PATCH | Ensure users' dot files are not group or world writable" + block: + - name: "6.2.7 | AUDIT | Ensure users' dot files are not group or world-writable | Check for files" + ansible.builtin.shell: find /home/ -name "\.*" -perm /g+w,o+w + changed_when: false + failed_when: false + register: ubtu18cis_6_2_7_audit + + - name: "6.2.7 | AUDIT | Ensure users' dot files are not group or world-writable | Alert on no files found" + ansible.builtin.debug: + msg: "Good news! 
 We have not found any group or world-writable dot files on your system" + failed_when: false + changed_when: false + when: + - ubtu18cis_6_2_7_audit.stdout | length == 0 + + - name: "6.2.7 | PATCH | Ensure users' dot files are not group or world-writable | Changes files if configured" + ansible.builtin.file: + path: '{{ item }}' + mode: go-w + with_items: "{{ ubtu18cis_6_2_7_audit.stdout_lines }}" + when: + - ubtu18cis_6_2_7_audit.stdout | length > 0 + - ubtu18cis_dotperm_ansiblemanaged + + - name: "6.2.7 | AUDIT | Ensure users' dot files are not group or world-writable | Alert on files found" + ansible.builtin.debug: + msg: + - "Warning!! You have group or world-writable files on the system and do not have automation changing the permissions" + - "Please review the files below and adjust permissions to remove group and world writable options" + - "{{ ubtu18cis_6_2_7_audit.stdout_lines }}" + when: + - ubtu18cis_6_2_7_audit.stdout | length > 0 + - not ubtu18cis_dotperm_ansiblemanaged + + - name: "6.2.7 | AUDIT | Ensure users' dot files are not group or world-writable | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.7' + when: + - ubtu18cis_6_2_7_audit.stdout | length > 0 + - not ubtu18cis_dotperm_ansiblemanaged + when: + - ubtu18cis_rule_6_2_7 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.7 + - permissions + +- name: "6.2.8 | PATCH | Ensure no users have .netrc files" + ansible.builtin.file: + dest: "~{{ item }}/.netrc" + state: absent + with_items: + - "{{ ubtu18cis_users.stdout_lines }}" + when: + - ubtu18cis_rule_6_2_8 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.8 + - user + +- name: "6.2.9 | PATCH | Ensure no users have .forward files" + ansible.builtin.file: + dest: "~{{ item }}/.forward" + state: absent + with_items: + - "{{ ubtu18cis_users.stdout_lines }}" + when: + - ubtu18cis_rule_6_2_9 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.9 + - user + +- name: "6.2.10 | PATCH | Ensure no users have .rhosts files" + ansible.builtin.file: + dest: "~{{ item }}/.rhosts" + state: absent + with_items: + - "{{ ubtu18cis_users.stdout_lines }}" + when: + - ubtu18cis_rule_6_2_10 + - ubtu18cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.10 + - user + +- name: "6.2.11 | PATCH | Ensure root is the only UID 0 account" + block: + - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Get non-root users with UID of 0" + ansible.builtin.shell: awk -F":" '($3 == 0 && $1 != \"root\") {i++;print $1 }' /etc/passwd + changed_when: false + failed_when: false + register: ubtu18cis_6_2_11_uid_0_notroot + + - name: "6.2.11 | PATCH | Ensure root is the only UID 0 account | Lock UID 0 non-root users" + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + with_items: + - "{{ ubtu18cis_6_2_11_uid_0_notroot.stdout_lines }}" + 
 when: + - ubtu18cis_disruption_high + - ubtu18cis_6_2_11_uid_0_notroot.stdout | length > 0 + + - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Alert about non-root accounts with UID 0" + ansible.builtin.debug: + msg: + - "Warning!! You have non-root users with a UID of 0 and ubtu18cis_disruption_high enabled" + - "This means the following accounts were password locked and will need to have the UID's manually adjusted" + - "{{ ubtu18cis_6_2_11_uid_0_notroot.stdout_lines }}" + when: + - not ubtu18cis_disruption_high + - ubtu18cis_6_2_11_uid_0_notroot.stdout | length > 0 + + - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.11' + when: + - not ubtu18cis_disruption_high + - ubtu18cis_6_2_11_uid_0_notroot.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_11 + tags: + - level1-server + - level1-workstation + - patch + - automated + - rule_6.2.11 + - user + - root + +- name: "6.2.12 | PATCH | Ensure root PATH Integrity" + block: + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determine empty value" + ansible.builtin.shell: 'echo $PATH | grep ::' + changed_when: false + failed_when: ubtu18cis_6_2_12_path_empty.rc == 0 + register: ubtu18cis_6_2_12_path_empty + + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determine colon end" + ansible.builtin.shell: 'echo $PATH | grep :$' + changed_when: false + failed_when: ubtu18cis_6_2_12_path_colon_end.rc == 0 + register: ubtu18cis_6_2_12_path_colon_end + + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determine working dir" + ansible.builtin.shell: echo "$PATH" + changed_when: false + failed_when: '"." 
 in ubtu18cis_6_2_12_working_dir.stdout_lines' + register: ubtu18cis_6_2_12_working_dir + + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Check paths" + ansible.builtin.stat: + path: "{{ item }}" + register: ubtu18cis_6_2_12_path_stat + with_items: + - "{{ ubtu18cis_6_2_12_working_dir.stdout.split(':') }}" + + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Alert on empty value, colon end, and no working dir" + ansible.builtin.debug: + msg: + - "The following paths have no working directory: {{ ubtu18cis_6_2_12_path_stat.results | selectattr('stat.exists','equalto',false) | map(attribute='item') | list }}" + + - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.12' + + - name: "6.2.12 | PATCH | Ensure root PATH Integrity | Set permissions" + ansible.builtin.file: + path: "{{ item }}" + owner: root + mode: 'o-w,g-w' + state: directory + with_items: + - "{{ ubtu18cis_6_2_12_working_dir.stdout.split(':') }}" + when: + - ubtu18cis_disruption_high + when: + - ubtu18cis_rule_6_2_12 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.2.12 + - user + - root + - notimplemented + +- name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist" + block: + - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Check for duplicate UIDs" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in uid) print $1 ; else uid[$3]}' /etc/passwd" + changed_when: false + failed_when: false + register: ubtu18cis_6_2_13_user_uid_check + + - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Print message that no duplicate UIDs exist" + ansible.builtin.debug: + msg: "Good News! There are no duplicate UID's in the system" + when: ubtu18cis_6_2_13_user_uid_check.stdout | length == 0 + + - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Print warning about users with duplicate UIDs" + ansible.builtin.debug: + msg: "Warning!! 
The following users have UIDs that are duplicates: {{ ubtu18cis_6_2_13_user_uid_check.stdout_lines }}" + when: ubtu18cis_6_2_13_user_uid_check.stdout | length > 0 + + - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Warn Count." + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.13' + when: ubtu18cis_6_2_13_user_uid_check.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_13 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.13 + - user + +- name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist" + block: + - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Check for duplicate GIDs" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in users) print $1 ; else users[$3]}' /etc/group" + changed_when: false + failed_when: false + register: ubtu18cis_6_2_14_user_check + + - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Print message that no duplicate GID's exist" + ansible.builtin.debug: + msg: "Good News! There are no duplicate GIDs in the system" + when: ubtu18cis_6_2_14_user_check.stdout | length == 0 + + - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Print warning about users with duplicate GIDs" + ansible.builtin.debug: + msg: "Warning!! The following groups have duplicate GIDs: {{ ubtu18cis_6_2_14_user_check.stdout_lines }}" + when: ubtu18cis_6_2_14_user_check.stdout | length > 0 + + - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.14' + when: ubtu18cis_6_2_14_user_check.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_14 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.14 + - groups + +- name: "6.2.15 | AUDIT | Ensure no duplicate user names exist" + block: + - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Check for duplicate User Names" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($1 in users) print $1 ; else users[$1]}' /etc/passwd" + changed_when: false + failed_when: false + register: ubtu18cis_6_2_15_user_username_check + + - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Print message that no duplicate user names exist" + ansible.builtin.debug: + msg: "Good News! There are no duplicate user names in the system" + when: ubtu18cis_6_2_15_user_username_check.stdout | length == 0 + + - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Print warning about users with duplicate User Names" + ansible.builtin.debug: + msg: "Warning!! The following user names are duplicates: {{ ubtu18cis_6_2_15_user_username_check.stdout_lines }}" + when: ubtu18cis_6_2_15_user_username_check.stdout | length > 0 + + - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.15' + when: ubtu18cis_6_2_15_user_username_check.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_15 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.15 + - user + +- name: "6.2.16 | AUDIT | Ensure no duplicate group names exist" + block: + - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Check for duplicate group names" + ansible.builtin.shell: 'getent passwd | cut -d: -f1 | sort -n | uniq -d' + changed_when: false + failed_when: false + register: ubtu18cis_6_2_16_group_group_check + + - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Print message that no duplicate groups exist" + ansible.builtin.debug: + msg: "Good News! There are no duplicate group names in the system" + when: ubtu18cis_6_2_16_group_group_check.stdout | length == 0 + + - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Print warning about users with duplicate group names" + ansible.builtin.debug: + msg: "Warning!! The following group names are duplicates: {{ ubtu18cis_6_2_16_group_group_check.stdout_lines }}" + when: ubtu18cis_6_2_16_group_group_check.stdout | length > 0 + + - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.16' + when: ubtu18cis_6_2_16_group_group_check.stdout | length > 0 + when: + - ubtu18cis_rule_6_2_16 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.16 + - groups + +- name: "6.2.17 | AUDIT | Ensure shadow group is empty" + block: + - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Get Shadow GID" + ansible.builtin.shell: grep ^shadow /etc/group | cut -f3 -d":" + changed_when: false + failed_when: false + register: ubtu18cis_6_2_17_shadow_gid + + - name: "6.2.17 | AUDIT | Ensure shadow group is empty | List of users with Shadow GID" + ansible.builtin.shell: awk -F":" '($4 == "{{ ubtu18cis_6_2_17_shadow_gid.stdout }}") { print }' /etc/passwd | cut -f1 -d":" + changed_when: false + failed_when: false + register: ubtu18cis_6_2_17_users_shadow_gid + + - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Message on no users" + ansible.builtin.debug: + msg: "Good News! There are no users with the Shado GID on your system" + when: ubtu18cis_6_2_17_users_shadow_gid.stdout | length == 0 + + - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Message on users with Shadow GID" + ansible.builtin.debug: + msg: + - "Warning!! There are users that are in the Shadow group" + - "To conform to CIS standards no users should be in this group" + - "Please move the users below into another group" + - "{{ ubtu18cis_6_2_17_users_shadow_gid.stdout_lines }}" + when: ubtu18cis_6_2_17_users_shadow_gid.stdout | length > 0 + + - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Warn Count." 
+ ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '6.2.17' + when: + - ubtu18cis_rule_6_2_17 + tags: + - level1-server + - level1-workstation + - automated + - audit + - rule_6.2.17 + - groups + - user diff --git a/tasks/section_6/main.yml b/tasks/section_6/main.yml new file mode 100644 index 0000000..9a01d99 --- /dev/null +++ b/tasks/section_6/main.yml @@ -0,0 +1,8 @@ +--- +- name: "SECTION | 6.1 | System File Permissions" + ansible.builtin.import_tasks: + file: cis_6.1.x.yml + +- name: "SECTION | 6.2 | User and Group Settings" + ansible.builtin.import_tasks: + file: cis_6.2.x.yml diff --git a/tasks/warning_facts.yml b/tasks/warning_facts.yml new file mode 100644 index 0000000..6e80487 --- /dev/null +++ b/tasks/warning_facts.yml @@ -0,0 +1,20 @@ +--- + +# This task is used to create variables used in giving a warning summary for manual tasks +# that need attention +# +# The warn_control_list and warn_count vars start life in vars/main.yml but get updated +# as the tasks that have a warning complete +# +# Those two variables are used in the tasks/main.yml to display a list of warnings +# +# warn_control_id is set within the task itself and has the control ID as the value +# +# warn_control_list is the main variable to be used and is a list made up of the warn_control_id’s +# +# warn_count the main variable for the number of warnings and each time a warn_control_id is added +# the count increases by a value of 1 +- name: "{{ warn_control_id }} | AUDIT | Set fact for manual task warning." 
+ ansible.builtin.set_fact: + warn_control_list: "{{ warn_control_list }} [{{ warn_control_id }}]" + warn_count: "{{ warn_count | int + 1 }}" diff --git a/templates/.DS_Store b/templates/.DS_Store deleted file mode 100644 index 0347e7f..0000000 Binary files a/templates/.DS_Store and /dev/null differ diff --git a/templates/ansible_vars_goss.yml.j2 b/templates/ansible_vars_goss.yml.j2 new file mode 100644 index 0000000..15730ee --- /dev/null +++ b/templates/ansible_vars_goss.yml.j2 @@ -0,0 +1,487 @@ +## metadata for Audit benchmark +benchmark_version: '2.1.0' + +# Some audit tests may need to scan every filesystem or have an impact on a system +# these may need be scheduled to minimise impact also ability to set a timeout if taking too long +run_heavy_tests: {{ audit_run_heavy_tests }} +timeout_ms: {{ audit_cmd_timeout }} + + +ubuntu18cis_section1: true +ubuntu18cis_section2: true +ubuntu18cis_section3: true +ubuntu18cis_section4: true +ubuntu18cis_section5: true +ubuntu18cis_section6: true + +ubuntu18cis_level_1: true +ubuntu18cis_level_2: true + +ubuntu18cis_apparmor_disable: true + +# to enable rules that may have IO impact on a system e.g. full filesystem scans or CPU heavy +run_heavy_tests: true + +# True is BIOS based system else set to false +ubuntu18_legacy_boot: true + +ubuntu18_set_boot_pass: true + +# These variables correspond with the CIS rule IDs or paragraph numbers defined in +# the CIS benchmark documents. +# PLEASE NOTE: These work in coordination with the section # group variables and tags. +# You must enable an entire section in order for the variables below to take effect. 
+# Section 1 rules +ubuntu18cis_rule_1_1_1_1: {{ ubtu18cis_rule_1_1_1_1 }} +ubuntu18cis_rule_1_1_1_2: {{ ubtu18cis_rule_1_1_1_2 }} +ubuntu18cis_rule_1_1_1_3: {{ ubtu18cis_rule_1_1_1_3 }} +ubuntu18cis_rule_1_1_1_4: {{ ubtu18cis_rule_1_1_1_4 }} +ubuntu18cis_rule_1_1_1_5: {{ ubtu18cis_rule_1_1_1_5 }} +ubuntu18cis_rule_1_1_1_6: {{ ubtu18cis_rule_1_1_1_6 }} +ubuntu18cis_rule_1_1_2: {{ ubtu18cis_rule_1_1_2 }} +ubuntu18cis_rule_1_1_3: {{ ubtu18cis_rule_1_1_3 }} +ubuntu18cis_rule_1_1_4: {{ ubtu18cis_rule_1_1_4 }} +ubuntu18cis_rule_1_1_5: {{ ubtu18cis_rule_1_1_5 }} +ubuntu18cis_rule_1_1_6: {{ ubtu18cis_rule_1_1_6 }} +ubuntu18cis_rule_1_1_7: {{ ubtu18cis_rule_1_1_7 }} +ubuntu18cis_rule_1_1_8: {{ ubtu18cis_rule_1_1_8 }} +ubuntu18cis_rule_1_1_9: {{ ubtu18cis_rule_1_1_9 }} +ubuntu18cis_rule_1_1_10: {{ ubtu18cis_rule_1_1_10 }} +ubuntu18cis_rule_1_1_11: {{ ubtu18cis_rule_1_1_11 }} +ubuntu18cis_rule_1_1_12: {{ ubtu18cis_rule_1_1_12 }} +ubuntu18cis_rule_1_1_13: {{ ubtu18cis_rule_1_1_13 }} +ubuntu18cis_rule_1_1_14: {{ ubtu18cis_rule_1_1_14 }} +ubuntu18cis_rule_1_1_15: {{ ubtu18cis_rule_1_1_15 }} +ubuntu18cis_rule_1_1_16: {{ ubtu18cis_rule_1_1_16 }} +ubuntu18cis_rule_1_1_17: {{ ubtu18cis_rule_1_1_17 }} +ubuntu18cis_rule_1_1_18: {{ ubtu18cis_rule_1_1_18 }} +ubuntu18cis_rule_1_1_19: {{ ubtu18cis_rule_1_1_19 }} +ubuntu18cis_rule_1_1_20: {{ ubtu18cis_rule_1_1_20 }} +ubuntu18cis_rule_1_1_21: {{ ubtu18cis_rule_1_1_21 }} +ubuntu18cis_rule_1_1_22: {{ ubtu18cis_rule_1_1_22 }} +ubuntu18cis_rule_1_1_23: {{ ubtu18cis_rule_1_1_23 }} +ubuntu18cis_rule_1_1_24: {{ ubtu18cis_rule_1_1_24 }} +ubuntu18cis_rule_1_2_1: {{ ubtu18cis_rule_1_2_1 }} +ubuntu18cis_rule_1_2_2: {{ ubtu18cis_rule_1_2_2 }} +ubuntu18cis_rule_1_3_1: {{ ubtu18cis_rule_1_3_1 }} +ubuntu18cis_rule_1_3_2: {{ ubtu18cis_rule_1_3_2 }}= +ubuntu18cis_rule_1_4_1: {{ ubtu18cis_rule_1_4_1 }} +ubuntu18cis_rule_1_4_2: {{ ubtu18cis_rule_1_4_2 }} +ubuntu18cis_rule_1_4_3: {{ ubtu18cis_rule_1_4_3 }} +ubuntu18cis_rule_1_4_4: {{ ubtu18cis_rule_1_4_4 }} 
+ubuntu18cis_rule_1_5_1: {{ ubtu18cis_rule_1_5_1 }} +ubuntu18cis_rule_1_5_2: {{ ubtu18cis_rule_1_5_2 }} +ubuntu18cis_rule_1_5_3: {{ ubtu18cis_rule_1_5_3 }} +ubuntu18cis_rule_1_5_4: {{ ubtu18cis_rule_1_5_4 }} +ubuntu18cis_rule_1_6_1_1: {{ ubtu18cis_rule_1_6_1_1 }} +ubuntu18cis_rule_1_6_1_2: {{ ubtu18cis_rule_1_6_1_2 }} +ubuntu18cis_rule_1_6_1_3: {{ ubtu18cis_rule_1_6_1_3 }} +ubuntu18cis_rule_1_6_1_4: {{ ubtu18cis_rule_1_6_1_4 }} +ubuntu18cis_rule_1_7_1: {{ ubtu18cis_rule_1_7_1 }} +ubuntu18cis_rule_1_7_2: {{ ubtu18cis_rule_1_7_2 }} +ubuntu18cis_rule_1_7_3: {{ ubtu18cis_rule_1_7_3 }} +ubuntu18cis_rule_1_7_4: {{ ubtu18cis_rule_1_7_4 }} +ubuntu18cis_rule_1_7_5: {{ ubtu18cis_rule_1_7_5 }} +ubuntu18cis_rule_1_7_6: {{ ubtu18cis_rule_1_7_6 }} +ubuntu18cis_rule_1_8_1: {{ ubtu18cis_rule_1_8_1 }} +ubuntu18cis_rule_1_8_2: {{ ubtu18cis_rule_1_8_2 }} +ubuntu18cis_rule_1_8_3: {{ ubtu18cis_rule_1_8_3 }} +ubuntu18cis_rule_1_8_4: {{ ubtu18cis_rule_1_8_4 }} +ubuntu18cis_rule_1_9: {{ ubtu18cis_rule_1_9 }} + +# section 2 rules + +ubuntu18cis_rule_2_1_1_1: {{ ubtu18cis_rule_2_1_1_1 }} +ubuntu18cis_rule_2_1_1_2: {{ ubtu18cis_rule_2_1_1_2 }} +ubuntu18cis_rule_2_1_1_3: {{ ubtu18cis_rule_2_1_1_3 }} +ubuntu18cis_rule_2_1_1_4: {{ ubtu18cis_rule_2_1_1_4 }} +ubuntu18cis_rule_2_1_2: {{ ubtu18cis_rule_2_1_2 }} +ubuntu18cis_rule_2_1_3: {{ ubtu18cis_rule_2_1_3 }} +ubuntu18cis_rule_2_1_4: {{ ubtu18cis_rule_2_1_4 }} +ubuntu18cis_rule_2_1_5: {{ ubtu18cis_rule_2_1_5 }} +ubuntu18cis_rule_2_1_6: {{ ubtu18cis_rule_2_1_6 }} +ubuntu18cis_rule_2_1_7: {{ ubtu18cis_rule_2_1_7 }} +ubuntu18cis_rule_2_1_8: {{ ubtu18cis_rule_2_1_8 }} +ubuntu18cis_rule_2_1_9: {{ ubtu18cis_rule_2_1_9 }} +ubuntu18cis_rule_2_1_10: {{ ubtu18cis_rule_2_1_10 }} +ubuntu18cis_rule_2_1_11: {{ ubtu18cis_rule_2_1_11 }} +ubuntu18cis_rule_2_1_12: {{ ubtu18cis_rule_2_1_12 }} +ubuntu18cis_rule_2_1_13: {{ ubtu18cis_rule_2_1_13 }} +ubuntu18cis_rule_2_1_14: {{ ubtu18cis_rule_2_1_14 }} +ubuntu18cis_rule_2_1_15: {{ ubtu18cis_rule_2_1_15 }} 
+ubuntu18cis_rule_2_1_16: {{ ubtu18cis_rule_2_1_16 }} +ubuntu18cis_rule_2_1_17: {{ ubtu18cis_rule_2_1_17 }} +ubuntu18cis_rule_2_2_1: {{ ubtu18cis_rule_2_2_1 }} +ubuntu18cis_rule_2_2_2: {{ ubtu18cis_rule_2_2_2 }} +ubuntu18cis_rule_2_2_3: {{ ubtu18cis_rule_2_2_3 }} +ubuntu18cis_rule_2_2_4: {{ ubtu18cis_rule_2_2_4 }} +ubuntu18cis_rule_2_2_5: {{ ubtu18cis_rule_2_2_5 }} +ubuntu18cis_rule_2_2_6: {{ ubtu18cis_rule_2_2_6 }} +ubuntu18cis_rule_2_3: {{ ubtu18cis_rule_2_3 }} + +# Section 3 rules +ubuntu18cis_rule_3_1_1: {{ ubtu18cis_rule_3_1_1 }} +ubuntu18cis_rule_3_1_2: {{ ubtu18cis_rule_3_1_2 }} +ubuntu18cis_rule_3_2_1: {{ ubtu18cis_rule_3_2_1 }} +ubuntu18cis_rule_3_2_2: {{ ubtu18cis_rule_3_2_2 }} +ubuntu18cis_rule_3_3_1: {{ ubtu18cis_rule_3_3_1 }} +ubuntu18cis_rule_3_3_2: {{ ubtu18cis_rule_3_3_2 }} +ubuntu18cis_rule_3_3_3: {{ ubtu18cis_rule_3_3_3 }} +ubuntu18cis_rule_3_3_4: {{ ubtu18cis_rule_3_3_4 }} +ubuntu18cis_rule_3_3_5: {{ ubtu18cis_rule_3_3_5 }} +ubuntu18cis_rule_3_3_6: {{ ubtu18cis_rule_3_3_6 }} +ubuntu18cis_rule_3_3_7: {{ ubtu18cis_rule_3_3_7 }} +ubuntu18cis_rule_3_3_8: {{ ubtu18cis_rule_3_3_8 }} +ubuntu18cis_rule_3_3_9: {{ ubtu18cis_rule_3_3_9 }} +ubuntu18cis_rule_3_4_1: {{ ubtu18cis_rule_3_4_1 }} +ubuntu18cis_rule_3_4_2: {{ ubtu18cis_rule_3_4_2 }} +ubuntu18cis_rule_3_4_3: {{ ubtu18cis_rule_3_4_3 }} +ubuntu18cis_rule_3_4_4: {{ ubtu18cis_rule_3_4_4 }} +# UFW +ubuntu18cis_rule_3_5_1_1: {{ ubtu18cis_rule_3_5_1_1 }} +ubuntu18cis_rule_3_5_1_2: {{ ubtu18cis_rule_3_5_1_2 }} +ubuntu18cis_rule_3_5_1_3: {{ ubtu18cis_rule_3_5_1_3 }} +ubuntu18cis_rule_3_5_1_4: {{ ubtu18cis_rule_3_5_1_4 }} +ubuntu18cis_rule_3_5_1_5: {{ ubtu18cis_rule_3_5_1_5 }} +ubuntu18cis_rule_3_5_1_6: {{ ubtu18cis_rule_3_5_1_6 }} +ubuntu18cis_rule_3_5_1_7: {{ ubtu18cis_rule_3_5_1_7 }} +# NFTables +ubuntu18cis_rule_3_5_2_1: {{ ubtu18cis_rule_3_5_2_1 }} +ubuntu18cis_rule_3_5_2_2: {{ ubtu18cis_rule_3_5_2_2 }} +ubuntu18cis_rule_3_5_2_3: {{ ubtu18cis_rule_3_5_2_3 }} +ubuntu18cis_rule_3_5_2_4: {{ 
ubtu18cis_rule_3_5_2_4 }} +ubuntu18cis_rule_3_5_2_5: {{ ubtu18cis_rule_3_5_2_5 }} +ubuntu18cis_rule_3_5_2_6: {{ ubtu18cis_rule_3_5_2_6 }} +ubuntu18cis_rule_3_5_2_7: {{ ubtu18cis_rule_3_5_2_7 }} +ubuntu18cis_rule_3_5_2_8: {{ ubtu18cis_rule_3_5_2_8 }} +ubuntu18cis_rule_3_5_2_9: {{ ubtu18cis_rule_3_5_2_9 }} +ubuntu18cis_rule_3_5_2_10: {{ ubtu18cis_rule_3_5_2_10 }} +# IPTables +ubuntu18cis_rule_3_5_3_1_1: {{ ubtu18cis_rule_3_5_3_1_1 }} +ubuntu18cis_rule_3_5_3_1_2: {{ ubtu18cis_rule_3_5_3_1_2 }} +ubuntu18cis_rule_3_5_3_1_3: {{ ubtu18cis_rule_3_5_3_1_3 }} +ubuntu18cis_rule_3_5_3_2_1: {{ ubtu18cis_rule_3_5_3_2_1 }} +ubuntu18cis_rule_3_5_3_2_2: {{ ubtu18cis_rule_3_5_3_2_2 }} +ubuntu18cis_rule_3_5_3_2_3: {{ ubtu18cis_rule_3_5_3_2_3 }} +ubuntu18cis_rule_3_5_3_2_4: {{ ubtu18cis_rule_3_5_3_2_4 }} +ubuntu18cis_rule_3_5_3_3_1: {{ ubtu18cis_rule_3_5_3_3_1 }} +ubuntu18cis_rule_3_5_3_3_2: {{ ubtu18cis_rule_3_5_3_3_2 }} +ubuntu18cis_rule_3_5_3_3_3: {{ ubtu18cis_rule_3_5_3_3_3 }} +ubuntu18cis_rule_3_5_3_3_4: {{ ubtu18cis_rule_3_5_3_3_4 }} + +# Section 4 rules +ubuntu18cis_rule_4_1_1_1: {{ ubtu18cis_rule_4_1_1_1 }} +ubuntu18cis_rule_4_1_1_2: {{ ubtu18cis_rule_4_1_1_2 }} +ubuntu18cis_rule_4_1_1_3: {{ ubtu18cis_rule_4_1_1_3 }} +ubuntu18cis_rule_4_1_1_4: {{ ubtu18cis_rule_4_1_1_4 }} +ubuntu18cis_rule_4_1_2_1: {{ ubtu18cis_rule_4_1_2_1 }} +ubuntu18cis_rule_4_1_2_2: {{ ubtu18cis_rule_4_1_2_2 }} +ubuntu18cis_rule_4_1_2_3: {{ ubtu18cis_rule_4_1_2_3 }} +ubuntu18cis_rule_4_1_3: {{ ubtu18cis_rule_4_1_3 }} +ubuntu18cis_rule_4_1_4: {{ ubtu18cis_rule_4_1_4 }} +ubuntu18cis_rule_4_1_5: {{ ubtu18cis_rule_4_1_5 }} +ubuntu18cis_rule_4_1_6: {{ ubtu18cis_rule_4_1_6 }} +ubuntu18cis_rule_4_1_7: {{ ubtu18cis_rule_4_1_7 }} +ubuntu18cis_rule_4_1_8: {{ ubtu18cis_rule_4_1_8 }} +ubuntu18cis_rule_4_1_9: {{ ubtu18cis_rule_4_1_9 }} +ubuntu18cis_rule_4_1_10: {{ ubtu18cis_rule_4_1_10 }} +ubuntu18cis_rule_4_1_11: {{ ubtu18cis_rule_4_1_11 }} +ubuntu18cis_rule_4_1_12: {{ ubtu18cis_rule_4_1_12 }} 
+ubuntu18cis_rule_4_1_13: {{ ubtu18cis_rule_4_1_13}} +ubuntu18cis_rule_4_1_14: {{ ubtu18cis_rule_4_1_14 }} +ubuntu18cis_rule_4_1_15: {{ ubtu18cis_rule_4_1_15 }} +ubuntu18cis_rule_4_1_16: {{ ubtu18cis_rule_4_1_16 }} +ubuntu18cis_rule_4_1_17: {{ ubtu18cis_rule_4_1_17 }} +ubuntu18cis_rule_4_2_1_1: {{ ubtu18cis_rule_4_2_1_1 }} +ubuntu18cis_rule_4_2_1_2: {{ ubtu18cis_rule_4_2_1_2 }} +ubuntu18cis_rule_4_2_1_3: {{ ubtu18cis_rule_4_2_1_3 }} +ubuntu18cis_rule_4_2_1_4: {{ ubtu18cis_rule_4_2_1_4 }} +ubuntu18cis_rule_4_2_1_5: {{ ubtu18cis_rule_4_2_1_5 }} +ubuntu18cis_rule_4_2_1_6: {{ ubtu18cis_rule_4_2_1_6 }} +ubuntu18cis_rule_4_2_2_1: {{ ubtu18cis_rule_4_2_2_1 }} +ubuntu18cis_rule_4_2_2_2: {{ ubtu18cis_rule_4_2_2_2 }} +ubuntu18cis_rule_4_2_2_3: {{ ubtu18cis_rule_4_2_2_3 }} +ubuntu18cis_rule_4_2_3: {{ ubtu18cis_rule_4_2_3 }} +ubuntu18cis_rule_4_3: {{ ubtu18cis_rule_4_3 }} +ubuntu18cis_rule_4_4: {{ ubtu18cis_rule_4_4 }} + +# Section 5 +ubuntu18cis_rule_5_1_1: {{ ubtu18cis_rule_5_1_1 }} +ubuntu18cis_rule_5_1_2: {{ ubtu18cis_rule_5_1_2 }} +ubuntu18cis_rule_5_1_3: {{ ubtu18cis_rule_5_1_3 }} +ubuntu18cis_rule_5_1_4: {{ ubtu18cis_rule_5_1_4 }} +ubuntu18cis_rule_5_1_5: {{ ubtu18cis_rule_5_1_5 }} +ubuntu18cis_rule_5_1_6: {{ ubtu18cis_rule_5_1_6 }} +ubuntu18cis_rule_5_1_7: {{ ubtu18cis_rule_5_1_7 }} +ubuntu18cis_rule_5_1_8: {{ ubtu18cis_rule_5_1_8 }} +ubuntu18cis_rule_5_1_9: {{ ubtu18cis_rule_5_1_9 }} +ubuntu18cis_rule_5_2_1: {{ ubtu18cis_rule_5_2_1 }} +ubuntu18cis_rule_5_2_2: {{ ubtu18cis_rule_5_2_2 }} +ubuntu18cis_rule_5_2_3: {{ ubtu18cis_rule_5_2_3 }} +ubuntu18cis_rule_5_3_1: {{ ubtu18cis_rule_5_3_1 }} +ubuntu18cis_rule_5_3_2: {{ ubtu18cis_rule_5_3_2 }} +ubuntu18cis_rule_5_3_3: {{ ubtu18cis_rule_5_3_3 }} +ubuntu18cis_rule_5_3_4: {{ ubtu18cis_rule_5_3_4 }} +ubuntu18cis_rule_5_3_5: {{ ubtu18cis_rule_5_3_5 }} +ubuntu18cis_rule_5_3_6: {{ ubtu18cis_rule_5_3_6 }} +ubuntu18cis_rule_5_3_7: {{ ubtu18cis_rule_5_3_7 }} +ubuntu18cis_rule_5_3_8: {{ ubtu18cis_rule_5_3_8 }} 
+ubuntu18cis_rule_5_3_9: {{ ubtu18cis_rule_5_3_9 }} +ubuntu18cis_rule_5_3_10: {{ ubtu18cis_rule_5_3_10 }} +ubuntu18cis_rule_5_3_11: {{ ubtu18cis_rule_5_3_11 }} +ubuntu18cis_rule_5_3_12: {{ ubtu18cis_rule_5_3_12 }} +ubuntu18cis_rule_5_3_13: {{ ubtu18cis_rule_5_3_13 }} +ubuntu18cis_rule_5_3_14: {{ ubtu18cis_rule_5_3_14 }} +ubuntu18cis_rule_5_3_15: {{ ubtu18cis_rule_5_3_15 }} +ubuntu18cis_rule_5_3_16: {{ ubtu18cis_rule_5_3_16 }} +ubuntu18cis_rule_5_3_17: {{ ubtu18cis_rule_5_3_17 }} +ubuntu18cis_rule_5_3_18: {{ ubtu18cis_rule_5_3_18 }} +ubuntu18cis_rule_5_3_19: {{ ubtu18cis_rule_5_3_19 }} +ubuntu18cis_rule_5_3_20: {{ ubtu18cis_rule_5_3_20 }} +ubuntu18cis_rule_5_3_21: {{ ubtu18cis_rule_5_3_21 }} +ubuntu18cis_rule_5_3_22: {{ ubtu18cis_rule_5_3_22 }} +ubuntu18cis_rule_5_4_1: {{ ubtu18cis_rule_5_4_1 }} +ubuntu18cis_rule_5_4_2: {{ ubtu18cis_rule_5_4_2 }} +ubuntu18cis_rule_5_4_3: {{ ubtu18cis_rule_5_4_3 }} +ubuntu18cis_rule_5_4_4: {{ ubtu18cis_rule_5_4_4 }} +ubuntu18cis_rule_5_5_1_1: {{ ubtu18cis_rule_5_5_1_1 }} +ubuntu18cis_rule_5_5_1_2: {{ ubtu18cis_rule_5_5_1_2 }} +ubuntu18cis_rule_5_5_1_3: {{ ubtu18cis_rule_5_5_1_3 }} +ubuntu18cis_rule_5_5_1_4: {{ ubtu18cis_rule_5_5_1_4 }} +ubuntu18cis_rule_5_5_1_5: {{ ubtu18cis_rule_5_5_1_5 }} +ubuntu18cis_rule_5_5_2: {{ ubtu18cis_rule_5_5_2 }} +ubuntu18cis_rule_5_5_3: {{ ubtu18cis_rule_5_5_3 }} +ubuntu18cis_rule_5_5_4: {{ ubtu18cis_rule_5_5_4 }} +ubuntu18cis_rule_5_5_5: {{ ubtu18cis_rule_5_5_5 }} +ubuntu18cis_rule_5_6: {{ ubtu18cis_rule_5_6 }} +ubuntu18cis_rule_5_7: {{ ubtu18cis_rule_5_7 }} + +# Section 6 +ubuntu18cis_rule_6_1_1: {{ ubtu18cis_rule_6_1_1 }} +ubuntu18cis_rule_6_1_2: {{ ubtu18cis_rule_6_1_2 }} +ubuntu18cis_rule_6_1_3: {{ ubtu18cis_rule_6_1_3 }} +ubuntu18cis_rule_6_1_4: {{ ubtu18cis_rule_6_1_4 }} +ubuntu18cis_rule_6_1_5: {{ ubtu18cis_rule_6_1_5 }} +ubuntu18cis_rule_6_1_6: {{ ubtu18cis_rule_6_1_6 }} +ubuntu18cis_rule_6_1_7: {{ ubtu18cis_rule_6_1_7 }} +ubuntu18cis_rule_6_1_8: {{ ubtu18cis_rule_6_1_8 }} 
+ubuntu18cis_rule_6_1_9: {{ ubtu18cis_rule_6_1_9 }} +ubuntu18cis_rule_6_1_10: {{ ubtu18cis_rule_6_1_10 }} +ubuntu18cis_rule_6_1_11: {{ ubtu18cis_rule_6_1_11 }} +ubuntu18cis_rule_6_1_12: {{ ubtu18cis_rule_6_1_12 }} +ubuntu18cis_rule_6_1_13: {{ ubtu18cis_rule_6_1_13 }} +ubuntu18cis_rule_6_1_14: {{ ubtu18cis_rule_6_1_14 }} + +ubuntu18cis_rule_6_2_1: {{ ubtu18cis_rule_6_2_1 }} +ubuntu18cis_rule_6_2_2: {{ ubtu18cis_rule_6_2_2 }} +ubuntu18cis_rule_6_2_3: {{ ubtu18cis_rule_6_2_3 }} +ubuntu18cis_rule_6_2_4: {{ ubtu18cis_rule_6_2_4 }} +ubuntu18cis_rule_6_2_5: {{ ubtu18cis_rule_6_2_5 }} +ubuntu18cis_rule_6_2_6: {{ ubtu18cis_rule_6_2_6 }} +ubuntu18cis_rule_6_2_7: {{ ubtu18cis_rule_6_2_7 }} +ubuntu18cis_rule_6_2_8: {{ ubtu18cis_rule_6_2_8 }} +ubuntu18cis_rule_6_2_9: {{ ubtu18cis_rule_6_2_9 }} +ubuntu18cis_rule_6_2_10: {{ ubtu18cis_rule_6_2_10 }} +ubuntu18cis_rule_6_2_11: {{ ubtu18cis_rule_6_2_11 }} +ubuntu18cis_rule_6_2_12: {{ ubtu18cis_rule_6_2_12 }} +ubuntu18cis_rule_6_2_13: {{ ubtu18cis_rule_6_2_13 }} +ubuntu18cis_rule_6_2_14: {{ ubtu18cis_rule_6_2_14 }} +ubuntu18cis_rule_6_2_15: {{ ubtu18cis_rule_6_2_15 }} +ubuntu18cis_rule_6_2_16: {{ ubtu18cis_rule_6_2_16 }} +ubuntu18cis_rule_6_2_17: {{ ubtu18cis_rule_6_2_17 }} + +# AIDE +ubuntu18cis_config_aide: true + +# aide setup via - cron, timer +ubuntu18cis_aide_scan: cron + +# AIDE cron settings +ubuntu18_aide_cron: + cron_user: {{ ubtu18cis_aide_cron.cron_user }} + cron_file: {{ ubtu18cis_aide_cron.cron_file }} + aide_job: {{ ubtu18cis_aide_cron.aide_job }} + aide_minute: {{ ubtu18cis_aide_cron.aide_minute }} + aide_hour: {{ ubtu18cis_aide_cron.aide_hour }} + aide_day: '{{ ubtu18cis_aide_cron.aide_day }}' + aide_month: '{{ ubtu18cis_aide_cron.aide_month }}' + aide_weekday: '{{ ubtu18cis_aide_cron.aide_weekday }}' + +# 1.1 +ubuntu18cis_allow_autofs: {{ ubtu18cis_allow_autofs }} + +# 1.4 +ubuntu18cis_grub_conf_file: /boot/grub/grub.cfg +ubuntu18cis_grub_username: root +ubuntu18cis_grub_hash: blah +# 1.5.1 Bootloader password 
+ubuntu18cis_bootloader_password: {{ ubtu18cis_root_pw }} + +# 1.6 - Only have apparmor enforcing +ubuntu18cis_apparmor_enforce_only: false + +# Warning Banner Content (issue, issue.net, motd) +ubuntu18_warning_banner: {{ ubtu18cis_warning_banner }} +# End Banner + +# Section 2 +# Time sync - can be timesync or chriny or ntp +ubuntu18cis_time_service: {{ ubtu18cis_time_sync_tool }} +ubuntu18cis_ntp_servers: {{ ubtu18cis_time_synchronization_servers }} +ubuntu18cis_ntp_fallback: {{ ubtu18cis_ntp_fallback_server_list }} +ubuntu18cis_ntp_root_distance: + +# Whether or not to run tasks related to auditing/patching the desktop environment +ubuntu18cis_gui: {{ ubtu18cis_desktop_required }} + +# Service configuration booleans set true to keep service +ubuntu18cis_avahi_server: {{ ubtu18cis_avahi_server }} +ubuntu18cis_cups_server: {{ ubtu18cis_cups_server }} +ubuntu18cis_nfs_server: {{ ubtu18cis_nfs_server }} +ubuntu18cis_dhcp_server: {{ ubtu18cis_dhcp_server }} +ubuntu18cis_ldap_server: {{ ubtu18cis_ldap_server }} +ubuntu18cis_dns_server: {{ ubtu18cis_dns_server }} +ubuntu18cis_vsftpd_server: {{ ubtu18cis_vsftpd_server }} +ubuntu18cis_httpd_server: {{ ubtu18cis_httpd_server }} +ubuntu18cis_is_mail_server: false +ubuntu18cis_dovecot_server: {{ ubtu18cis_dovecot_server }} +ubuntu18cis_samba_server: {{ ubtu18cis_smb_server }} +ubuntu18cis_squid_server: {{ ubtu18cis_squid_server }} +ubuntu18cis_snmp_server: {{ ubtu18cis_snmp_server }} + +# Mail Server config +{% if ubtu18_cis_mail_transfer_agent is defined %} +ubuntu18cis_mailserver: {{ ubtu18_cis_mail_transfer_agent }} +{% else %} +ubuntu18cis_mailserver: Not_defined +{% endif %} +ubuntu18_exim_conf: + - dc_eximconfig_configtype='local' + - dc_local_interfaces='127.0.0.1 ; ::1' + - dc_readhost='' + - dc_relay_domains='' + - dc_minimaldns='false' + - dc_relay_nets='' + - dc_smarthost='' + - dc_use_split_config='false' + - dc_hide_mailname='' + - dc_mailname_in_oh='true' + - dc_localdelivery='mail_spool' + + 
+ubuntu18cis_rsyncd_server: {{ ubtu18cis_rsync_server }} +ubuntu18cis_nis_server: {{ ubtu18cis_nis_server }} + +ubuntu18cis_xwindows_required: false + +# 2.2 client services +ubuntu18cis_rsh_required: {{ ubtu18cis_rsh_required }} +ubuntu18cis_talk_required: {{ ubtu18cis_talk_required }} +ubuntu18cis_telnet_required: {{ ubtu18cis_telnet_required }} +ubuntu18cis_ldap_clients_required: {{ ubtu18cis_ldap_clients_required }} +ubuntu18cis_rpc_required: {{ ubtu18cis_rpc_server }} + + +# Section 3 +# IPv6 required +ubuntu18cis_ipv6_required: {{ ubtu18cis_ipv6_required }} + +# System network parameters (host only OR host and router) +ubuntu18cis_is_router: false + + +ubuntu18cis_firewall: {{ ubtu18cis_firewall_package }} + +ubuntu18_default_firewall_zone: public +ubuntu18_firewall_interface: + - ['ens224'] + - ['ens192'] +ubuntu18_firewall_services: + - ssh + - dhcpv6-client + +### Section 4 +## auditd settings +ubuntu18cis_auditd: + space_left_action: email + action_mail_acct: root + admin_space_left_action: {{ ubtu18cis_auditd.admin_space_left_action }} + max_log_file_action: {{ ubtu18cis_auditd.max_log_file_action }} + auditd_backlog_limit: {{ ubtu18cis_audit_back_log_limit }} + +## syslog +ubuntu18cis_is_syslog_server: {{ ubtu18cis_system_is_log_server }} +### Section 5 +ubuntu18cis_sshd_limited: false +# Note the following to understand precedence and layout +ubuntu18cis_sshd_access: + - AllowUser + - AllowGroup + - DenyUser + - DenyGroup + +ubuntu18cis_ssh_strong_ciphers: Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr +ubuntu18cis_ssh_weak_ciphers: + - 3des-cbc + - aes128-cbc + - aes192-cbc + - aes256-cbc + - arcfour + - arcfour128 + - arcfour256 + - blowfish-cbc + - cast128-cbc + - rijndael-cbc@lysator.liu.se + +ubuntu18cis_ssh_strong_macs: MACs hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-sha2-512,hmac-sha2-256 +ubuntu18cis_ssh_weak_macs: + - hmac-md5 + - hmac-md5-96 + - 
hmac-ripemd160 + - hmac-sha1 + - hmac-sha1-96 + - umac-64@openssh.com + - umac-128@openssh.com + - hmac-md5-etm@openssh.com + - hmac-md5-96-etm@openssh.com + - hmac-ripemd160-etm@openssh.com + - hmac-sha1-etm@openssh.com + - hmac-sha1-96-etm@openssh.com + - umac-64-etm@openssh.com + - umac-128-etm@openssh.com + +ubuntu18cis_ssh_strong_kex: KexAlgorithms curve25519-sha256,curve25519-sha256@libssh.org,diffie-hellman-group14-sha256,diffie-hellman-group16-sha512,diffie-hellman-group18-sha512,ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256 +ubuntu18cis_ssh_weak_kex: + - diffie-hellman-group1-sha1 + - diffie-hellman-group14-sha1 + - diffie-hellman-group-exchange-sha1 + + +ubuntu18cis_ssh_aliveinterval: 300 +ubuntu18cis_ssh_countmax: 3 +## PAM +ubuntu18cis_pam_password: + minlen: "14" + minclass: "4" + +ubuntu18cis_pam_passwd_retry: "3" + +# choose one of below +ubuntu18cis_pwhistory_so: "14" +ubuntu18cis_unix_so: false +ubuntu18cis_passwd_remember: {{ ubtu18cis_pamd_pwhistory_remember }} + +# logins.def password settings +ubuntu18cis_pass: + max_days: {{ ubtu18cis_pass.max_days }} + min_days: {{ ubtu18cis_pass.min_days }} + warn_age: {{ ubtu18cis_pass.warn_age }} + +# set sugroup if differs from wheel +ubuntu18cis_sugroup: {{ ubtu18cis_su_group }} + +# sugroup users list +ubuntu18_sugroup_users: "root" + +# var log location variable +ubuntu18_varlog_location: {{ ubtu18cis_sudo_logfile }} diff --git a/templates/audit/chrony.conf.j2 b/templates/audit/chrony.conf.j2 index 8d78e5d..51d7254 100644 --- a/templates/audit/chrony.conf.j2 +++ b/templates/audit/chrony.conf.j2 @@ -1,3 +1,3 @@ -w /var/run/utmp -p wa -k session -w /var/log/wtmp -p wa -k logins --w /var/log/btmp -p wa -k logins \ No newline at end of file +-w /var/log/btmp -p wa -k logins diff --git a/templates/audit/ubtu18cis_4_1_10_access.rules.j2 b/templates/audit/ubtu18cis_4_1_10_access.rules.j2 index 381fced..17635e1 100644 --- 
a/templates/audit/ubtu18cis_4_1_10_access.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_10_access.rules.j2 @@ -3,4 +3,4 @@ {% if ansible_architecture == 'x86_64' -%} -a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access -a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=4294967295 -k access -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_11_privileged.rules.j2 b/templates/audit/ubtu18cis_4_1_11_privileged.rules.j2 index 4839963..47de826 100644 --- a/templates/audit/ubtu18cis_4_1_11_privileged.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_11_privileged.rules.j2 @@ -1,3 +1,3 @@ -{% for proc in priv_procs.stdout_lines -%} +{% for proc in priv_procs.stdout_lines -%} -a always,exit -F path={{ proc }} -F perm=x -F auid>=1000 -F auid!=4294967295 -k privileged -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/templates/audit/ubtu18cis_4_1_12_audit.rules.j2 b/templates/audit/ubtu18cis_4_1_12_audit.rules.j2 index 04403cd..fa95efb 100644 --- a/templates/audit/ubtu18cis_4_1_12_audit.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_12_audit.rules.j2 @@ -1,4 +1,4 @@ -a always,exit -F arch=b32 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts {% if ansible_architecture == 'x86_64' -%} -a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_13_delete.rules.j2 b/templates/audit/ubtu18cis_4_1_13_delete.rules.j2 index 2f7a4c3..7a97b22 100644 --- a/templates/audit/ubtu18cis_4_1_13_delete.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_13_delete.rules.j2 @@ -1,4 +1,4 @@ -a always,exit -F arch=b32 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete {% if ansible_architecture == 'x86_64' -%} -a always,exit -F arch=b64 -S 
unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_14_scope.rules.j2 b/templates/audit/ubtu18cis_4_1_14_scope.rules.j2 index fbe6c0f..0ae21fd 100644 --- a/templates/audit/ubtu18cis_4_1_14_scope.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_14_scope.rules.j2 @@ -1,2 +1,2 @@ -w /etc/sudoers -p wa -k scope --w /etc/sudoers.d/ -p wa -k scope \ No newline at end of file +-w /etc/sudoers.d/ -p wa -k scope diff --git a/templates/audit/ubtu18cis_4_1_15_actions.rules.j2 b/templates/audit/ubtu18cis_4_1_15_actions.rules.j2 index c53a628..ef134a9 100644 --- a/templates/audit/ubtu18cis_4_1_15_actions.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_15_actions.rules.j2 @@ -1 +1,4 @@ --w /var/log/sudo.log -p wa -k actions \ No newline at end of file +-a always,exit -F arch=b32 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=4294967295 -S execve -k actions +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=4294967295 -S execve -k actions +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_16_modules.rules.j2 b/templates/audit/ubtu18cis_4_1_16_modules.rules.j2 index e95823b..bc1813b 100644 --- a/templates/audit/ubtu18cis_4_1_16_modules.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_16_modules.rules.j2 @@ -6,4 +6,4 @@ {% endif %} {% if ansible_architecture == 'x86_64' -%} -a always,exit -F arch=b64 -S init_module -S delete_module -k modules -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_17_99finalize.rules.j2 b/templates/audit/ubtu18cis_4_1_17_99finalize.rules.j2 index 381a9ac..bc95eba 100644 --- a/templates/audit/ubtu18cis_4_1_17_99finalize.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_17_99finalize.rules.j2 @@ -1 +1 @@ --e 2 \ No newline at end of file +-e 2 diff --git a/templates/audit/ubtu18cis_4_1_3_timechange.rules.j2 
b/templates/audit/ubtu18cis_4_1_3_timechange.rules.j2 index 439bad2..fd08cfe 100644 --- a/templates/audit/ubtu18cis_4_1_3_timechange.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_3_timechange.rules.j2 @@ -4,4 +4,4 @@ {% if ansible_architecture == 'x86_64' -%} -a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change -a always,exit -F arch=b64 -S clock_settime -k time-change -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/audit/ubtu18cis_4_1_4_identity.rules.j2 b/templates/audit/ubtu18cis_4_1_4_identity.rules.j2 index f16cd78..358f999 100644 --- a/templates/audit/ubtu18cis_4_1_4_identity.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_4_identity.rules.j2 @@ -2,4 +2,4 @@ -w /etc/passwd -p wa -k identity -w /etc/gshadow -p wa -k identity -w /etc/shadow -p wa -k identity --w /etc/security/opasswd -p wa -k identity \ No newline at end of file +-w /etc/security/opasswd -p wa -k identity diff --git a/templates/audit/ubtu18cis_4_1_5_systemlocale.rules.j2 b/templates/audit/ubtu18cis_4_1_5_systemlocale.rules.j2 index 57fac30..73c912d 100644 --- a/templates/audit/ubtu18cis_4_1_5_systemlocale.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_5_systemlocale.rules.j2 @@ -1,5 +1,8 @@ -a always,exit -F arch=b32 -S sethostname -S setdomainname -k system-locale +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S sethostname -S setdomainname -k system-locale +{% endif %} -w /etc/issue -p wa -k system-locale -w /etc/issue.net -p wa -k system-locale -w /etc/hosts -p wa -k system-locale --w /etc/network -p wa -k system-locale \ No newline at end of file +-w /etc/network -p wa -k system-locale diff --git a/templates/audit/ubtu18cis_4_1_6_macpolicy.rules.j2 b/templates/audit/ubtu18cis_4_1_6_macpolicy.rules.j2 index 61ab493..10354ae 100644 --- a/templates/audit/ubtu18cis_4_1_6_macpolicy.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_6_macpolicy.rules.j2 @@ -1,2 +1,2 @@ -w /etc/apparmor/ -p wa -k MAC-policy --w /etc/apparmor.d/ -p wa -k 
MAC-policy \ No newline at end of file +-w /etc/apparmor.d/ -p wa -k MAC-policy diff --git a/templates/audit/ubtu18cis_4_1_7_logins.rules.j2 b/templates/audit/ubtu18cis_4_1_7_logins.rules.j2 index 10deed3..b38f823 100644 --- a/templates/audit/ubtu18cis_4_1_7_logins.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_7_logins.rules.j2 @@ -1,3 +1,3 @@ -w /var/log/faillog -p wa -k logins -w /var/log/lastlog -p wa -k logins --w /var/log/tallylog -p wa -k logins \ No newline at end of file +-w /var/log/tallylog -p wa -k logins diff --git a/templates/audit/ubtu18cis_4_1_8_session.rules.j2 b/templates/audit/ubtu18cis_4_1_8_session.rules.j2 index 8d78e5d..51d7254 100644 --- a/templates/audit/ubtu18cis_4_1_8_session.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_8_session.rules.j2 @@ -1,3 +1,3 @@ -w /var/run/utmp -p wa -k session -w /var/log/wtmp -p wa -k logins --w /var/log/btmp -p wa -k logins \ No newline at end of file +-w /var/log/btmp -p wa -k logins diff --git a/templates/audit/ubtu18cis_4_1_9_permmod.rules.j2 b/templates/audit/ubtu18cis_4_1_9_permmod.rules.j2 index 5b45a51..19d9884 100644 --- a/templates/audit/ubtu18cis_4_1_9_permmod.rules.j2 +++ b/templates/audit/ubtu18cis_4_1_9_permmod.rules.j2 @@ -5,4 +5,4 @@ -a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod -a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod -a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod -{% endif %} \ No newline at end of file +{% endif %} diff --git a/templates/ntp.conf.j2 b/templates/ntp.conf.j2 index fd5eafe..2ec66b1 100644 --- a/templates/ntp.conf.j2 +++ b/templates/ntp.conf.j2 @@ -65,4 +65,4 @@ restrict source notrap nomodify noquery #fudge 127.127.8.1 time1 0.0042 # relative to PPS for my hardware #server 127.127.22.1 # ATOM(PPS) -#fudge 127.127.22.1 flag3 1 # enable PPS API 
\ No newline at end of file +#fudge 127.127.22.1 flag3 1 # enable PPS API diff --git a/templates/ubtu18cis_4_1_3_timechange64.rules.j2 b/templates/ubtu18cis_4_1_3_timechange64.rules.j2 index bd8666d..7f79962 100644 --- a/templates/ubtu18cis_4_1_3_timechange64.rules.j2 +++ b/templates/ubtu18cis_4_1_3_timechange64.rules.j2 @@ -2,4 +2,4 @@ -a always,exit -F arch=b32 -S adjtimex -S settimeofday -S stime -k time-change -a always,exit -F arch=b64 -S clock_settime -k time-change -a always,exit -F arch=b32 -S clock_settime -k time-change --w /etc/localtime -p wa -k time-change \ No newline at end of file +-w /etc/localtime -p wa -k time-change diff --git a/tests/inventory b/tests/inventory deleted file mode 100644 index 878877b..0000000 --- a/tests/inventory +++ /dev/null @@ -1,2 +0,0 @@ -localhost - diff --git a/tests/test.yml b/tests/test.yml deleted file mode 100644 index 73c38bc..0000000 --- a/tests/test.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -- hosts: localhost - remote_user: root - roles: - - UBUNTU18-CIS \ No newline at end of file diff --git a/vars/main.yml b/vars/main.yml index 0e6073b..e8ece67 100644 --- a/vars/main.yml +++ b/vars/main.yml @@ -1,2 +1,8 @@ --- -# vars file for UBUNTU18-CIS \ No newline at end of file +# vars file for UBUNTU18-CIS + +min_ansible_version: 2.10.1 + +# Used to control warning summary +warn_control_list: "" +warn_count: 0