diff --git a/.fixtures.yml b/.fixtures.yml
index 07998564..3ce148c8 100644
--- a/.fixtures.yml
+++ b/.fixtures.yml
@@ -5,5 +5,3 @@ fixtures:
stdlib: 'https://github.com/puppetlabs/puppetlabs-stdlib.git'
zookeeper: 'https://github.com/deric/puppet-zookeeper.git'
systemd: 'https://github.com/voxpupuli/puppet-systemd.git'
- symlinks:
- kafka: "#{source_dir}"
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 80110135..6aaa603f 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -25,7 +25,7 @@ By participating in this project you agree to abide by its terms.
* Fork the repo.
* Create a separate branch for your change.
-* We only take pull requests with passing tests, and documentation. GitHub Actions run the tests for us (check the .github/workflows/ directory). You can also execute them locally. This is explained [in a later section](#the-test-matrix).
+* We only take pull requests with passing tests and documentation. [GitHub Actions](https://docs.github.com/en/actions) run the tests for us. You can also execute them locally. This is explained [in a later section](#the-test-matrix).
* Checkout [our docs](https://voxpupuli.org/docs/reviewing_pr/) we use to review a module and the [official styleguide](https://puppet.com/docs/puppet/6.0/style_guide.html). They provide some guidance for new code that might help you before you submit a pull request.
* Add a test for your change. Only refactoring and documentation changes require no new tests. If you are adding functionality or fixing a bug, please add a test.
* Squash your commits down into logical components. Make sure to rebase against our current master.
@@ -124,7 +124,7 @@ If you have Ruby 2.x or want a specific version of Puppet,
you must set an environment variable such as:
```sh
-export PUPPET_VERSION="~> 5.5.6"
+export PUPPET_GEM_VERSION="~> 6.1.0"
```
You can install all needed gems for spec tests into the modules directory by
@@ -232,19 +232,21 @@ simple tests against it after applying the module. You can run this
with:
```sh
-BEAKER_setfile=debian10-x64 bundle exec rake beaker
+BEAKER_PUPPET_COLLECTION=puppet7 BEAKER_setfile=debian11-64 bundle exec rake beaker
```
-You can replace the string `debian10` with any common operating system.
+You can replace the string `debian11` with any common operating system.
The following strings are known to work:
-* ubuntu1604
-* ubuntu1804
* ubuntu2004
-* debian9
-* debian10
+* ubuntu2204
+* debian11
* centos7
* centos8
+* centos9
+* almalinux8
+* almalinux9
+* fedora36
For more information and tips & tricks, see [voxpupuli-acceptance's documentation](https://github.com/voxpupuli/voxpupuli-acceptance#running-tests).
diff --git a/.github/SECURITY.md b/.github/SECURITY.md
deleted file mode 100644
index cacadf22..00000000
--- a/.github/SECURITY.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Vox Pupuli Security Policy
-
-Our vulnerabilities reporting process is at https://voxpupuli.org/security/
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 150e6ace..7216724f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,76 +4,20 @@
name: CI
-on: pull_request
+on:
+ pull_request: {}
+ push:
+ branches:
+ - main
+ - master
-jobs:
- setup_matrix:
- name: 'Setup Test Matrix'
- runs-on: ubuntu-latest
- timeout-minutes: 40
- outputs:
- beaker_setfiles: ${{ steps.get-outputs.outputs.beaker_setfiles }}
- puppet_major_versions: ${{ steps.get-outputs.outputs.puppet_major_versions }}
- puppet_unit_test_matrix: ${{ steps.get-outputs.outputs.puppet_unit_test_matrix }}
- env:
- BUNDLE_WITHOUT: development:system_tests:release
- steps:
- - uses: actions/checkout@v2
- - name: Setup ruby
- uses: ruby/setup-ruby@v1
- with:
- ruby-version: '3.0'
- bundler-cache: true
- - name: Run static validations
- run: bundle exec rake validate lint check
- - name: Run rake rubocop
- run: bundle exec rake rubocop
- - name: Setup Test Matrix
- id: get-outputs
- run: bundle exec metadata2gha --use-fqdn --pidfile-workaround false
-
- unit:
- needs: setup_matrix
- runs-on: ubuntu-latest
- timeout-minutes: 40
- strategy:
- fail-fast: false
- matrix:
- include: ${{fromJson(needs.setup_matrix.outputs.puppet_unit_test_matrix)}}
- env:
- BUNDLE_WITHOUT: development:system_tests:release
- PUPPET_VERSION: "~> ${{ matrix.puppet }}.0"
- name: Puppet ${{ matrix.puppet }} (Ruby ${{ matrix.ruby }})
- steps:
- - uses: actions/checkout@v2
- - name: Setup ruby
- uses: ruby/setup-ruby@v1
- with:
- ruby-version: ${{ matrix.ruby }}
- bundler-cache: true
- - name: Run tests
- run: bundle exec rake parallel_spec
+concurrency:
+ group: ${{ github.ref_name }}
+ cancel-in-progress: true
- acceptance:
- needs: setup_matrix
- runs-on: ubuntu-latest
- env:
- BUNDLE_WITHOUT: development:test:release
- strategy:
- fail-fast: false
- matrix:
- setfile: ${{fromJson(needs.setup_matrix.outputs.beaker_setfiles)}}
- puppet: ${{fromJson(needs.setup_matrix.outputs.puppet_major_versions)}}
- name: ${{ matrix.puppet.name }} - ${{ matrix.setfile.name }}
- steps:
- - uses: actions/checkout@v2
- - name: Setup ruby
- uses: ruby/setup-ruby@v1
- with:
- ruby-version: '3.0'
- bundler-cache: true
- - name: Run tests
- run: bundle exec rake beaker
- env:
- BEAKER_PUPPET_COLLECTION: ${{ matrix.puppet.collection }}
- BEAKER_setfile: ${{ matrix.setfile.value }}
+jobs:
+ puppet:
+ name: Puppet
+ uses: voxpupuli/gha-puppet/.github/workflows/beaker.yml@v2
+ with:
+ pidfile_workaround: 'false'
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 664ba694..55324aa6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -9,26 +9,14 @@ on:
tags:
- '*'
-env:
- BUNDLE_WITHOUT: development:test:system_tests
-
jobs:
- deploy:
- name: 'deploy to forge'
- runs-on: ubuntu-latest
- if: github.repository_owner == 'voxpupuli'
- steps:
- - name: Checkout repository
- uses: actions/checkout@v2
- - name: Setup Ruby
- uses: ruby/setup-ruby@v1
- with:
- ruby-version: '2.7'
- bundler-cache: true
- - name: Build and Deploy
- env:
- # Configure secrets here:
- # https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets
- BLACKSMITH_FORGE_USERNAME: '${{ secrets.PUPPET_FORGE_USERNAME }}'
- BLACKSMITH_FORGE_API_KEY: '${{ secrets.PUPPET_FORGE_API_KEY }}'
- run: bundle exec rake module:push
+ release:
+ name: Release
+ uses: voxpupuli/gha-puppet/.github/workflows/release.yml@v2
+ with:
+ allowed_owner: 'voxpupuli'
+ secrets:
+ # Configure secrets here:
+ # https://docs.github.com/en/actions/security-guides/encrypted-secrets
+ username: ${{ secrets.PUPPET_FORGE_USERNAME }}
+ api_key: ${{ secrets.PUPPET_FORGE_API_KEY }}
diff --git a/.gitignore b/.gitignore
index 9b95224c..84fd904c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,23 +1,23 @@
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
-pkg/
-Gemfile.lock
-Gemfile.local
-vendor/
-.vendor/
-spec/fixtures/manifests/
-spec/fixtures/modules/
-.vagrant/
-.bundle/
-.ruby-version
-coverage/
-log/
-.idea/
-.dependencies/
-.librarian/
-Puppetfile.lock
+/pkg/
+/Gemfile.lock
+/Gemfile.local
+/vendor/
+/.vendor/
+/spec/fixtures/manifests/
+/spec/fixtures/modules/
+/.vagrant/
+/.bundle/
+/.ruby-version
+/coverage/
+/log/
+/.idea/
+/.dependencies/
+/.librarian/
+/Puppetfile.lock
*.iml
.*.sw?
-.yardoc/
-Guardfile
+/.yardoc/
+/Guardfile
diff --git a/.msync.yml b/.msync.yml
index 9c9f18f9..f46ee025 100644
--- a/.msync.yml
+++ b/.msync.yml
@@ -2,4 +2,4 @@
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
-modulesync_config_version: '4.1.0'
+modulesync_config_version: '7.3.0'
diff --git a/.pmtignore b/.pmtignore
index 65f50514..10b98306 100644
--- a/.pmtignore
+++ b/.pmtignore
@@ -1,37 +1,38 @@
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
-docs/
-pkg/
-Gemfile
-Gemfile.lock
-Gemfile.local
-vendor/
-.vendor/
-spec/
-Rakefile
-.vagrant/
-.bundle/
-.ruby-version
-coverage/
-log/
-.idea/
-.dependencies/
-.github/
-.librarian/
-Puppetfile.lock
+/docs/
+/pkg/
+/Gemfile
+/Gemfile.lock
+/Gemfile.local
+/vendor/
+/.vendor/
+/spec/
+/Rakefile
+/.vagrant/
+/.bundle/
+/.ruby-version
+/coverage/
+/log/
+/.idea/
+/.dependencies/
+/.github/
+/.librarian/
+/Puppetfile.lock
*.iml
-.editorconfig
-.fixtures.yml
-.gitignore
-.msync.yml
-.overcommit.yml
-.pmtignore
-.rspec
-.rspec_parallel
-.rubocop.yml
-.sync.yml
+/.editorconfig
+/.fixtures.yml
+/.gitignore
+/.msync.yml
+/.overcommit.yml
+/.pmtignore
+/.rspec
+/.rspec_parallel
+/.rubocop.yml
+/.sync.yml
.*.sw?
-.yardoc/
-.yardopts
-Dockerfile
+/.yardoc/
+/.yardopts
+/Dockerfile
+/HISTORY.md
diff --git a/.puppet-lint.rc b/.puppet-lint.rc
new file mode 100644
index 00000000..dd8272c7
--- /dev/null
+++ b/.puppet-lint.rc
@@ -0,0 +1,3 @@
+--fail-on-warnings
+--no-parameter_documentation-check
+--no-parameter_types-check
diff --git a/Dockerfile b/Dockerfile
index e3cf307f..8dd82d63 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,7 +8,7 @@ WORKDIR /opt/puppet
# https://github.com/puppetlabs/puppet/blob/06ad255754a38f22fb3a22c7c4f1e2ce453d01cb/lib/puppet/provider/service/runit.rb#L39
RUN mkdir -p /etc/sv
-ARG PUPPET_VERSION="~> 6.0"
+ARG PUPPET_GEM_VERSION="~> 6.0"
ARG PARALLEL_TEST_PROCESSORS=4
# Cache gems
diff --git a/Gemfile b/Gemfile
index 1a095270..a4a3b204 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,13 +1,13 @@
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
-source ENV['GEM_SOURCE'] || "https://rubygems.org"
+source ENV['GEM_SOURCE'] || 'https://rubygems.org'
group :test do
- gem 'voxpupuli-test', '~> 2.1', :require => false
- gem 'coveralls', :require => false
- gem 'simplecov-console', :require => false
- gem 'puppet_metadata', '~> 0.3.0', :require => false
+ gem 'voxpupuli-test', '~> 7.0', :require => false
+ gem 'coveralls', :require => false
+ gem 'simplecov-console', :require => false
+ gem 'puppet_metadata', '~> 3.5', :require => false
end
group :development do
@@ -16,19 +16,17 @@ group :development do
end
group :system_tests do
- gem 'voxpupuli-acceptance', '~> 1.0', :require => false
+ gem 'voxpupuli-acceptance', '~> 3.0', :require => false
end
group :release do
- gem 'github_changelog_generator', '>= 1.16.1', :require => false
- gem 'voxpupuli-release', '>= 1.0.2', :require => false
- gem 'puppet-strings', '>= 2.2', :require => false
+ gem 'voxpupuli-release', '~> 3.0', :require => false
end
gem 'rake', :require => false
gem 'facter', ENV['FACTER_GEM_VERSION'], :require => false, :groups => [:test]
-puppetversion = ENV['PUPPET_VERSION'] || '>= 6.0'
+puppetversion = ENV['PUPPET_GEM_VERSION'] || '~> 7.24'
gem 'puppet', puppetversion, :require => false, :groups => [:test]
# vim: syntax=ruby
diff --git a/REFERENCE.md b/REFERENCE.md
index 26cc58a1..68c70b25 100644
--- a/REFERENCE.md
+++ b/REFERENCE.md
@@ -9,13 +9,13 @@
#### Public Classes
* [`kafka`](#kafka): This class handles the Kafka requirements.
-* [`kafka::broker`](#kafkabroker): This class handles the Kafka (broker).
-* [`kafka::consumer`](#kafkaconsumer): This class handles the Kafka (consumer).
-* [`kafka::consumer::config`](#kafkaconsumerconfig): This class handles the Kafka (consumer) config.
-* [`kafka::mirror`](#kafkamirror): This class handles the Kafka (mirror).
-* [`kafka::params`](#kafkaparams): This class provides default parameters.
-* [`kafka::producer`](#kafkaproducer): This class handles the Kafka (producer).
-* [`kafka::producer::config`](#kafkaproducerconfig): This class handles the Kafka (producer) config.
+* [`kafka::broker`](#kafka--broker): This class handles the Kafka (broker).
+* [`kafka::consumer`](#kafka--consumer): This class handles the Kafka (consumer).
+* [`kafka::consumer::config`](#kafka--consumer--config): This class handles the Kafka (consumer) config.
+* [`kafka::mirror`](#kafka--mirror): This class handles the Kafka (mirror).
+* [`kafka::params`](#kafka--params): This class provides default parameters.
+* [`kafka::producer`](#kafka--producer): This class handles the Kafka (producer).
+* [`kafka::producer::config`](#kafka--producer--config): This class handles the Kafka (producer) config.
#### Private Classes
@@ -32,7 +32,7 @@
### Defined types
-* [`kafka::topic`](#kafkatopic): This defined type handles the creation of Kafka topics.
+* [`kafka::topic`](#kafka--topic): This defined type handles the creation of Kafka topics.
## Classes
@@ -52,33 +52,33 @@ class { 'kafka': }
The following parameters are available in the `kafka` class:
-* [`kafka_version`](#kafka_version)
-* [`scala_version`](#scala_version)
-* [`install_dir`](#install_dir)
-* [`mirror_url`](#mirror_url)
-* [`manage_java`](#manage_java)
-* [`package_dir`](#package_dir)
-* [`package_name`](#package_name)
-* [`mirror_subpath`](#mirror_subpath)
-* [`proxy_server`](#proxy_server)
-* [`proxy_port`](#proxy_port)
-* [`proxy_host`](#proxy_host)
-* [`proxy_type`](#proxy_type)
-* [`package_ensure`](#package_ensure)
-* [`user_name`](#user_name)
-* [`user_shell`](#user_shell)
-* [`group_name`](#group_name)
-* [`system_user`](#system_user)
-* [`system_group`](#system_group)
-* [`user_id`](#user_id)
-* [`group_id`](#group_id)
-* [`manage_user`](#manage_user)
-* [`manage_group`](#manage_group)
-* [`config_dir`](#config_dir)
-* [`log_dir`](#log_dir)
-* [`install_mode`](#install_mode)
-
-##### `kafka_version`
+* [`kafka_version`](#-kafka--kafka_version)
+* [`scala_version`](#-kafka--scala_version)
+* [`install_dir`](#-kafka--install_dir)
+* [`mirror_url`](#-kafka--mirror_url)
+* [`manage_java`](#-kafka--manage_java)
+* [`package_dir`](#-kafka--package_dir)
+* [`package_name`](#-kafka--package_name)
+* [`mirror_subpath`](#-kafka--mirror_subpath)
+* [`proxy_server`](#-kafka--proxy_server)
+* [`proxy_port`](#-kafka--proxy_port)
+* [`proxy_host`](#-kafka--proxy_host)
+* [`proxy_type`](#-kafka--proxy_type)
+* [`package_ensure`](#-kafka--package_ensure)
+* [`user_name`](#-kafka--user_name)
+* [`user_shell`](#-kafka--user_shell)
+* [`group_name`](#-kafka--group_name)
+* [`system_user`](#-kafka--system_user)
+* [`system_group`](#-kafka--system_group)
+* [`user_id`](#-kafka--user_id)
+* [`group_id`](#-kafka--group_id)
+* [`manage_user`](#-kafka--manage_user)
+* [`manage_group`](#-kafka--manage_group)
+* [`config_dir`](#-kafka--config_dir)
+* [`log_dir`](#-kafka--log_dir)
+* [`install_mode`](#-kafka--install_mode)
+
+##### `kafka_version`
Data type: `String[1]`
@@ -86,7 +86,7 @@ The version of Kafka that should be installed.
Default value: `$kafka::params::kafka_version`
-##### `scala_version`
+##### `scala_version`
Data type: `String[1]`
@@ -94,7 +94,7 @@ The scala version what Kafka was built with.
Default value: `$kafka::params::scala_version`
-##### `install_dir`
+##### `install_dir`
Data type: `Stdlib::Absolutepath`
@@ -102,7 +102,7 @@ The directory to install Kafka to.
Default value: `$kafka::params::install_dir`
-##### `mirror_url`
+##### `mirror_url`
Data type: `Stdlib::HTTPUrl`
@@ -110,7 +110,7 @@ The url where the Kafka is downloaded from.
Default value: `$kafka::params::mirror_url`
-##### `manage_java`
+##### `manage_java`
Data type: `Boolean`
@@ -118,7 +118,7 @@ Install java if it's not already installed.
Default value: `$kafka::params::manage_java`
-##### `package_dir`
+##### `package_dir`
Data type: `Stdlib::Absolutepath`
@@ -126,7 +126,7 @@ The directory to install Kafka.
Default value: `$kafka::params::package_dir`
-##### `package_name`
+##### `package_name`
Data type: `Optional[String[1]]`
@@ -134,7 +134,7 @@ Package name, when installing Kafka from a package.
Default value: `$kafka::params::package_name`
-##### `mirror_subpath`
+##### `mirror_subpath`
Data type: `Optional[String[1]]`
@@ -142,7 +142,7 @@ The sub directory where the source is downloaded from.
Default value: `$kafka::params::mirror_subpath`
-##### `proxy_server`
+##### `proxy_server`
Data type: `Optional[String[1]]`
@@ -150,7 +150,7 @@ Set proxy server, when installing Kafka from source.
Default value: `$kafka::params::proxy_server`
-##### `proxy_port`
+##### `proxy_port`
Data type: `Optional[String[1]]`
@@ -158,7 +158,7 @@ Set proxy port, when installing Kafka from source.
Default value: `$kafka::params::proxy_port`
-##### `proxy_host`
+##### `proxy_host`
Data type: `Optional[String[1]]`
@@ -166,7 +166,7 @@ Set proxy host, when installing Kafka from source.
Default value: `$kafka::params::proxy_host`
-##### `proxy_type`
+##### `proxy_type`
Data type: `Optional[String[1]]`
@@ -174,7 +174,7 @@ Set proxy type, when installing Kafka from source.
Default value: `$kafka::params::proxy_type`
-##### `package_ensure`
+##### `package_ensure`
Data type: `String[1]`
@@ -182,7 +182,7 @@ Package version or ensure state, when installing Kafka from a package.
Default value: `$kafka::params::package_ensure`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -190,7 +190,7 @@ User to run Kafka as.
Default value: `$kafka::params::user_name`
-##### `user_shell`
+##### `user_shell`
Data type: `Stdlib::Absolutepath`
@@ -198,7 +198,7 @@ Login shell of the Kafka user.
Default value: `$kafka::params::user_shell`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -206,7 +206,7 @@ Group to run Kafka as.
Default value: `$kafka::params::group_name`
-##### `system_user`
+##### `system_user`
Data type: `Boolean`
@@ -214,7 +214,7 @@ Whether the Kafka user is a system user or not.
Default value: `$kafka::params::system_user`
-##### `system_group`
+##### `system_group`
Data type: `Boolean`
@@ -222,7 +222,7 @@ Whether the Kafka group is a system group or not.
Default value: `$kafka::params::system_group`
-##### `user_id`
+##### `user_id`
Data type: `Optional[Integer]`
@@ -230,7 +230,7 @@ Create the Kafka user with this ID.
Default value: `$kafka::params::user_id`
-##### `group_id`
+##### `group_id`
Data type: `Optional[Integer]`
@@ -238,7 +238,7 @@ Create the Kafka group with this ID.
Default value: `$kafka::params::group_id`
-##### `manage_user`
+##### `manage_user`
Data type: `Boolean`
@@ -246,7 +246,7 @@ Create the Kafka user if it's not already present.
Default value: `$kafka::params::manage_user`
-##### `manage_group`
+##### `manage_group`
Data type: `Boolean`
@@ -254,7 +254,7 @@ Create the Kafka group if it's not already present.
Default value: `$kafka::params::manage_group`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -262,7 +262,7 @@ The directory to create the Kafka config files to.
Default value: `$kafka::params::config_dir`
-##### `log_dir`
+##### `log_dir`
Data type: `Stdlib::Absolutepath`
@@ -270,7 +270,7 @@ The directory for Kafka log files.
Default value: `$kafka::params::log_dir`
-##### `install_mode`
+##### `install_mode`
Data type: `Stdlib::Filemode`
@@ -278,7 +278,7 @@ The permissions for the install directory.
Default value: `$kafka::params::install_mode`
-### `kafka::broker`
+### `kafka::broker`
This class handles the Kafka (broker).
@@ -299,46 +299,46 @@ class { 'kafka::broker':
The following parameters are available in the `kafka::broker` class:
-* [`kafka_version`](#kafka_version)
-* [`scala_version`](#scala_version)
-* [`install_dir`](#install_dir)
-* [`mirror_url`](#mirror_url)
-* [`manage_java`](#manage_java)
-* [`package_dir`](#package_dir)
-* [`package_name`](#package_name)
-* [`package_ensure`](#package_ensure)
-* [`user_name`](#user_name)
-* [`user_shell`](#user_shell)
-* [`group_name`](#group_name)
-* [`user_id`](#user_id)
-* [`group_id`](#group_id)
-* [`manage_user`](#manage_user)
-* [`manage_group`](#manage_group)
-* [`config_mode`](#config_mode)
-* [`config_dir`](#config_dir)
-* [`log_dir`](#log_dir)
-* [`bin_dir`](#bin_dir)
-* [`service_name`](#service_name)
-* [`manage_service`](#manage_service)
-* [`service_ensure`](#service_ensure)
-* [`service_restart`](#service_restart)
-* [`service_requires`](#service_requires)
-* [`limit_nofile`](#limit_nofile)
-* [`limit_core`](#limit_core)
-* [`timeout_stop`](#timeout_stop)
-* [`exec_stop`](#exec_stop)
-* [`daemon_start`](#daemon_start)
-* [`env`](#env)
-* [`config`](#config)
-* [`heap_opts`](#heap_opts)
-* [`jmx_opts`](#jmx_opts)
-* [`log4j_opts`](#log4j_opts)
-* [`opts`](#opts)
-* [`manage_log4j`](#manage_log4j)
-* [`log_file_size`](#log_file_size)
-* [`log_file_count`](#log_file_count)
-
-##### `kafka_version`
+* [`kafka_version`](#-kafka--broker--kafka_version)
+* [`scala_version`](#-kafka--broker--scala_version)
+* [`install_dir`](#-kafka--broker--install_dir)
+* [`mirror_url`](#-kafka--broker--mirror_url)
+* [`manage_java`](#-kafka--broker--manage_java)
+* [`package_dir`](#-kafka--broker--package_dir)
+* [`package_name`](#-kafka--broker--package_name)
+* [`package_ensure`](#-kafka--broker--package_ensure)
+* [`user_name`](#-kafka--broker--user_name)
+* [`user_shell`](#-kafka--broker--user_shell)
+* [`group_name`](#-kafka--broker--group_name)
+* [`user_id`](#-kafka--broker--user_id)
+* [`group_id`](#-kafka--broker--group_id)
+* [`manage_user`](#-kafka--broker--manage_user)
+* [`manage_group`](#-kafka--broker--manage_group)
+* [`config_mode`](#-kafka--broker--config_mode)
+* [`config_dir`](#-kafka--broker--config_dir)
+* [`log_dir`](#-kafka--broker--log_dir)
+* [`bin_dir`](#-kafka--broker--bin_dir)
+* [`service_name`](#-kafka--broker--service_name)
+* [`manage_service`](#-kafka--broker--manage_service)
+* [`service_ensure`](#-kafka--broker--service_ensure)
+* [`service_restart`](#-kafka--broker--service_restart)
+* [`service_requires`](#-kafka--broker--service_requires)
+* [`limit_nofile`](#-kafka--broker--limit_nofile)
+* [`limit_core`](#-kafka--broker--limit_core)
+* [`timeout_stop`](#-kafka--broker--timeout_stop)
+* [`exec_stop`](#-kafka--broker--exec_stop)
+* [`daemon_start`](#-kafka--broker--daemon_start)
+* [`env`](#-kafka--broker--env)
+* [`config`](#-kafka--broker--config)
+* [`heap_opts`](#-kafka--broker--heap_opts)
+* [`jmx_opts`](#-kafka--broker--jmx_opts)
+* [`log4j_opts`](#-kafka--broker--log4j_opts)
+* [`opts`](#-kafka--broker--opts)
+* [`manage_log4j`](#-kafka--broker--manage_log4j)
+* [`log_file_size`](#-kafka--broker--log_file_size)
+* [`log_file_count`](#-kafka--broker--log_file_count)
+
+##### `kafka_version`
Data type: `String[1]`
@@ -346,7 +346,7 @@ The version of Kafka that should be installed.
Default value: `$kafka::params::kafka_version`
-##### `scala_version`
+##### `scala_version`
Data type: `String[1]`
@@ -354,7 +354,7 @@ The scala version what Kafka was built with.
Default value: `$kafka::params::scala_version`
-##### `install_dir`
+##### `install_dir`
Data type: `Stdlib::Absolutepath`
@@ -362,7 +362,7 @@ The directory to install Kafka to.
Default value: `$kafka::params::install_dir`
-##### `mirror_url`
+##### `mirror_url`
Data type: `Stdlib::HTTPUrl`
@@ -370,7 +370,7 @@ The url where the Kafka is downloaded from.
Default value: `$kafka::params::mirror_url`
-##### `manage_java`
+##### `manage_java`
Data type: `Boolean`
@@ -378,7 +378,7 @@ Install java if it's not already installed.
Default value: `$kafka::params::manage_java`
-##### `package_dir`
+##### `package_dir`
Data type: `Stdlib::Absolutepath`
@@ -386,7 +386,7 @@ The directory to install Kafka.
Default value: `$kafka::params::package_dir`
-##### `package_name`
+##### `package_name`
Data type: `Optional[String[1]]`
@@ -394,7 +394,7 @@ Package name, when installing Kafka from a package.
Default value: `$kafka::params::package_name`
-##### `package_ensure`
+##### `package_ensure`
Data type: `String[1]`
@@ -402,7 +402,7 @@ Package version or ensure state, when installing Kafka from a package.
Default value: `$kafka::params::package_ensure`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -410,7 +410,7 @@ User to run Kafka as.
Default value: `$kafka::params::user_name`
-##### `user_shell`
+##### `user_shell`
Data type: `Stdlib::Absolutepath`
@@ -418,7 +418,7 @@ Login shell of the Kafka user.
Default value: `$kafka::params::user_shell`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -426,7 +426,7 @@ Group to run Kafka as.
Default value: `$kafka::params::group_name`
-##### `user_id`
+##### `user_id`
Data type: `Optional[Integer]`
@@ -434,7 +434,7 @@ Create the Kafka user with this ID.
Default value: `$kafka::params::user_id`
-##### `group_id`
+##### `group_id`
Data type: `Optional[Integer]`
@@ -442,7 +442,7 @@ Create the Kafka group with this ID.
Default value: `$kafka::params::group_id`
-##### `manage_user`
+##### `manage_user`
Data type: `Boolean`
@@ -450,7 +450,7 @@ Create the Kafka user if it's not already present.
Default value: `$kafka::params::manage_user`
-##### `manage_group`
+##### `manage_group`
Data type: `Boolean`
@@ -458,7 +458,7 @@ Create the Kafka group if it's not already present.
Default value: `$kafka::params::manage_group`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -466,7 +466,7 @@ The permissions for the config files.
Default value: `$kafka::params::config_mode`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -474,7 +474,7 @@ The directory to create the Kafka config files to.
Default value: `$kafka::params::config_dir`
-##### `log_dir`
+##### `log_dir`
Data type: `Stdlib::Absolutepath`
@@ -482,7 +482,7 @@ The directory for Kafka log files.
Default value: `$kafka::params::log_dir`
-##### `bin_dir`
+##### `bin_dir`
Data type: `Stdlib::Absolutepath`
@@ -490,7 +490,7 @@ The directory where the Kafka scripts are.
Default value: `$kafka::params::bin_dir`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -498,7 +498,7 @@ Set the name of the service.
Default value: `'kafka'`
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -506,7 +506,7 @@ Install the init.d or systemd service.
Default value: `$kafka::params::manage_service`
-##### `service_ensure`
+##### `service_ensure`
Data type: `Enum['running', 'stopped']`
@@ -514,7 +514,7 @@ Set the ensure state of the service.
Default value: `$kafka::params::service_ensure`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -522,7 +522,7 @@ Whether the configuration files should trigger a service restart.
Default value: `$kafka::params::service_restart`
-##### `service_requires`
+##### `service_requires`
Data type: `Array[String[1]]`
@@ -530,7 +530,7 @@ Set the list of services required to be running before Kafka.
Default value: `$kafka::params::service_requires`
-##### `limit_nofile`
+##### `limit_nofile`
Data type: `Optional[String[1]]`
@@ -538,7 +538,7 @@ Set the 'LimitNOFILE' option of the systemd service.
Default value: `$kafka::params::limit_nofile`
-##### `limit_core`
+##### `limit_core`
Data type: `Optional[String[1]]`
@@ -546,7 +546,7 @@ Set the 'LimitCORE' option of the systemd service.
Default value: `$kafka::params::limit_core`
-##### `timeout_stop`
+##### `timeout_stop`
Data type: `Optional[String[1]]`
@@ -554,7 +554,7 @@ Set the 'TimeoutStopSec' option of the systemd service.
Default value: `$kafka::params::timeout_stop`
-##### `exec_stop`
+##### `exec_stop`
Data type: `Boolean`
@@ -562,7 +562,7 @@ Set the 'ExecStop' option of the systemd service to 'kafka-server-stop.sh'.
Default value: `$kafka::params::exec_stop`
-##### `daemon_start`
+##### `daemon_start`
Data type: `Boolean`
@@ -570,7 +570,7 @@ Use the '-daemon' option when starting Kafka with 'kafka-server-start.sh'.
Default value: `$kafka::params::daemon_start`
-##### `env`
+##### `env`
Data type: `Hash`
@@ -578,7 +578,7 @@ A hash of the environment variables to set.
Default value: `{}`
-##### `config`
+##### `config`
Data type: `Hash[String[1], Any]`
@@ -586,7 +586,7 @@ A hash of the broker configuration options.
Default value: `{}`
-##### `heap_opts`
+##### `heap_opts`
Data type: `String[1]`
@@ -594,7 +594,7 @@ Set the Java heap size.
Default value: `$kafka::params::broker_heap_opts`
-##### `jmx_opts`
+##### `jmx_opts`
Data type: `String[1]`
@@ -602,7 +602,7 @@ Set the JMX options.
Default value: `$kafka::params::broker_jmx_opts`
-##### `log4j_opts`
+##### `log4j_opts`
Data type: `String[1]`
@@ -610,7 +610,7 @@ Set the Log4j options.
Default value: `$kafka::params::broker_log4j_opts`
-##### `opts`
+##### `opts`
Data type: `String[0]`
@@ -618,7 +618,7 @@ Set the Kafka options.
Default value: `$kafka::params::broker_opts`
-##### `manage_log4j`
+##### `manage_log4j`
Data type: `Boolean`
@@ -626,7 +626,7 @@ Data type: `Boolean`
Default value: `$kafka::params::manage_log4j`
-##### `log_file_size`
+##### `log_file_size`
Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
@@ -634,7 +634,7 @@ Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
Default value: `$kafka::params::log_file_size`
-##### `log_file_count`
+##### `log_file_count`
Data type: `Integer[1, 50]`
@@ -642,7 +642,7 @@ Data type: `Integer[1, 50]`
Default value: `$kafka::params::log_file_count`
-### `kafka::consumer`
+### `kafka::consumer`
This class handles the Kafka (consumer).
@@ -663,42 +663,42 @@ class { 'kafka::consumer':
The following parameters are available in the `kafka::consumer` class:
-* [`kafka_version`](#kafka_version)
-* [`scala_version`](#scala_version)
-* [`install_dir`](#install_dir)
-* [`mirror_url`](#mirror_url)
-* [`manage_java`](#manage_java)
-* [`package_dir`](#package_dir)
-* [`package_name`](#package_name)
-* [`package_ensure`](#package_ensure)
-* [`user_name`](#user_name)
-* [`user_shell`](#user_shell)
-* [`group_name`](#group_name)
-* [`user_id`](#user_id)
-* [`group_id`](#group_id)
-* [`manage_user`](#manage_user)
-* [`manage_group`](#manage_group)
-* [`config_mode`](#config_mode)
-* [`config_dir`](#config_dir)
-* [`log_dir`](#log_dir)
-* [`bin_dir`](#bin_dir)
-* [`service_name`](#service_name)
-* [`manage_service`](#manage_service)
-* [`service_ensure`](#service_ensure)
-* [`service_restart`](#service_restart)
-* [`service_requires`](#service_requires)
-* [`limit_nofile`](#limit_nofile)
-* [`limit_core`](#limit_core)
-* [`env`](#env)
-* [`config`](#config)
-* [`service_config`](#service_config)
-* [`jmx_opts`](#jmx_opts)
-* [`log4j_opts`](#log4j_opts)
-* [`manage_log4j`](#manage_log4j)
-* [`log_file_size`](#log_file_size)
-* [`log_file_count`](#log_file_count)
-
-##### `kafka_version`
+* [`kafka_version`](#-kafka--consumer--kafka_version)
+* [`scala_version`](#-kafka--consumer--scala_version)
+* [`install_dir`](#-kafka--consumer--install_dir)
+* [`mirror_url`](#-kafka--consumer--mirror_url)
+* [`manage_java`](#-kafka--consumer--manage_java)
+* [`package_dir`](#-kafka--consumer--package_dir)
+* [`package_name`](#-kafka--consumer--package_name)
+* [`package_ensure`](#-kafka--consumer--package_ensure)
+* [`user_name`](#-kafka--consumer--user_name)
+* [`user_shell`](#-kafka--consumer--user_shell)
+* [`group_name`](#-kafka--consumer--group_name)
+* [`user_id`](#-kafka--consumer--user_id)
+* [`group_id`](#-kafka--consumer--group_id)
+* [`manage_user`](#-kafka--consumer--manage_user)
+* [`manage_group`](#-kafka--consumer--manage_group)
+* [`config_mode`](#-kafka--consumer--config_mode)
+* [`config_dir`](#-kafka--consumer--config_dir)
+* [`log_dir`](#-kafka--consumer--log_dir)
+* [`bin_dir`](#-kafka--consumer--bin_dir)
+* [`service_name`](#-kafka--consumer--service_name)
+* [`manage_service`](#-kafka--consumer--manage_service)
+* [`service_ensure`](#-kafka--consumer--service_ensure)
+* [`service_restart`](#-kafka--consumer--service_restart)
+* [`service_requires`](#-kafka--consumer--service_requires)
+* [`limit_nofile`](#-kafka--consumer--limit_nofile)
+* [`limit_core`](#-kafka--consumer--limit_core)
+* [`env`](#-kafka--consumer--env)
+* [`config`](#-kafka--consumer--config)
+* [`service_config`](#-kafka--consumer--service_config)
+* [`jmx_opts`](#-kafka--consumer--jmx_opts)
+* [`log4j_opts`](#-kafka--consumer--log4j_opts)
+* [`manage_log4j`](#-kafka--consumer--manage_log4j)
+* [`log_file_size`](#-kafka--consumer--log_file_size)
+* [`log_file_count`](#-kafka--consumer--log_file_count)
+
+##### `kafka_version`
Data type: `String[1]`
@@ -706,7 +706,7 @@ The version of Kafka that should be installed.
Default value: `$kafka::params::kafka_version`
-##### `scala_version`
+##### `scala_version`
Data type: `String[1]`
@@ -714,7 +714,7 @@ The scala version what Kafka was built with.
Default value: `$kafka::params::scala_version`
-##### `install_dir`
+##### `install_dir`
Data type: `Stdlib::Absolutepath`
@@ -722,7 +722,7 @@ The directory to install Kafka to.
Default value: `$kafka::params::install_dir`
-##### `mirror_url`
+##### `mirror_url`
Data type: `Stdlib::HTTPUrl`
@@ -730,7 +730,7 @@ The url where the Kafka is downloaded from.
Default value: `$kafka::params::mirror_url`
-##### `manage_java`
+##### `manage_java`
Data type: `Boolean`
@@ -738,7 +738,7 @@ Install java if it's not already installed.
Default value: `$kafka::params::manage_java`
-##### `package_dir`
+##### `package_dir`
Data type: `Stdlib::Absolutepath`
@@ -746,7 +746,7 @@ The directory to install Kafka.
Default value: `$kafka::params::package_dir`
-##### `package_name`
+##### `package_name`
Data type: `Optional[String[1]]`
@@ -754,7 +754,7 @@ Package name, when installing Kafka from a package.
Default value: `$kafka::params::package_name`
-##### `package_ensure`
+##### `package_ensure`
Data type: `String[1]`
@@ -762,7 +762,7 @@ Package version or ensure state, when installing Kafka from a package.
Default value: `$kafka::params::package_ensure`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -770,7 +770,7 @@ User to run Kafka as.
Default value: `$kafka::params::user_name`
-##### `user_shell`
+##### `user_shell`
Data type: `Stdlib::Absolutepath`
@@ -778,7 +778,7 @@ Login shell of the Kafka user.
Default value: `$kafka::params::user_shell`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -786,7 +786,7 @@ Group to run Kafka as.
Default value: `$kafka::params::group_name`
-##### `user_id`
+##### `user_id`
Data type: `Optional[Integer]`
@@ -794,7 +794,7 @@ Create the Kafka user with this ID.
Default value: `$kafka::params::user_id`
-##### `group_id`
+##### `group_id`
Data type: `Optional[Integer]`
@@ -802,7 +802,7 @@ Create the Kafka group with this ID.
Default value: `$kafka::params::group_id`
-##### `manage_user`
+##### `manage_user`
Data type: `Boolean`
@@ -810,7 +810,7 @@ Create the Kafka user if it's not already present.
Default value: `$kafka::params::manage_user`
-##### `manage_group`
+##### `manage_group`
Data type: `Boolean`
@@ -818,7 +818,7 @@ Create the Kafka group if it's not already present.
Default value: `$kafka::params::manage_group`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -826,7 +826,7 @@ The permissions for the config files.
Default value: `$kafka::params::config_mode`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -834,7 +834,7 @@ The directory to create the Kafka config files to.
Default value: `$kafka::params::config_dir`
-##### `log_dir`
+##### `log_dir`
Data type: `Stdlib::Absolutepath`
@@ -842,7 +842,7 @@ The directory for Kafka log files.
Default value: `$kafka::params::log_dir`
-##### `bin_dir`
+##### `bin_dir`
Data type: `Stdlib::Absolutepath`
@@ -850,7 +850,7 @@ The directory where the Kafka scripts are.
Default value: `$kafka::params::bin_dir`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -858,7 +858,7 @@ Set the name of the service.
Default value: `'kafka-consumer'`
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -866,7 +866,7 @@ Install the init.d or systemd service.
Default value: `$kafka::params::manage_service`
-##### `service_ensure`
+##### `service_ensure`
Data type: `Enum['running', 'stopped']`
@@ -874,7 +874,7 @@ Set the ensure state of the service.
Default value: `$kafka::params::service_ensure`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -882,7 +882,7 @@ Whether the configuration files should trigger a service restart.
Default value: `$kafka::params::service_restart`
-##### `service_requires`
+##### `service_requires`
Data type: `Array[String[1]]`
@@ -890,7 +890,7 @@ Set the list of services required to be running before Kafka.
Default value: `$kafka::params::service_requires`
-##### `limit_nofile`
+##### `limit_nofile`
Data type: `Optional[String[1]]`
@@ -898,7 +898,7 @@ Set the 'LimitNOFILE' option of the systemd service.
Default value: `$kafka::params::limit_nofile`
-##### `limit_core`
+##### `limit_core`
Data type: `Optional[String[1]]`
@@ -906,7 +906,7 @@ Set the 'LimitCORE' option of the systemd service.
Default value: `$kafka::params::limit_core`
-##### `env`
+##### `env`
Data type: `Hash`
@@ -914,7 +914,7 @@ A hash of the environment variables to set.
Default value: `{}`
-##### `config`
+##### `config`
Data type: `Hash[String[1], Any]`
@@ -922,7 +922,7 @@ A hash of the consumer configuration options.
Default value: `{}`
-##### `service_config`
+##### `service_config`
Data type: `Hash[String[1],String[1]]`
@@ -930,7 +930,7 @@ A hash of the `kafka-console-consumer.sh` script options.
Default value: `{}`
-##### `jmx_opts`
+##### `jmx_opts`
Data type: `String[1]`
@@ -938,7 +938,7 @@ Set the JMX options.
Default value: `$kafka::params::consumer_jmx_opts`
-##### `log4j_opts`
+##### `log4j_opts`
Data type: `String[1]`
@@ -946,7 +946,7 @@ Set the Log4j options.
Default value: `$kafka::params::consumer_log4j_opts`
-##### `manage_log4j`
+##### `manage_log4j`
Data type: `Boolean`
@@ -954,7 +954,7 @@ Data type: `Boolean`
Default value: `$kafka::params::manage_log4j`
-##### `log_file_size`
+##### `log_file_size`
Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
@@ -962,7 +962,7 @@ Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
Default value: `$kafka::params::log_file_size`
-##### `log_file_count`
+##### `log_file_count`
Data type: `Integer[1, 50]`
@@ -970,7 +970,7 @@ Data type: `Integer[1, 50]`
Default value: `$kafka::params::log_file_count`
-### `kafka::consumer::config`
+### `kafka::consumer::config`
This class handles the Kafka (consumer) config.
@@ -978,19 +978,19 @@ This class handles the Kafka (consumer) config.
The following parameters are available in the `kafka::consumer::config` class:
-* [`manage_service`](#manage_service)
-* [`service_name`](#service_name)
-* [`service_restart`](#service_restart)
-* [`config`](#config)
-* [`config_dir`](#config_dir)
-* [`user_name`](#user_name)
-* [`group_name`](#group_name)
-* [`config_mode`](#config_mode)
-* [`manage_log4j`](#manage_log4j)
-* [`log_file_size`](#log_file_size)
-* [`log_file_count`](#log_file_count)
+* [`manage_service`](#-kafka--consumer--config--manage_service)
+* [`service_name`](#-kafka--consumer--config--service_name)
+* [`service_restart`](#-kafka--consumer--config--service_restart)
+* [`config`](#-kafka--consumer--config--config)
+* [`config_dir`](#-kafka--consumer--config--config_dir)
+* [`user_name`](#-kafka--consumer--config--user_name)
+* [`group_name`](#-kafka--consumer--config--group_name)
+* [`config_mode`](#-kafka--consumer--config--config_mode)
+* [`manage_log4j`](#-kafka--consumer--config--manage_log4j)
+* [`log_file_size`](#-kafka--consumer--config--log_file_size)
+* [`log_file_count`](#-kafka--consumer--config--log_file_count)
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -998,7 +998,7 @@ Data type: `Boolean`
Default value: `$kafka::consumer::manage_service`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -1006,7 +1006,7 @@ Data type: `String[1]`
Default value: `$kafka::consumer::service_name`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -1014,7 +1014,7 @@ Data type: `Boolean`
Default value: `$kafka::consumer::service_restart`
-##### `config`
+##### `config`
Data type: `Hash[String[1], Any]`
@@ -1022,7 +1022,7 @@ Data type: `Hash[String[1], Any]`
Default value: `$kafka::consumer::config`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -1030,7 +1030,7 @@ Data type: `Stdlib::Absolutepath`
Default value: `$kafka::consumer::config_dir`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -1038,7 +1038,7 @@ Data type: `String[1]`
Default value: `$kafka::consumer::user_name`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -1046,7 +1046,7 @@ Data type: `String[1]`
Default value: `$kafka::consumer::group_name`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -1054,7 +1054,7 @@ Data type: `Stdlib::Filemode`
Default value: `$kafka::consumer::config_mode`
-##### `manage_log4j`
+##### `manage_log4j`
Data type: `Boolean`
@@ -1062,7 +1062,7 @@ Data type: `Boolean`
Default value: `$kafka::consumer::manage_log4j`
-##### `log_file_size`
+##### `log_file_size`
Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
@@ -1070,7 +1070,7 @@ Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
Default value: `$kafka::consumer::log_file_size`
-##### `log_file_count`
+##### `log_file_count`
Data type: `Integer[1, 50]`
@@ -1078,7 +1078,7 @@ Data type: `Integer[1, 50]`
Default value: `$kafka::consumer::log_file_count`
-### `kafka::mirror`
+### `kafka::mirror`
This class handles the Kafka (mirror).
@@ -1105,44 +1105,44 @@ class { 'kafka::mirror':
The following parameters are available in the `kafka::mirror` class:
-* [`kafka_version`](#kafka_version)
-* [`scala_version`](#scala_version)
-* [`install_dir`](#install_dir)
-* [`mirror_url`](#mirror_url)
-* [`manage_java`](#manage_java)
-* [`package_dir`](#package_dir)
-* [`package_name`](#package_name)
-* [`package_ensure`](#package_ensure)
-* [`user_name`](#user_name)
-* [`user_shell`](#user_shell)
-* [`group_name`](#group_name)
-* [`user_id`](#user_id)
-* [`group_id`](#group_id)
-* [`manage_user`](#manage_user)
-* [`manage_group`](#manage_group)
-* [`config_mode`](#config_mode)
-* [`config_dir`](#config_dir)
-* [`log_dir`](#log_dir)
-* [`bin_dir`](#bin_dir)
-* [`service_name`](#service_name)
-* [`manage_service`](#manage_service)
-* [`service_ensure`](#service_ensure)
-* [`service_restart`](#service_restart)
-* [`service_requires`](#service_requires)
-* [`limit_nofile`](#limit_nofile)
-* [`limit_core`](#limit_core)
-* [`env`](#env)
-* [`consumer_config`](#consumer_config)
-* [`producer_config`](#producer_config)
-* [`service_config`](#service_config)
-* [`heap_opts`](#heap_opts)
-* [`jmx_opts`](#jmx_opts)
-* [`log4j_opts`](#log4j_opts)
-* [`manage_log4j`](#manage_log4j)
-* [`log_file_size`](#log_file_size)
-* [`log_file_count`](#log_file_count)
-
-##### `kafka_version`
+* [`kafka_version`](#-kafka--mirror--kafka_version)
+* [`scala_version`](#-kafka--mirror--scala_version)
+* [`install_dir`](#-kafka--mirror--install_dir)
+* [`mirror_url`](#-kafka--mirror--mirror_url)
+* [`manage_java`](#-kafka--mirror--manage_java)
+* [`package_dir`](#-kafka--mirror--package_dir)
+* [`package_name`](#-kafka--mirror--package_name)
+* [`package_ensure`](#-kafka--mirror--package_ensure)
+* [`user_name`](#-kafka--mirror--user_name)
+* [`user_shell`](#-kafka--mirror--user_shell)
+* [`group_name`](#-kafka--mirror--group_name)
+* [`user_id`](#-kafka--mirror--user_id)
+* [`group_id`](#-kafka--mirror--group_id)
+* [`manage_user`](#-kafka--mirror--manage_user)
+* [`manage_group`](#-kafka--mirror--manage_group)
+* [`config_mode`](#-kafka--mirror--config_mode)
+* [`config_dir`](#-kafka--mirror--config_dir)
+* [`log_dir`](#-kafka--mirror--log_dir)
+* [`bin_dir`](#-kafka--mirror--bin_dir)
+* [`service_name`](#-kafka--mirror--service_name)
+* [`manage_service`](#-kafka--mirror--manage_service)
+* [`service_ensure`](#-kafka--mirror--service_ensure)
+* [`service_restart`](#-kafka--mirror--service_restart)
+* [`service_requires`](#-kafka--mirror--service_requires)
+* [`limit_nofile`](#-kafka--mirror--limit_nofile)
+* [`limit_core`](#-kafka--mirror--limit_core)
+* [`env`](#-kafka--mirror--env)
+* [`consumer_config`](#-kafka--mirror--consumer_config)
+* [`producer_config`](#-kafka--mirror--producer_config)
+* [`service_config`](#-kafka--mirror--service_config)
+* [`heap_opts`](#-kafka--mirror--heap_opts)
+* [`jmx_opts`](#-kafka--mirror--jmx_opts)
+* [`log4j_opts`](#-kafka--mirror--log4j_opts)
+* [`manage_log4j`](#-kafka--mirror--manage_log4j)
+* [`log_file_size`](#-kafka--mirror--log_file_size)
+* [`log_file_count`](#-kafka--mirror--log_file_count)
+
+##### `kafka_version`
Data type: `String[1]`
@@ -1150,7 +1150,7 @@ The version of Kafka that should be installed.
Default value: `$kafka::params::kafka_version`
-##### `scala_version`
+##### `scala_version`
Data type: `String[1]`
@@ -1158,7 +1158,7 @@ The scala version what Kafka was built with.
Default value: `$kafka::params::scala_version`
-##### `install_dir`
+##### `install_dir`
Data type: `Stdlib::Absolutepath`
@@ -1166,7 +1166,7 @@ The directory to install Kafka to.
Default value: `$kafka::params::install_dir`
-##### `mirror_url`
+##### `mirror_url`
Data type: `Stdlib::HTTPUrl`
@@ -1174,7 +1174,7 @@ The url where the Kafka is downloaded from.
Default value: `$kafka::params::mirror_url`
-##### `manage_java`
+##### `manage_java`
Data type: `Boolean`
@@ -1182,7 +1182,7 @@ Install java if it's not already installed.
Default value: `$kafka::params::manage_java`
-##### `package_dir`
+##### `package_dir`
Data type: `Stdlib::Absolutepath`
@@ -1190,7 +1190,7 @@ The directory to install Kafka.
Default value: `$kafka::params::package_dir`
-##### `package_name`
+##### `package_name`
Data type: `Optional[String[1]]`
@@ -1198,7 +1198,7 @@ Package name, when installing Kafka from a package.
Default value: `$kafka::params::package_name`
-##### `package_ensure`
+##### `package_ensure`
Data type: `String[1]`
@@ -1206,7 +1206,7 @@ Package version or ensure state, when installing Kafka from a package.
Default value: `$kafka::params::package_ensure`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -1214,7 +1214,7 @@ User to run Kafka as.
Default value: `$kafka::params::user_name`
-##### `user_shell`
+##### `user_shell`
Data type: `Stdlib::Absolutepath`
@@ -1222,7 +1222,7 @@ Login shell of the Kafka user.
Default value: `$kafka::params::user_shell`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -1230,7 +1230,7 @@ Group to run Kafka as.
Default value: `$kafka::params::group_name`
-##### `user_id`
+##### `user_id`
Data type: `Optional[Integer]`
@@ -1238,7 +1238,7 @@ Create the Kafka user with this ID.
Default value: `$kafka::params::user_id`
-##### `group_id`
+##### `group_id`
Data type: `Optional[Integer]`
@@ -1246,7 +1246,7 @@ Create the Kafka group with this ID.
Default value: `$kafka::params::group_id`
-##### `manage_user`
+##### `manage_user`
Data type: `Boolean`
@@ -1254,7 +1254,7 @@ Create the Kafka user if it's not already present.
Default value: `$kafka::params::manage_user`
-##### `manage_group`
+##### `manage_group`
Data type: `Boolean`
@@ -1262,7 +1262,7 @@ Create the Kafka group if it's not already present.
Default value: `$kafka::params::manage_group`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -1270,7 +1270,7 @@ The permissions for the config files.
Default value: `$kafka::params::config_mode`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -1278,7 +1278,7 @@ The directory to create the Kafka config files to.
Default value: `$kafka::params::config_dir`
-##### `log_dir`
+##### `log_dir`
Data type: `Stdlib::Absolutepath`
@@ -1286,7 +1286,7 @@ The directory for Kafka log files.
Default value: `$kafka::params::log_dir`
-##### `bin_dir`
+##### `bin_dir`
Data type: `Stdlib::Absolutepath`
@@ -1294,7 +1294,7 @@ The directory where the Kafka scripts are.
Default value: `$kafka::params::bin_dir`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -1302,7 +1302,7 @@ Set the name of the service.
Default value: `'kafka-mirror'`
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -1310,7 +1310,7 @@ Install the init.d or systemd service.
Default value: `$kafka::params::manage_service`
-##### `service_ensure`
+##### `service_ensure`
Data type: `Enum['running', 'stopped']`
@@ -1318,7 +1318,7 @@ Set the ensure state of the service.
Default value: `$kafka::params::service_ensure`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -1326,7 +1326,7 @@ Whether the configuration files should trigger a service restart.
Default value: `$kafka::params::service_restart`
-##### `service_requires`
+##### `service_requires`
Data type: `Array[String[1]]`
@@ -1334,7 +1334,7 @@ Set the list of services required to be running before Kafka.
Default value: `$kafka::params::service_requires`
-##### `limit_nofile`
+##### `limit_nofile`
Data type: `Optional[String[1]]`
@@ -1342,7 +1342,7 @@ Set the 'LimitNOFILE' option of the systemd service.
Default value: `$kafka::params::limit_nofile`
-##### `limit_core`
+##### `limit_core`
Data type: `Optional[String[1]]`
@@ -1350,7 +1350,7 @@ Set the 'LimitCORE' option of the systemd service.
Default value: `$kafka::params::limit_core`
-##### `env`
+##### `env`
Data type: `Hash`
@@ -1358,7 +1358,7 @@ A hash of the environment variables to set.
Default value: `{}`
-##### `consumer_config`
+##### `consumer_config`
Data type: `Hash[String[1],String[1]]`
@@ -1366,7 +1366,7 @@ A hash of the consumer configuration options.
Default value: `{}`
-##### `producer_config`
+##### `producer_config`
Data type: `Hash[String[1],String[1]]`
@@ -1374,7 +1374,7 @@ A hash of the producer configuration options.
Default value: `{}`
-##### `service_config`
+##### `service_config`
Data type: `Hash[String[1],String[1]]`
@@ -1382,7 +1382,7 @@ A hash of the mirror script options.
Default value: `{}`
-##### `heap_opts`
+##### `heap_opts`
Data type: `String[1]`
@@ -1390,7 +1390,7 @@ Set the Java heap size.
Default value: `$kafka::params::mirror_heap_opts`
-##### `jmx_opts`
+##### `jmx_opts`
Data type: `String[1]`
@@ -1398,7 +1398,7 @@ Set the JMX options.
Default value: `$kafka::params::mirror_jmx_opts`
-##### `log4j_opts`
+##### `log4j_opts`
Data type: `String[1]`
@@ -1406,7 +1406,7 @@ Set the Log4j options.
Default value: `$kafka::params::mirror_log4j_opts`
-##### `manage_log4j`
+##### `manage_log4j`
Data type: `Boolean`
@@ -1414,7 +1414,7 @@ Data type: `Boolean`
Default value: `$kafka::params::manage_log4j`
-##### `log_file_size`
+##### `log_file_size`
Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
@@ -1422,7 +1422,7 @@ Data type: `Pattern[/[1-9][0-9]*[KMG]B/]`
Default value: `$kafka::params::log_file_size`
-##### `log_file_count`
+##### `log_file_count`
Data type: `Integer[1, 50]`
@@ -1430,11 +1430,11 @@ Data type: `Integer[1, 50]`
Default value: `$kafka::params::log_file_count`
-### `kafka::params`
+### `kafka::params`
This class provides default parameters.
-### `kafka::producer`
+### `kafka::producer`
This class handles the Kafka (producer).
@@ -1455,46 +1455,46 @@ class { 'kafka::producer':
The following parameters are available in the `kafka::producer` class:
-* [`input`](#input)
-* [`kafka_version`](#kafka_version)
-* [`scala_version`](#scala_version)
-* [`install_dir`](#install_dir)
-* [`mirror_url`](#mirror_url)
-* [`manage_java`](#manage_java)
-* [`package_dir`](#package_dir)
-* [`package_name`](#package_name)
-* [`package_ensure`](#package_ensure)
-* [`user_name`](#user_name)
-* [`user_shell`](#user_shell)
-* [`group_name`](#group_name)
-* [`user_id`](#user_id)
-* [`group_id`](#group_id)
-* [`manage_user`](#manage_user)
-* [`manage_group`](#manage_group)
-* [`config_mode`](#config_mode)
-* [`config_dir`](#config_dir)
-* [`log_dir`](#log_dir)
-* [`bin_dir`](#bin_dir)
-* [`service_name`](#service_name)
-* [`manage_service`](#manage_service)
-* [`service_ensure`](#service_ensure)
-* [`service_restart`](#service_restart)
-* [`service_requires`](#service_requires)
-* [`limit_nofile`](#limit_nofile)
-* [`limit_core`](#limit_core)
-* [`env`](#env)
-* [`config`](#config)
-* [`service_config`](#service_config)
-* [`jmx_opts`](#jmx_opts)
-* [`log4j_opts`](#log4j_opts)
-
-##### `input`
+* [`input`](#-kafka--producer--input)
+* [`kafka_version`](#-kafka--producer--kafka_version)
+* [`scala_version`](#-kafka--producer--scala_version)
+* [`install_dir`](#-kafka--producer--install_dir)
+* [`mirror_url`](#-kafka--producer--mirror_url)
+* [`manage_java`](#-kafka--producer--manage_java)
+* [`package_dir`](#-kafka--producer--package_dir)
+* [`package_name`](#-kafka--producer--package_name)
+* [`package_ensure`](#-kafka--producer--package_ensure)
+* [`user_name`](#-kafka--producer--user_name)
+* [`user_shell`](#-kafka--producer--user_shell)
+* [`group_name`](#-kafka--producer--group_name)
+* [`user_id`](#-kafka--producer--user_id)
+* [`group_id`](#-kafka--producer--group_id)
+* [`manage_user`](#-kafka--producer--manage_user)
+* [`manage_group`](#-kafka--producer--manage_group)
+* [`config_mode`](#-kafka--producer--config_mode)
+* [`config_dir`](#-kafka--producer--config_dir)
+* [`log_dir`](#-kafka--producer--log_dir)
+* [`bin_dir`](#-kafka--producer--bin_dir)
+* [`service_name`](#-kafka--producer--service_name)
+* [`manage_service`](#-kafka--producer--manage_service)
+* [`service_ensure`](#-kafka--producer--service_ensure)
+* [`service_restart`](#-kafka--producer--service_restart)
+* [`service_requires`](#-kafka--producer--service_requires)
+* [`limit_nofile`](#-kafka--producer--limit_nofile)
+* [`limit_core`](#-kafka--producer--limit_core)
+* [`env`](#-kafka--producer--env)
+* [`config`](#-kafka--producer--config)
+* [`service_config`](#-kafka--producer--service_config)
+* [`jmx_opts`](#-kafka--producer--jmx_opts)
+* [`log4j_opts`](#-kafka--producer--log4j_opts)
+
+##### `input`
Data type: `Optional[String[1]]`
Set named pipe as input.
-##### `kafka_version`
+##### `kafka_version`
Data type: `String[1]`
@@ -1502,7 +1502,7 @@ The version of Kafka that should be installed.
Default value: `$kafka::params::kafka_version`
-##### `scala_version`
+##### `scala_version`
Data type: `String[1]`
@@ -1510,7 +1510,7 @@ The scala version what Kafka was built with.
Default value: `$kafka::params::scala_version`
-##### `install_dir`
+##### `install_dir`
Data type: `Stdlib::Absolutepath`
@@ -1518,7 +1518,7 @@ The directory to install Kafka to.
Default value: `$kafka::params::install_dir`
-##### `mirror_url`
+##### `mirror_url`
Data type: `Stdlib::HTTPUrl`
@@ -1526,7 +1526,7 @@ The url where the Kafka is downloaded from.
Default value: `$kafka::params::mirror_url`
-##### `manage_java`
+##### `manage_java`
Data type: `Boolean`
@@ -1534,7 +1534,7 @@ Install java if it's not already installed.
Default value: `$kafka::params::manage_java`
-##### `package_dir`
+##### `package_dir`
Data type: `Stdlib::Absolutepath`
@@ -1542,7 +1542,7 @@ The directory to install Kafka.
Default value: `$kafka::params::package_dir`
-##### `package_name`
+##### `package_name`
Data type: `Optional[String[1]]`
@@ -1550,7 +1550,7 @@ Package name, when installing Kafka from a package.
Default value: `$kafka::params::package_name`
-##### `package_ensure`
+##### `package_ensure`
Data type: `String[1]`
@@ -1558,7 +1558,7 @@ Package version or ensure state, when installing Kafka from a package.
Default value: `$kafka::params::package_ensure`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -1566,7 +1566,7 @@ User to run Kafka as.
Default value: `$kafka::params::user_name`
-##### `user_shell`
+##### `user_shell`
Data type: `Stdlib::Absolutepath`
@@ -1574,7 +1574,7 @@ Login shell of the Kafka user.
Default value: `$kafka::params::user_shell`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -1582,7 +1582,7 @@ Group to run Kafka as.
Default value: `$kafka::params::group_name`
-##### `user_id`
+##### `user_id`
Data type: `Optional[Integer]`
@@ -1590,7 +1590,7 @@ Create the Kafka user with this ID.
Default value: `$kafka::params::user_id`
-##### `group_id`
+##### `group_id`
Data type: `Optional[Integer]`
@@ -1598,7 +1598,7 @@ Create the Kafka group with this ID.
Default value: `$kafka::params::group_id`
-##### `manage_user`
+##### `manage_user`
Data type: `Boolean`
@@ -1606,7 +1606,7 @@ Create the Kafka user if it's not already present.
Default value: `$kafka::params::manage_user`
-##### `manage_group`
+##### `manage_group`
Data type: `Boolean`
@@ -1614,7 +1614,7 @@ Create the Kafka group if it's not already present.
Default value: `$kafka::params::manage_group`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -1622,7 +1622,7 @@ The permissions for the config files.
Default value: `$kafka::params::config_mode`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -1630,7 +1630,7 @@ The directory to create the Kafka config files to.
Default value: `$kafka::params::config_dir`
-##### `log_dir`
+##### `log_dir`
Data type: `Stdlib::Absolutepath`
@@ -1638,7 +1638,7 @@ The directory for Kafka log files.
Default value: `$kafka::params::log_dir`
-##### `bin_dir`
+##### `bin_dir`
Data type: `Stdlib::Absolutepath`
@@ -1646,7 +1646,7 @@ The directory where the Kafka scripts are.
Default value: `$kafka::params::bin_dir`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -1654,7 +1654,7 @@ Set the name of the service.
Default value: `'kafka-producer'`
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -1662,7 +1662,7 @@ Install the init.d or systemd service.
Default value: `$kafka::params::manage_service`
-##### `service_ensure`
+##### `service_ensure`
Data type: `Enum['running', 'stopped']`
@@ -1670,7 +1670,7 @@ Set the ensure state of the service.
Default value: `$kafka::params::service_ensure`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -1678,7 +1678,7 @@ Whether the configuration files should trigger a service restart.
Default value: `$kafka::params::service_restart`
-##### `service_requires`
+##### `service_requires`
Data type: `Array[String[1]]`
@@ -1686,7 +1686,7 @@ Set the list of services required to be running before Kafka.
Default value: `$kafka::params::service_requires`
-##### `limit_nofile`
+##### `limit_nofile`
Data type: `Optional[String[1]]`
@@ -1694,7 +1694,7 @@ Set the 'LimitNOFILE' option of the systemd service.
Default value: `$kafka::params::limit_nofile`
-##### `limit_core`
+##### `limit_core`
Data type: `Optional[String[1]]`
@@ -1702,7 +1702,7 @@ Set the 'LimitCORE' option of the systemd service.
Default value: `$kafka::params::limit_core`
-##### `env`
+##### `env`
Data type: `Hash`
@@ -1710,7 +1710,7 @@ A hash of the environment variables to set.
Default value: `{}`
-##### `config`
+##### `config`
Data type: `Hash[String[1], Any]`
@@ -1718,7 +1718,7 @@ A hash of the producer configuration options.
Default value: `{}`
-##### `service_config`
+##### `service_config`
Data type: `Hash[String[1],String[1]]`
@@ -1726,7 +1726,7 @@ A hash of the `kafka-console-producer.sh` script options.
Default value: `{}`
-##### `jmx_opts`
+##### `jmx_opts`
Data type: `String[1]`
@@ -1734,7 +1734,7 @@ Set the JMX options.
Default value: `$kafka::params::producer_jmx_opts`
-##### `log4j_opts`
+##### `log4j_opts`
Data type: `String[1]`
@@ -1742,7 +1742,7 @@ Set the Log4j options.
Default value: `$kafka::params::producer_log4j_opts`
-### `kafka::producer::config`
+### `kafka::producer::config`
This class handles the Kafka (producer) config.
@@ -1750,16 +1750,16 @@ This class handles the Kafka (producer) config.
The following parameters are available in the `kafka::producer::config` class:
-* [`manage_service`](#manage_service)
-* [`service_name`](#service_name)
-* [`service_restart`](#service_restart)
-* [`config`](#config)
-* [`config_dir`](#config_dir)
-* [`user_name`](#user_name)
-* [`group_name`](#group_name)
-* [`config_mode`](#config_mode)
+* [`manage_service`](#-kafka--producer--config--manage_service)
+* [`service_name`](#-kafka--producer--config--service_name)
+* [`service_restart`](#-kafka--producer--config--service_restart)
+* [`config`](#-kafka--producer--config--config)
+* [`config_dir`](#-kafka--producer--config--config_dir)
+* [`user_name`](#-kafka--producer--config--user_name)
+* [`group_name`](#-kafka--producer--config--group_name)
+* [`config_mode`](#-kafka--producer--config--config_mode)
-##### `manage_service`
+##### `manage_service`
Data type: `Boolean`
@@ -1767,7 +1767,7 @@ Data type: `Boolean`
Default value: `$kafka::producer::manage_service`
-##### `service_name`
+##### `service_name`
Data type: `String[1]`
@@ -1775,7 +1775,7 @@ Data type: `String[1]`
Default value: `$kafka::producer::service_name`
-##### `service_restart`
+##### `service_restart`
Data type: `Boolean`
@@ -1783,7 +1783,7 @@ Data type: `Boolean`
Default value: `$kafka::producer::service_restart`
-##### `config`
+##### `config`
Data type: `Hash[String[1], Any]`
@@ -1791,7 +1791,7 @@ Data type: `Hash[String[1], Any]`
Default value: `$kafka::producer::config`
-##### `config_dir`
+##### `config_dir`
Data type: `Stdlib::Absolutepath`
@@ -1799,7 +1799,7 @@ Data type: `Stdlib::Absolutepath`
Default value: `$kafka::producer::config_dir`
-##### `user_name`
+##### `user_name`
Data type: `String[1]`
@@ -1807,7 +1807,7 @@ Data type: `String[1]`
Default value: `$kafka::producer::user_name`
-##### `group_name`
+##### `group_name`
Data type: `String[1]`
@@ -1815,7 +1815,7 @@ Data type: `String[1]`
Default value: `$kafka::producer::group_name`
-##### `config_mode`
+##### `config_mode`
Data type: `Stdlib::Filemode`
@@ -1825,7 +1825,7 @@ Default value: `$kafka::producer::config_mode`
## Defined types
-### `kafka::topic`
+### `kafka::topic`
This defined type handles the creation of Kafka topics.
@@ -1846,31 +1846,40 @@ kafka::topic { 'test':
The following parameters are available in the `kafka::topic` defined type:
-* [`ensure`](#ensure)
-* [`zookeeper`](#zookeeper)
-* [`replication_factor`](#replication_factor)
-* [`partitions`](#partitions)
-* [`bin_dir`](#bin_dir)
-* [`config`](#config)
+* [`ensure`](#-kafka--topic--ensure)
+* [`zookeeper`](#-kafka--topic--zookeeper)
+* [`bootstrap_server`](#-kafka--topic--bootstrap_server)
+* [`replication_factor`](#-kafka--topic--replication_factor)
+* [`partitions`](#-kafka--topic--partitions)
+* [`bin_dir`](#-kafka--topic--bin_dir)
+* [`config`](#-kafka--topic--config)
-##### `ensure`
+##### `ensure`
-Data type: `String[1]`
+Data type: `Optional[String[1]]`
Should the topic be created.
-Default value: `''`
+Default value: `undef`
-##### `zookeeper`
+##### `zookeeper`
-Data type: `String[1]`
+Data type: `Optional[String[1]]`
The connection string for the ZooKeeper connection in the form host:port.
-Multiple hosts can be given to allow fail-over.
+Multiple hosts can be given to allow fail-over. Kafka < 3.0.0 only!
+
+Default value: `undef`
+
+##### `bootstrap_server`
+
+Data type: `Optional[String[1]]`
+
+The Kafka server to connect to in the form host:port. Kafka >= 2.2.0 only!
-Default value: `''`
+Default value: `undef`
-##### `replication_factor`
+##### `replication_factor`
Data type: `Integer`
@@ -1879,7 +1888,7 @@ not supplied, defaults to the cluster default.
Default value: `1`
-##### `partitions`
+##### `partitions`
Data type: `Integer`
@@ -1888,7 +1897,7 @@ supplied for create, defaults to the cluster default.
Default value: `1`
-##### `bin_dir`
+##### `bin_dir`
Data type: `String[1]`
@@ -1896,12 +1905,12 @@ The directory where the file kafka-topics.sh is located.
Default value: `'/opt/kafka/bin'`
-##### `config`
+##### `config`
Data type: `Optional[Hash[String[1],String[1]]]`
A topic configuration override for the topic being created or altered.
See the Kafka documentation for full details on the topic configs.
-Default value: ``undef``
+Default value: `undef`
diff --git a/Rakefile b/Rakefile
index 80b799d6..1d191635 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,7 +1,7 @@
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
-# Attempt to load voxupuli-test (which pulls in puppetlabs_spec_helper),
+# Attempt to load voxpupuli-test (which pulls in puppetlabs_spec_helper),
# otherwise attempt to load it directly.
begin
require 'voxpupuli/test/rake'
@@ -24,6 +24,10 @@ end
begin
require 'voxpupuli/release/rake_tasks'
rescue LoadError
+ # voxpupuli-release not present
+else
+ GCGConfig.user = 'voxpupuli'
+ GCGConfig.project = 'puppet-kafka'
end
desc "Run main 'test' task and report merged results to coveralls"
@@ -37,36 +41,4 @@ task test_with_coveralls: [:test] do
end
end
-desc 'Generate REFERENCE.md'
-task :reference, [:debug, :backtrace] do |t, args|
- patterns = ''
- Rake::Task['strings:generate:reference'].invoke(patterns, args[:debug], args[:backtrace])
-end
-
-begin
- require 'github_changelog_generator/task'
- require 'puppet_blacksmith'
- GitHubChangelogGenerator::RakeTask.new :changelog do |config|
- metadata = Blacksmith::Modulefile.new
- config.future_release = "v#{metadata.version}" if metadata.version =~ /^\d+\.\d+.\d+$/
- config.header = "# Changelog\n\nAll notable changes to this project will be documented in this file.\nEach new release typically also includes the latest modulesync defaults.\nThese should not affect the functionality of the module."
- config.exclude_labels = %w{duplicate question invalid wontfix wont-fix modulesync skip-changelog}
- config.user = 'voxpupuli'
- config.project = metadata.metadata['name']
- end
-
- # Workaround for https://github.com/github-changelog-generator/github-changelog-generator/issues/715
- require 'rbconfig'
- if RbConfig::CONFIG['host_os'] =~ /linux/
- task :changelog do
- puts 'Fixing line endings...'
- changelog_file = File.join(__dir__, 'CHANGELOG.md')
- changelog_txt = File.read(changelog_file)
- new_contents = changelog_txt.gsub(%r{\r\n}, "\n")
- File.open(changelog_file, "w") {|file| file.puts new_contents }
- end
- end
-
-rescue LoadError
-end
# vim: syntax=ruby
diff --git a/spec/acceptance/01_zookeeper_spec.rb b/spec/acceptance/01_zookeeper_spec.rb
index e2b8f314..ae3e3692 100644
--- a/spec/acceptance/01_zookeeper_spec.rb
+++ b/spec/acceptance/01_zookeeper_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper_acceptance'
describe 'zookeeper prereq' do
diff --git a/spec/acceptance/broker_spec.rb b/spec/acceptance/broker_spec.rb
index 4b9136fd..e41311b5 100644
--- a/spec/acceptance/broker_spec.rb
+++ b/spec/acceptance/broker_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper_acceptance'
case fact('osfamily')
@@ -7,6 +9,7 @@
user_shell = '/usr/sbin/nologin'
end
+# rubocop:disable RSpec/RepeatedExampleGroupBody
describe 'kafka::broker' do
it 'works with no errors' do
pp = <<-EOS
@@ -170,9 +173,7 @@ class { 'kafka::broker':
it { is_expected.to be_enabled }
end
end
- end
- describe 'kafka::broker::service' do
context 'with log4j/jmx parameters' do
it 'works with no errors' do
pp = <<-EOS
@@ -224,3 +225,4 @@ class { 'kafka::broker':
end
end
end
+# rubocop:enable RSpec/RepeatedExampleGroupBody
diff --git a/spec/acceptance/consumer_spec.rb b/spec/acceptance/consumer_spec.rb
index 812abffa..c0950f34 100644
--- a/spec/acceptance/consumer_spec.rb
+++ b/spec/acceptance/consumer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper_acceptance'
case fact('osfamily')
@@ -7,6 +9,7 @@
user_shell = '/usr/sbin/nologin'
end
+# rubocop:disable RSpec/RepeatedExampleGroupBody
describe 'kafka::consumer' do
it 'works with no errors' do
pp = <<-EOS
@@ -98,9 +101,7 @@ class { 'kafka::consumer':
it { is_expected.to be_grouped_into 'kafka' }
end
end
- end
- describe 'kafka::consumer::config' do
context 'with custom config_dir' do
it 'works with no errors' do
pp = <<-EOS
@@ -163,3 +164,4 @@ class { 'kafka::consumer':
end
end
end
+# rubocop:enable RSpec/RepeatedExampleGroupBody
diff --git a/spec/acceptance/init_spec.rb b/spec/acceptance/init_spec.rb
index df552dc5..1dfaefc4 100644
--- a/spec/acceptance/init_spec.rb
+++ b/spec/acceptance/init_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper_acceptance'
case fact('osfamily')
@@ -7,6 +9,7 @@
user_shell = '/usr/sbin/nologin'
end
+# rubocop:disable RSpec/RepeatedExampleGroupBody
describe 'kafka' do
it 'works with no errors' do
pp = <<-EOS
@@ -217,3 +220,4 @@ class { 'kafka':
end
end
end
+# rubocop:enable RSpec/RepeatedExampleGroupBody
diff --git a/spec/acceptance/mirror_spec.rb b/spec/acceptance/mirror_spec.rb
index 443d144e..4a4b3251 100644
--- a/spec/acceptance/mirror_spec.rb
+++ b/spec/acceptance/mirror_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper_acceptance'
case fact('osfamily')
@@ -7,6 +9,7 @@
user_shell = '/usr/sbin/nologin'
end
+# rubocop:disable RSpec/RepeatedExampleGroupBody
describe 'kafka::mirror' do
it 'works with no errors' do
pp = <<-EOS
@@ -241,3 +244,4 @@ class { 'kafka::mirror':
end
end
end
+# rubocop:enable RSpec/RepeatedExampleGroupBody
diff --git a/spec/classes/broker_spec.rb b/spec/classes/broker_spec.rb
index 0c3d3092..76e183af 100644
--- a/spec/classes/broker_spec.rb
+++ b/spec/classes/broker_spec.rb
@@ -1,9 +1,13 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'shared_examples_param_validation'
describe 'kafka::broker', type: :class do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
+ os_facts = os_facts.merge({ service_provider: 'systemd' })
+
let(:facts) do
os_facts
end
@@ -37,7 +41,8 @@
context 'defaults' do
it { is_expected.to contain_file('/opt/kafka/config/server.properties') }
end
- context 'with manage_log4j => true' do
+
+ context 'with manage_log4j => true' do
let(:params) { { 'manage_log4j' => true } }
it { is_expected.to contain_file('/opt/kafka/config/log4j.properties').with_content(%r{^log4j.appender.kafkaAppender.MaxFileSize=50MB$}) }
@@ -57,8 +62,6 @@
context 'defaults' do
if os_facts[:service_provider] == 'systemd'
it { is_expected.to contain_file('/etc/init.d/kafka').with_ensure('absent') }
- it { is_expected.to contain_file('/etc/systemd/system/kafka.service').with_content %r{^After=network\.target syslog\.target$} }
- it { is_expected.to contain_file('/etc/systemd/system/kafka.service').with_content %r{^Wants=network\.target syslog\.target$} }
it { is_expected.not_to contain_file('/etc/systemd/system/kafka.service').with_content %r{^LimitNOFILE=} }
it { is_expected.not_to contain_file('/etc/systemd/system/kafka.service').with_content %r{^LimitCORE=} }
else
diff --git a/spec/classes/consumer_spec.rb b/spec/classes/consumer_spec.rb
index 137972e0..e747cd38 100644
--- a/spec/classes/consumer_spec.rb
+++ b/spec/classes/consumer_spec.rb
@@ -1,9 +1,13 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'shared_examples_param_validation'
describe 'kafka::consumer', type: :class do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
+ os_facts = os_facts.merge({ service_provider: 'systemd' })
+
let(:facts) do
os_facts
end
@@ -38,7 +42,8 @@
context 'defaults' do
it { is_expected.to contain_file('/opt/kafka/config/consumer.properties') }
end
- context 'with manage_log4j => true' do
+
+ context 'with manage_log4j => true' do
let(:params) { { 'manage_log4j' => true } }
it { is_expected.to contain_file('/opt/kafka/config/log4j.properties').with_content(%r{^log4j.appender.kafkaAppender.MaxFileSize=50MB$}) }
@@ -49,7 +54,7 @@
describe 'kafka::consumer::service' do
context 'defaults' do
if os_facts[:service_provider] == 'systemd'
- it { is_expected.to contain_file('/etc/init.d/kafka-consumer').with_abent('absent') }
+ it { is_expected.to contain_file('/etc/systemd/system/kafka-consumer.service') }
else
it { is_expected.to contain_file('/etc/init.d/kafka-consumer') }
end
diff --git a/spec/classes/init_spec.rb b/spec/classes/init_spec.rb
index 1edd6d78..4caea361 100644
--- a/spec/classes/init_spec.rb
+++ b/spec/classes/init_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'kafka', type: :class do
diff --git a/spec/classes/mirror_spec.rb b/spec/classes/mirror_spec.rb
index e76caf55..7ff8c3a5 100644
--- a/spec/classes/mirror_spec.rb
+++ b/spec/classes/mirror_spec.rb
@@ -1,9 +1,13 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'shared_examples_param_validation'
describe 'kafka::mirror', type: :class do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
+ os_facts = os_facts.merge({ service_provider: 'systemd' })
+
let(:facts) do
os_facts
end
diff --git a/spec/classes/producer_spec.rb b/spec/classes/producer_spec.rb
index 1a87df34..025ca93d 100644
--- a/spec/classes/producer_spec.rb
+++ b/spec/classes/producer_spec.rb
@@ -1,9 +1,13 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'shared_examples_param_validation'
describe 'kafka::producer', type: :class do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
+ os_facts = os_facts.merge({ service_provider: 'systemd' })
+
let(:facts) do
os_facts
end
@@ -18,35 +22,35 @@
}
end
- it { is_expected.to contain_class('kafka::producer::install').that_comes_before('Class[kafka::producer::config]') }
- it { is_expected.to contain_class('kafka::producer::config').that_comes_before('Class[kafka::producer::service]') }
- it { is_expected.to contain_class('kafka::producer::service').that_comes_before('Class[kafka::producer]') }
- it { is_expected.to contain_class('kafka::producer') }
+ if os_facts[:service_provider] == 'systemd'
+ it { is_expected.to raise_error(Puppet::Error, %r{Console Producer is not supported on systemd, because the stdin of the process cannot be redirected}) }
+ else
+ it { is_expected.to contain_class('kafka::producer::install').that_comes_before('Class[kafka::producer::config]') }
+ it { is_expected.to contain_class('kafka::producer::config').that_comes_before('Class[kafka::producer::service]') }
+ it { is_expected.to contain_class('kafka::producer::service').that_comes_before('Class[kafka::producer]') }
+ it { is_expected.to contain_class('kafka::producer') }
- describe 'kafka::producer::install' do
- context 'defaults' do
- it { is_expected.to contain_class('kafka') }
+ describe 'kafka::producer::install' do
+ context 'defaults' do
+ it { is_expected.to contain_class('kafka') }
+ end
end
- end
- describe 'kafka::producer::config' do
- context 'defaults' do
- it { is_expected.to contain_file('/opt/kafka/config/producer.properties') }
+ describe 'kafka::producer::config' do
+ context 'defaults' do
+ it { is_expected.to contain_file('/opt/kafka/config/producer.properties') }
+ end
end
- end
- describe 'kafka::producer::service' do
- context 'defaults' do
- if os_facts[:service_provider] == 'systemd'
- it { is_expected.to raise_error(Puppet::Error, %r{Console Producer is not supported on systemd, because the stdin of the process cannot be redirected}) }
- else
+ describe 'kafka::producer::service' do
+ context 'defaults' do
it { is_expected.to contain_file('/etc/init.d/kafka-producer') }
it { is_expected.to contain_service('kafka-producer') }
end
end
- end
- it_validates_parameter 'mirror_url'
+ it_validates_parameter 'mirror_url'
+ end
end
end
end
diff --git a/spec/defines/topic_spec.rb b/spec/defines/topic_spec.rb
index 4ddea383..3a2637d2 100644
--- a/spec/defines/topic_spec.rb
+++ b/spec/defines/topic_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'kafka::topic', type: :define do
diff --git a/spec/shared_examples_param_validation.rb b/spec/shared_examples_param_validation.rb
index dd718cf7..f808b76a 100644
--- a/spec/shared_examples_param_validation.rb
+++ b/spec/shared_examples_param_validation.rb
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
RSpec.configure do |c|
c.alias_it_should_behave_like_to :it_validates_parameter, 'validates parameter:'
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index fb5f0cbe..9efb4ae6 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,17 +1,20 @@
+# frozen_string_literal: true
+
# Managed by modulesync - DO NOT EDIT
# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/
# puppetlabs_spec_helper will set up coverage if the env variable is set.
# We want to do this if lib exists and it hasn't been explicitly set.
-ENV['COVERAGE'] ||= 'yes' if Dir.exist?(File.expand_path('../../lib', __FILE__))
+ENV['COVERAGE'] ||= 'yes' if Dir.exist?(File.expand_path('../lib', __dir__))
require 'voxpupuli/test/spec_helper'
+add_mocked_facts!
+
if File.exist?(File.join(__dir__, 'default_module_facts.yml'))
facts = YAML.safe_load(File.read(File.join(__dir__, 'default_module_facts.yml')))
- if facts
- facts.each do |name, value|
- add_custom_fact name.to_sym, value
- end
+ facts&.each do |name, value|
+ add_custom_fact name.to_sym, value
end
end
+Dir['./spec/support/spec/**/*.rb'].sort.each { |f| require f }
diff --git a/spec/spec_helper_acceptance.rb b/spec/spec_helper_acceptance.rb
index f5f5960c..5c2b9e99 100644
--- a/spec/spec_helper_acceptance.rb
+++ b/spec/spec_helper_acceptance.rb
@@ -1,7 +1,10 @@
+# frozen_string_literal: true
+
require 'voxpupuli/acceptance/spec_helper_acceptance'
configure_beaker do |host|
next unless fact('os.name') == 'Debian' && fact('os.release.major') == '8'
+
on host, 'echo "deb http://archive.debian.org/debian jessie-backports main" > /etc/apt/sources.list.d/backports.list'
on host, 'echo \'Acquire::Check-Valid-Until "false";\' > /etc/apt/apt.conf.d/check-valid'
on host, 'DEBIAN_FRONTEND=noninteractive apt-get -y update'