
Commit 2e55f72

Merge branch 'master' into 141

mensfeld authored Sep 25, 2023
2 parents 0ee9fae + 25d4b87
Signed-off-by: Maciej Mensfeld <[email protected]>
Showing 16 changed files with 118 additions and 77 deletions.
13 changes: 9 additions & 4 deletions .github/workflows/ci.yml
@@ -28,18 +28,19 @@ jobs:
- ruby: '3.2'
coverage: 'true'
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
- uses: actions/checkout@v4
- name: Install package dependencies
run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"

- name: Start Kafka with docker-compose
run: |
docker-compose up -d
docker-compose up -d || (sleep 5 && docker-compose up -d)
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{matrix.ruby}}
bundler-cache: true

- name: Install latest bundler
run: |
@@ -51,6 +52,10 @@ jobs:
bundle config set without development
bundle install --jobs 4 --retry 3
- name: Wait for Kafka
run: |
bundle exec bin/wait_for_kafka
- name: Run all tests
env:
GITHUB_COVERAGE: ${{matrix.coverage}}
@@ -62,7 +67,7 @@
strategy:
fail-fast: false
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4
with:
fetch-depth: 0

@@ -83,7 +88,7 @@
strategy:
fail-fast: false
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4
with:
fetch-depth: 0
- name: Run Coditsu
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,10 @@
## 0.7.5 (Unreleased)
- **[Feature]** Introduce ability to download raw payloads.
- **[Feature]** Introduce ability to download deserialized message payload as JSON.
- [Enhancement] Update order of topics creation for the setup of Web to support zero-downtime setup of Web in running Karafka projects.
- [Fix] Cache assets for 1 year instead of 7 days.
- [Fix] Remove source maps pointing to non-existing locations.
- [Maintenance] Include license and copyrights notice for `timeago.js` that was missing in the JS min file.
- [Refactor] Rename `ui.show_internal_topics` to `ui.visibility.internal_topics_display`

### Upgrade Notes
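
Regarding the `ui.show_internal_topics` rename noted above, a minimal configuration sketch (not part of this commit, assuming the standard `Karafka::Web.setup` block):

```ruby
Karafka::Web.setup do |config|
  # Previously: config.ui.show_internal_topics = false
  config.ui.visibility.internal_topics_display = false
end
```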
4 changes: 2 additions & 2 deletions Gemfile.lock
@@ -3,7 +3,7 @@ PATH
specs:
karafka-web (0.7.4)
erubi (~> 1.4)
karafka (>= 2.2.3, < 3.0.0)
karafka (>= 2.2.5, < 3.0.0)
karafka-core (>= 2.2.2, < 3.0.0)
roda (~> 3.68, >= 3.69)
tilt (~> 2.0)
@@ -26,7 +26,7 @@ GEM
ffi (1.15.5)
i18n (1.14.1)
concurrent-ruby (~> 1.0)
karafka (2.2.3)
karafka (2.2.5)
karafka-core (>= 2.2.2, < 2.3.0)
thor (>= 0.20)
waterdrop (>= 2.6.6, < 3.0.0)
24 changes: 24 additions & 0 deletions bin/wait_for_kafka
@@ -0,0 +1,24 @@
#!/usr/bin/env ruby

# Waits for Kafka to be ready
# Useful in CI where Kafka needs to be fully started before we run any tests

require 'karafka'

Karafka::App.setup do |config|
config.kafka[:'bootstrap.servers'] = '127.0.0.1:9092'
end

60.times do
begin
# Stop if we can connect to the cluster and get info
exit if Karafka::Admin.cluster_info
rescue Rdkafka::RdkafkaError
puts "Kafka not available, retrying..."
sleep(1)
end
end

puts 'Kafka not available!'

exit 1
33 changes: 17 additions & 16 deletions docker-compose.yml
@@ -1,22 +1,23 @@
version: '2'
services:
zookeeper:
container_name: karafka_web_21_zookeeper
image: wurstmeister/zookeeper
restart: on-failure
ports:
- '2181:2181'

services:
kafka:
container_name: karafka_web_21_kafka
image: wurstmeister/kafka
container_name: kafka
image: confluentinc/cp-kafka:7.5.0

ports:
- '9092:9092'
- 9092:9092

environment:
KAFKA_ADVERTISED_HOST_NAME: localhost
KAFKA_ADVERTISED_PORT: 9092
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
CLUSTER_ID: kafka-docker-cluster-1
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_PROCESS_ROLES: broker,controller
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
KAFKA_BROKER_ID: 1
KAFKA_CONTROLLER_QUORUM_VOTERS: [email protected]:9093
ALLOW_PLAINTEXT_LISTENER: 'yes'
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
volumes:
- /var/run/docker.sock:/var/run/docker.sock
restart: on-failure
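
The compose file now runs a single `confluentinc/cp-kafka` broker in KRaft mode (no Zookeeper), advertising `PLAINTEXT://127.0.0.1:9092`. A minimal sketch, reusing the same Admin call as `bin/wait_for_kafka` above, to confirm the broker is reachable from a client:

```ruby
require 'karafka'

Karafka::App.setup do |config|
  # Matches the advertised listener from the compose file above
  config.kafka[:'bootstrap.servers'] = '127.0.0.1:9092'
end

# cluster_info raises Rdkafka::RdkafkaError while the broker is still starting
puts 'Kafka is reachable' if Karafka::Admin.cluster_info
```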
2 changes: 1 addition & 1 deletion karafka-web.gemspec
@@ -17,7 +17,7 @@ Gem::Specification.new do |spec|
spec.licenses = %w[LGPL-3.0 Commercial]

spec.add_dependency 'erubi', '~> 1.4'
spec.add_dependency 'karafka', '>= 2.2.3', '< 3.0.0'
spec.add_dependency 'karafka', '>= 2.2.5', '< 3.0.0'
spec.add_dependency 'karafka-core', '>= 2.2.2', '< 3.0.0'
spec.add_dependency 'roda', '~> 3.68', '>= 3.69'
spec.add_dependency 'tilt', '~> 2.0'
98 changes: 52 additions & 46 deletions lib/karafka/web/management/create_topics.rb
@@ -9,54 +9,36 @@ class CreateTopics < Base
# Runs the creation process
#
# @param replication_factor [Integer] replication factor for Web-UI topics
#
# @note The order of creation of those topics is important. In order to support the
# zero-downtime bootstrap, we use the presence of the states topic and its initial state
existence as an indicator that the setup went as expected. If the consumers states
topic exists and contains needed data, it means all went as expected and that
topics created before it also exist (as no error occurred).
def call(replication_factor)
consumers_states_topic = ::Karafka::Web.config.topics.consumers.states
consumers_metrics_topic = ::Karafka::Web.config.topics.consumers.metrics
consumers_reports_topic = ::Karafka::Web.config.topics.consumers.reports
errors_topic = ::Karafka::Web.config.topics.errors

# Create only if needed
if existing_topics_names.include?(consumers_states_topic)
exists(consumers_states_topic)
else
creating(consumers_states_topic)
# This topic needs to have one partition
::Karafka::Admin.create_topic(
consumers_states_topic,
1,
replication_factor,
# We care only about the most recent state, previous are irrelevant. So we can easily
# compact after one minute. We do not use this beyond the most recent collective
# state, hence it all can easily go away. We also limit the segment size to at most
# 100MB not to use more space ever.
{
'cleanup.policy': 'compact',
'retention.ms': 60 * 60 * 1_000,
'segment.ms': 24 * 60 * 60 * 1_000, # 1 day
'segment.bytes': 104_857_600 # 100MB
}
)
created(consumers_states_topic)
end

if existing_topics_names.include?(consumers_metrics_topic)
exists(consumers_metrics_topic)
if existing_topics_names.include?(errors_topic)
exists(errors_topic)
else
creating(consumers_metrics_topic)
# This topic needs to have one partition
# Same as states - only most recent is relevant as it is a materialized state
creating(errors_topic)
# All the errors will be dispatched here
# This topic can have multiple partitions but we go with one by default. A single Ruby
# process should not crash that often and if there is an expectation of a higher volume
# of errors, this can be changed by the end user
::Karafka::Admin.create_topic(
consumers_metrics_topic,
errors_topic,
1,
replication_factor,
# Remove really old errors (older than 3 months just to preserve space)
{
'cleanup.policy': 'compact',
'retention.ms': 60 * 60 * 1_000, # 1h
'segment.ms': 24 * 60 * 60 * 1_000, # 1 day
'segment.bytes': 104_857_600 # 100MB
'retention.ms': 3 * 31 * 24 * 60 * 60 * 1_000 # 3 months
}
)
created(consumers_metrics_topic)
created(errors_topic)
end

if existing_topics_names.include?(consumers_reports_topic)
@@ -81,24 +63,48 @@ def call(replication_factor)
created(consumers_reports_topic)
end

if existing_topics_names.include?(errors_topic)
exists(errors_topic)
if existing_topics_names.include?(consumers_metrics_topic)
exists(consumers_metrics_topic)
else
creating(errors_topic)
# All the errors will be dispatched here
# This topic can have multiple partitions but we go with one by default. A single Ruby
# process should not crash that often and if there is an expectation of a higher volume
# of errors, this can be changed by the end user
creating(consumers_metrics_topic)
# This topic needs to have one partition
# Same as states - only most recent is relevant as it is a materialized state
::Karafka::Admin.create_topic(
errors_topic,
consumers_metrics_topic,
1,
replication_factor,
# Remove really old errors (older than 3 months just to preserve space)
{
'retention.ms': 3 * 31 * 24 * 60 * 60 * 1_000 # 3 months
'cleanup.policy': 'compact',
'retention.ms': 60 * 60 * 1_000, # 1h
'segment.ms': 24 * 60 * 60 * 1_000, # 1 day
'segment.bytes': 104_857_600 # 100MB
}
)
created(errors_topic)
created(consumers_metrics_topic)
end

# Create only if needed
if existing_topics_names.include?(consumers_states_topic)
exists(consumers_states_topic)
else
creating(consumers_states_topic)
# This topic needs to have one partition
::Karafka::Admin.create_topic(
consumers_states_topic,
1,
replication_factor,
# We care only about the most recent state, previous are irrelevant. So we can easily
# compact after one minute. We do not use this beyond the most recent collective
# state, hence it all can easily go away. We also limit the segment size to at most
# 100MB not to use more space ever.
{
'cleanup.policy': 'compact',
'retention.ms': 60 * 60 * 1_000,
'segment.ms': 24 * 60 * 60 * 1_000, # 1 day
'segment.bytes': 104_857_600 # 100MB
}
)
created(consumers_states_topic)
end
end
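
The reordering above makes the consumers states topic the last one created, so its presence can serve as the bootstrap indicator described in the @note. A rough sketch of such a check (illustrative only, not the code Web itself uses):

```ruby
states_topic = ::Karafka::Web.config.topics.consumers.states

# Because the states topic is created last, finding it implies the errors,
# reports and metrics topics were already created without raising
bootstrapped = ::Karafka::Admin
               .cluster_info
               .topics
               .any? { |topic| topic[:topic_name] == states_topic }

puts(bootstrapped ? 'Web UI topics are bootstrapped' : 'Run the Web install/migration')
```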

2 changes: 1 addition & 1 deletion lib/karafka/web/ui/app.rb
@@ -16,7 +16,7 @@ class App < Base

# Serve current version specific assets to prevent users from fetching old assets
# after upgrade
r.on(:assets, Karafka::Web::VERSION) do
r.on 'assets', Karafka::Web::VERSION do
r.public
end

2 changes: 1 addition & 1 deletion lib/karafka/web/ui/base.rb
@@ -16,7 +16,7 @@ class Base < Roda
root: Karafka::Web.gem_root.join('lib/karafka/web/ui/public'),
# Cache all static files for the end user for as long as possible
# We can do it because we ship per version assets so they invalidate with gem bumps
headers: { 'Cache-Control' => 'max-age=604800' }
headers: { 'Cache-Control' => 'max-age=31536000, immutable' }
)
plugin :render_each
plugin :partials
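
The year-long `immutable` cache is safe only because asset URLs embed the gem version (see the `r.on 'assets', Karafka::Web::VERSION` routes above), so every upgrade changes the URL and a stale cached copy is simply never requested again. An illustrative sketch with assumed example values:

```ruby
# Assumed illustrative values only
version = '0.7.4' # Karafka::Web::VERSION per the Gemfile.lock above

# Each release serves assets under a version-specific prefix...
asset_url = "assets/#{version}/javascripts/chart.min.js"

# ...so the aggressive caching below never pins a browser to an outdated file
cache_headers = { 'Cache-Control' => 'max-age=31536000, immutable' }

puts "#{asset_url} => #{cache_headers['Cache-Control']}"
```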
2 changes: 1 addition & 1 deletion lib/karafka/web/ui/pro/app.rb
@@ -36,7 +36,7 @@ class App < Ui::Base

# Serve current version specific assets to prevent users from fetching old assets
# after upgrade
r.on(:assets, Karafka::Web::VERSION) do
r.on 'assets', Karafka::Web::VERSION do
r.public
end

1 change: 0 additions & 1 deletion lib/karafka/web/ui/public/javascripts/bootstrap.min.js

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion lib/karafka/web/ui/public/javascripts/chart.min.js

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions lib/karafka/web/ui/public/javascripts/timeago.min.js

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion lib/karafka/web/ui/public/stylesheets/bootstrap.min.css

Large diffs are not rendered by default.

This file was deleted.

2 changes: 1 addition & 1 deletion lib/karafka/web/ui/views/shared/_header.erb
@@ -11,7 +11,7 @@
<link href="<%= asset_path('stylesheets/datepicker.min.css') %>" rel="stylesheet">
<link href="<%= asset_path('stylesheets/application.css') %>" rel="stylesheet">

<script type="module" src="<%= asset_path('javascripts/chart.min.js') %>"></script>
<script type="text/javascript" src="<%= asset_path('javascripts/chart.min.js') %>"></script>
<script type="text/javascript" src="<%= asset_path('javascripts/timeago.min.js') %>"></script>
<script type="text/javascript" src="<%= asset_path('javascripts/bootstrap.min.js') %>"></script>
<script type="text/javascript" src="<%= asset_path('javascripts/highlight.min.js') %>"></script>
