@@ -44,7 +44,7 @@ module Kafka
   #
   class Consumer

-    def initialize(cluster:, logger:, instrumenter:, group:, fetcher:, offset_manager:, session_timeout:, heartbeat:)
+    def initialize(cluster:, logger:, instrumenter:, group:, fetcher:, offset_manager:, session_timeout:, heartbeat:, refresh_topic_interval: 0)
       @cluster = cluster
       @logger = TaggedLogger.new(logger)
       @instrumenter = instrumenter
@@ -53,6 +53,7 @@ def initialize(cluster:, logger:, instrumenter:, group:, fetcher:, offset_manage
       @session_timeout = session_timeout
       @fetcher = fetcher
       @heartbeat = heartbeat
+      @refresh_topic_interval = refresh_topic_interval

       @pauses = Hash.new {|h, k|
         h[k] = Hash.new {|h2, k2|
@@ -73,6 +74,15 @@ def initialize(cluster:, logger:, instrumenter:, group:, fetcher:, offset_manage
       # when user commits message other than last in a batch, this would make ruby-kafka refetch
       # some already consumed messages
       @current_offsets = Hash.new { |h, k| h[k] = {} }
+
+      # Map storing subscribed topics with their configuration
+      @subscribed_topics = Concurrent::Map.new
+
+      # Set storing topics that matched topics in @subscribed_topics
+      @matched_topics = Set.new
+
+      # Whether join_group must be executed again because new topics are added
+      @join_group_for_new_topics = false
     end

     # Subscribes the consumer to a topic.
@@ -97,13 +107,12 @@ def initialize(cluster:, logger:, instrumenter:, group:, fetcher:, offset_manage
     def subscribe(topic_or_regex, default_offset: nil, start_from_beginning: true, max_bytes_per_partition: 1048576)
       default_offset ||= start_from_beginning ? :earliest : :latest

-      if topic_or_regex.is_a?(Regexp)
-        cluster_topics.select { |topic| topic =~ topic_or_regex }.each do |topic|
-          subscribe_to_topic(topic, default_offset, start_from_beginning, max_bytes_per_partition)
-        end
-      else
-        subscribe_to_topic(topic_or_regex, default_offset, start_from_beginning, max_bytes_per_partition)
-      end
+      @subscribed_topics[topic_or_regex] = {
+        default_offset: default_offset,
+        start_from_beginning: start_from_beginning,
+        max_bytes_per_partition: max_bytes_per_partition
+      }
+      scan_for_subscribing

       nil
     end
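For context, a minimal usage sketch of the new subscription flow (hypothetical broker address, group id, and topic pattern; it also assumes the client-level `#consumer` factory forwards the new `refresh_topic_interval:` keyword, which is not part of this hunk):

```ruby
# Hypothetical usage sketch, not part of the diff.
require "kafka"

kafka = Kafka.new(["kafka1:9092"], client_id: "example-app")

# refresh_topic_interval: 0 (the default) keeps the old behavior: a regex is
# matched against the cluster's topics only once, when subscribe is called.
# A positive value re-scans the topic list roughly every N seconds.
consumer = kafka.consumer(group_id: "event-processors", refresh_topic_interval: 30)

# The subscription (regex plus its options) is stored in @subscribed_topics;
# scan_for_subscribing matches it against cluster_topics now and on each refresh.
consumer.subscribe(/^events-/, start_from_beginning: false)

consumer.each_message do |message|
  puts "#{message.topic}/#{message.partition}@#{message.offset}: #{message.value}"
end
```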
@@ -402,6 +411,7 @@ def consumer_loop
       while running?
         begin
           @instrumenter.instrument("loop.consumer") do
+            refresh_topic_list_if_enabled
             yield
           end
         rescue HeartbeatError
@@ -453,6 +463,8 @@ def make_final_offsets_commit!(attempts = 3)
     end

     def join_group
+      @join_group_for_new_topics = false
+
       old_generation_id = @group.generation_id

       @group.join
@@ -514,11 +526,19 @@ def resume_paused_partitions!
       end
     end

+    def refresh_topic_list_if_enabled
+      return if @refresh_topic_interval <= 0
+      return if @refreshed_at && @refreshed_at + @refresh_topic_interval > Time.now
+
+      scan_for_subscribing
+      @refreshed_at = Time.now
+    end
+
     def fetch_batches
       # Return early if the consumer has been stopped.
       return [] if shutting_down?

-      join_group unless @group.member?
+      join_group if !@group.member? || @join_group_for_new_topics

       trigger_heartbeat

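The `refresh_topic_list_if_enabled` helper added above is a plain time gate: it does nothing when `refresh_topic_interval` is zero or negative, and otherwise re-runs `scan_for_subscribing` at most once per interval. A standalone sketch of the same gating pattern (illustrative class and names, not part of the library):

```ruby
# Minimal sketch of an "at most once per interval" gate, mirroring the
# refresh_topic_list_if_enabled / @refreshed_at logic above. Illustrative only.
class IntervalGate
  def initialize(interval)
    @interval = interval # seconds; <= 0 disables the gate entirely
    @last_run = nil
  end

  # Runs the block only if the interval has elapsed since the last run.
  def run
    return if @interval <= 0
    return if @last_run && @last_run + @interval > Time.now

    yield
    @last_run = Time.now
  end
end

gate = IntervalGate.new(30)
gate.run { puts "refreshing topic list" } # executes immediately
gate.run { puts "refreshing topic list" } # skipped until 30 seconds have passed
```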
@@ -572,10 +592,34 @@ def clear_current_offsets(excluding: {})
       end
     end

+    def scan_for_subscribing
+      @subscribed_topics.each do |topic_or_regex, config|
+        default_offset = config.fetch(:default_offset)
+        start_from_beginning = config.fetch(:start_from_beginning)
+        max_bytes_per_partition = config.fetch(:max_bytes_per_partition)
+        if topic_or_regex.is_a?(Regexp)
+          subscribe_to_regex(topic_or_regex, default_offset, start_from_beginning, max_bytes_per_partition)
+        else
+          subscribe_to_topic(topic_or_regex, default_offset, start_from_beginning, max_bytes_per_partition)
+        end
+      end
+    end
+
+    def subscribe_to_regex(topic_regex, default_offset, start_from_beginning, max_bytes_per_partition)
+      cluster_topics.select { |topic| topic =~ topic_regex }.each do |topic|
+        subscribe_to_topic(topic, default_offset, start_from_beginning, max_bytes_per_partition)
+      end
+    end
+
     def subscribe_to_topic(topic, default_offset, start_from_beginning, max_bytes_per_partition)
+      return if @matched_topics.include?(topic)
+      @matched_topics.add(topic)
+      @join_group_for_new_topics = true
+
       @group.subscribe(topic)
       @offset_manager.set_default_offset(topic, default_offset)
       @fetcher.subscribe(topic, max_bytes_per_partition: max_bytes_per_partition)
+      @cluster.mark_as_stale!
     end

     def cluster_topics
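Taken together, the refresh cycle works like this: `consumer_loop` calls `refresh_topic_list_if_enabled`, which re-runs `scan_for_subscribing`; any topic newly matched by a stored regex is subscribed exactly once (the `@matched_topics` set prevents duplicate subscriptions), sets `@join_group_for_new_topics`, and marks the cluster metadata stale, so the next `fetch_batches` rejoins the group and the new topic's partitions get assigned. A standalone sketch of that dedupe-and-rejoin guard (hypothetical topic names, not part of the diff):

```ruby
require "set"

# Mirrors the guard in subscribe_to_topic: each topic is registered once,
# and discovering any new topic flags that the consumer group must rejoin.
matched_topics = Set.new
join_group_for_new_topics = false

discovered = ["events-a", "events-b", "events-a"] # hypothetical topic names
discovered.each do |topic|
  next if matched_topics.include?(topic) # already subscribed, skip
  matched_topics.add(topic)
  join_group_for_new_topics = true
  puts "subscribing to #{topic}"
end

puts "rejoin needed: #{join_group_for_new_topics}" # => true
```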