
Commit a008e3c

Clean up integration tests
1 parent 7d79adc commit a008e3c

2 files changed: +75 -136 lines


lib/logstash/inputs/kafka.rb

+6 -4
@@ -131,10 +131,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # The class name of the partition assignment strategy that the client will use to distribute
   # partition ownership amongst consumer instances
   config :partition_assignment_strategy, :validate => :string
-  # ID of the pipeline whose events you want to read from.
-  def pipeline_id
-    respond_to?(:execution_context) ? execution_context.pipeline_id : "main"
-  end
   # The size of the TCP receive buffer (SO_RCVBUF) to use when reading data.
   config :receive_buffer_bytes, :validate => :string
   # The amount of time to wait before attempting to reconnect to a given host.
@@ -360,4 +356,10 @@ def set_sasl_config(props)
 
     props.put("sasl.kerberos.service.name",sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
   end
+
+  # ID of the pipeline whose events you want to read from.
+  def pipeline_id
+    respond_to?(:execution_context) ? execution_context.pipeline_id : "main"
+  end
+
 end #class LogStash::Inputs::Kafka
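
The relocated pipeline_id helper resolves the pipeline id via the plugin's execution_context when one is present and falls back to "main" otherwise. A minimal, illustrative sketch (not part of this commit), assuming an execution_context stubbed the same way the reworked spec does:

    input = LogStash::Inputs::Kafka.new('topics' => ['logstash_topic_plain'])

    # With a stubbed execution_context (RSpec double), pipeline_id comes from it:
    allow(input).to receive(:execution_context).and_return(double(pipeline_id: 'my_pipeline'))
    input.pipeline_id  # => "my_pipeline"; without an execution_context the method returns "main"

The multi-consumer spec below relies on this: each consumer's client-id tag is expected to end with the stubbed pipeline_id rather than the hard-coded "main" used previously.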

spec/integration/inputs/kafka_spec.rb

+69 -132
@@ -4,166 +4,103 @@
 require "digest"
 require "rspec/wait"
 
+def thread_it(kafka_input, queue)
+  Thread.new do
+    begin
+      kafka_input.run(queue)
+    end
+  end
+end
+
+def run_with_kafka(&block)
+  queue = Queue.new
+  t = thread_it(kafka_input, queue)
+  begin
+    wait(timeout_seconds).for {queue.length}.to eq(expected_num_events)
+    yield(queue)
+  ensure
+    t.kill
+    t.join(30_000)
+  end
+end
+
+shared_examples 'consumes all expected messages' do
+  it 'should consume all expected messages' do
+    run_with_kafka do |queue|
+      expect(queue.length).to eq(expected_num_events)
+    end
+  end
+end
+
 # Please run kafka_test_setup.sh prior to executing this integration test.
 describe "inputs/kafka", :integration => true do
+  subject(:kafka_input) { LogStash::Inputs::Kafka.new(config) }
+  let(:execution_context) { double("execution_context")}
+
+  before :each do
+    allow(kafka_input).to receive(:execution_context).and_return(execution_context)
+    allow(execution_context).to receive(:pipeline_id).and_return(pipeline_id)
+  end
+
   # Group ids to make sure that the consumers get all the logs.
   let(:group_id_1) {rand(36**8).to_s(36)}
   let(:group_id_2) {rand(36**8).to_s(36)}
   let(:group_id_3) {rand(36**8).to_s(36)}
   let(:group_id_4) {rand(36**8).to_s(36)}
-  let(:group_id_5) {rand(36**8).to_s(36)}
-  let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:multi_consumer_config) { plain_config.merge({"group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3}) }
-  let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
-  let(:manual_commit_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_5, 'auto_offset_reset' => 'earliest', 'enable_auto_commit' => 'false'} }
+  let(:pipeline_id) {rand(36**8).to_s(36)}
+  let(:config) { { 'codec' => 'plain', 'auto_offset_reset' => 'earliest'}}
   let(:timeout_seconds) { 30 }
   let(:num_events) { 103 }
+  let(:expected_num_events) { num_events }
 
-  describe "#kafka-topics" do
-    def thread_it(kafka_input, queue)
-      Thread.new do
-        begin
-          kafka_input.run(queue)
-        end
-      end
-    end
-
-    it "should consume all messages from plain 3-partition topic" do
-      kafka_input = LogStash::Inputs::Kafka.new(plain_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
-        expect(queue.length).to eq(num_events)
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
-
-    it "should consume all messages from snappy 3-partition topic" do
-      kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
-        expect(queue.length).to eq(num_events)
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
+  context 'from a plain 3 partition topic' do
+    let(:config) { super.merge({ 'topics' => ['logstash_topic_plain'], 'group_id' => group_id_1}) }
+    it_behaves_like 'consumes all expected messages'
+  end
 
-    it "should consume all messages from lz4 3-partition topic" do
-      kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
-        expect(queue.length).to eq(num_events)
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
+  context 'from snappy 3 partition topic' do
+    let(:config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+    it_behaves_like 'consumes all expected messages'
+  end
 
-    it "should consumer all messages with multiple consumers" do
-      kafka_input = LogStash::Inputs::Kafka.new(multi_consumer_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
-        expect(queue.length).to eq(num_events)
-        kafka_input.kafka_consumers.each_with_index do |consumer, i|
-          expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}-main")
-        end
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
+  context 'from lz4 3 partition topic' do
+    let(:config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+    it_behaves_like 'consumes all expected messages'
   end
 
-  describe "#kafka-topics-pattern" do
-    def thread_it(kafka_input, queue)
-      Thread.new do
-        begin
-          kafka_input.run(queue)
-        end
-      end
-    end
+  context 'manually committing' do
+    let(:config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_2, 'auto_offset_reset' => 'earliest', 'enable_auto_commit' => 'false'} }
+    it_behaves_like 'consumes all expected messages'
+  end
 
-    it "should consume all messages from all 3 topics" do
-      kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(3*num_events)
-        expect(queue.length).to eq(3*num_events)
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
+  context 'using a pattern to consume from all 3 topics' do
+    let(:config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_3, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+    let(:expected_num_events) { 3*num_events }
+    it_behaves_like 'consumes all expected messages'
   end
 
-  describe "#kafka-decorate" do
-    def thread_it(kafka_input, queue)
-      Thread.new do
-        begin
-          kafka_input.run(queue)
+  context "with multiple consumers" do
+    let(:config) { super.merge({'topics' => ['logstash_topic_plain'], "group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3}) }
+    it 'should should consume all messages' do
+      run_with_kafka do |queue|
+        expect(queue.length).to eq(num_events)
+        kafka_input.kafka_consumers.each_with_index do |consumer, i|
+          expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}-#{pipeline_id}")
         end
       end
     end
+  end
 
+  context 'with decorate events set to true' do
+    let(:config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
     it "should show the right topic and group name in decorated kafka section" do
       start = LogStash::Timestamp.now.time.to_i
-      kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+      run_with_kafka do |queue|
        expect(queue.length).to eq(num_events)
        event = queue.shift
        expect(event.get("[@metadata][kafka][topic]")).to eq("logstash_topic_plain")
        expect(event.get("[@metadata][kafka][consumer_group]")).to eq(group_id_3)
        expect(event.get("[@metadata][kafka][timestamp]")).to be >= start
-      ensure
-        t.kill
-        t.join(30_000)
-      end
-    end
-  end
-
-  describe "#kafka-offset-commit" do
-    def thread_it(kafka_input, queue)
-      Thread.new do
-        begin
-          kafka_input.run(queue)
-        end
-      end
-    end
-
-    it "should manually commit offsets" do
-      kafka_input = LogStash::Inputs::Kafka.new(manual_commit_config)
-      queue = Queue.new
-      t = thread_it(kafka_input, queue)
-      begin
-        t.run
-        wait(timeout_seconds).for {queue.length}.to eq(num_events)
-        expect(queue.length).to eq(num_events)
-      ensure
-        t.kill
-        t.join(30_000)
      end
    end
  end
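
With the run_with_kafka helper and the shared example in place, a new scenario only needs a config override. A minimal, hypothetical sketch of how a further topic could be added (the gzip topic name below is made up and not part of this commit):

    context 'from a hypothetical gzip topic' do
      let(:config) { super.merge({ 'topics' => ['logstash_topic_gzip'], 'group_id' => group_id_1 }) }
      it_behaves_like 'consumes all expected messages'
    end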
