
Commit 9639262

Change BitSet to EnumSet.
1 parent 3f7fb6a commit 9639262

File tree

1 file changed: +6 -13 lines changed


spring-kafka/src/main/java/org/springframework/kafka/listener/DeadLetterPublishingRecoverer.java (+6 -13)

@@ -21,8 +21,8 @@
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.time.Duration;
-import java.util.BitSet;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -88,7 +88,7 @@ public class DeadLetterPublishingRecoverer extends ExceptionClassifier implement

 	private final Function<ProducerRecord<?, ?>, KafkaOperations<?, ?>> templateResolver;

-	private final BitSet whichHeaders = new BitSet(10);
+	private final EnumSet<HeaderNames.HeadersToAdd> whichHeaders = EnumSet.allOf(HeaderNames.HeadersToAdd.class);

 	private boolean retainExceptionHeader;

@@ -185,7 +185,6 @@ public DeadLetterPublishingRecoverer(Map<Class<?>, KafkaOperations<? extends Obj
 				.map(t -> t.isTransactional())
 				.allMatch(t -> t.equals(tx)), "All templates must have the same setting for transactional");
 		this.destinationResolver = destinationResolver;
-		setHeaderBits(this.whichHeaders);
 	}

 	/**
@@ -210,12 +209,6 @@ public DeadLetterPublishingRecoverer(Function<ProducerRecord<?, ?>, KafkaOperati
 		this.transactional = transactional;
 		this.destinationResolver = destinationResolver;
 		this.templateResolver = templateResolver;
-		setHeaderBits(this.whichHeaders);
-	}
-
-	private static void setHeaderBits(BitSet bits) {
-		bits.set(HeaderNames.HeadersToAdd.OFFSET.ordinal(),
-				(HeaderNames.HeadersToAdd.EX_STACKTRACE.ordinal()) + 1);
 	}

 	/**
@@ -368,7 +361,7 @@ public void excludeHeader(HeaderNames.HeadersToAdd... headers) {
 		Assert.notNull(headers, "'headers' cannot be null");
 		Assert.noNullElements(headers, "'headers' cannot include null elements");
 		for (HeaderNames.HeadersToAdd header : headers) {
-			this.whichHeaders.clear(header.ordinal());
+			this.whichHeaders.remove(header);
 		}
 	}

@@ -381,7 +374,7 @@ public void includeHeader(HeaderNames.HeadersToAdd... headers) {
 		Assert.notNull(headers, "'headers' cannot be null");
 		Assert.noNullElements(headers, "'headers' cannot include null elements");
 		for (HeaderNames.HeadersToAdd header : headers) {
-			this.whichHeaders.set(header.ordinal());
+			this.whichHeaders.add(header);
 		}
 	}

@@ -681,7 +674,7 @@ private void maybeAddOriginalHeaders(Headers kafkaHeaders, ConsumerRecord<?, ?>
 	}

 	private void maybeAddHeader(Headers kafkaHeaders, String header, byte[] value, HeadersToAdd hta) {
-		if (this.whichHeaders.get(hta.ordinal())
+		if (this.whichHeaders.contains(hta)
 				&& (this.appendOriginalHeaders || kafkaHeaders.lastHeader(header) == null)) {
 			kafkaHeaders.add(header, value);
 		}
@@ -713,7 +706,7 @@ private void addExceptionInfoHeaders(Headers kafkaHeaders, Exception exception,
 	}

 	private void appendOrReplace(Headers headers, RecordHeader header, HeadersToAdd hta) {
-		if (this.whichHeaders.get(hta.ordinal())) {
+		if (this.whichHeaders.contains(hta)) {
 			if (this.stripPreviousExceptionHeaders) {
 				headers.remove(header.key());
 			}
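The change swaps index-based bit bookkeeping for a type-safe set: BitSet forced the code to go through ordinal() and to seed the default selection with a range from OFFSET to EX_STACKTRACE, while EnumSet.allOf() states "all headers enabled by default" directly, and add/remove/contains take the enum constant itself. Below is a minimal standalone sketch of that pattern, not the spring-kafka API: only the OFFSET and EX_STACKTRACE constants and the include/exclude/contains operations are taken from the diff; the remaining enum constants, the class name, and the shouldAdd helper are hypothetical.

import java.util.EnumSet;

// Illustrative stand-in for HeaderNames.HeadersToAdd; only OFFSET and
// EX_STACKTRACE appear in the diff, the other constants are assumptions.
enum HeadersToAdd { OFFSET, TIMESTAMP, EXCEPTION, EX_STACKTRACE }

public class HeaderFilterSketch {

	// Same default as the new field: every header enabled up front,
	// with no BitSet sizing or ordinal() range to keep in sync.
	private final EnumSet<HeadersToAdd> whichHeaders = EnumSet.allOf(HeadersToAdd.class);

	public void excludeHeader(HeadersToAdd... headers) {
		for (HeadersToAdd header : headers) {
			this.whichHeaders.remove(header);   // was clear(header.ordinal())
		}
	}

	public void includeHeader(HeadersToAdd... headers) {
		for (HeadersToAdd header : headers) {
			this.whichHeaders.add(header);      // was set(header.ordinal())
		}
	}

	// Hypothetical helper standing in for the contains(hta) checks in
	// maybeAddHeader and appendOrReplace.
	public boolean shouldAdd(HeadersToAdd hta) {
		return this.whichHeaders.contains(hta); // was get(hta.ordinal())
	}

	public static void main(String[] args) {
		HeaderFilterSketch sketch = new HeaderFilterSketch();
		sketch.excludeHeader(HeadersToAdd.EX_STACKTRACE);
		System.out.println(sketch.shouldAdd(HeadersToAdd.OFFSET));        // true
		System.out.println(sketch.shouldAdd(HeadersToAdd.EX_STACKTRACE)); // false
	}
}

A side benefit of EnumSet.allOf is that any constant added to the enum later is included by default without touching the initialization code, which the old range-based BitSet seeding could not guarantee.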
