import java.util.List;
import java.util.Map;

- import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Headers;
+ import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.utils.Bytes;

import org.springframework.core.log.LogAccessor;
+ import org.springframework.core.log.LogMessage;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
import org.springframework.kafka.support.JacksonPresent;

 * <p>
 * If a {@link RecordMessageConverter} is provided, and the batch type is a {@link ParameterizedType}
 * with a single generic type parameter, each record will be passed to the converter, thus supporting
-  * a method signature {@code List<Foo> foos}.
+  * a method signature {@code List<MyType> myObjects}.
 *
 * @author Marius Bogoevici
 * @author Gary Russell
 * @author Sanghyeok An
 * @author Hope Kim
 * @author Borahm Lee
+  * @author Artem Bilan
+  *
 * @since 1.1
 */
public class BatchMessagingMessageConverter implements BatchMessageConverter {

-     protected final LogAccessor logger = new LogAccessor(LogFactory.getLog(getClass())); // NOSONAR
+     protected final LogAccessor logger = new LogAccessor(getClass()); // NOSONAR

    @Nullable
    private final RecordMessageConverter recordConverter;
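For context (not part of the commit itself), a minimal sketch of the batch listener signature the Javadoc above refers to. MyType, the topic, the listener id, and the class name are hypothetical, and a RecordMessageConverter is assumed to be configured on the listener container factory:

import java.util.List;

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Hypothetical payload type, used only to illustrate the List<MyType> signature.
record MyType(String value) { }

@Component
class BatchListenerSketch {

    // With a record converter configured, each ConsumerRecord in the polled batch
    // is converted to MyType individually and the whole batch is delivered as one List.
    @KafkaListener(id = "batchGroup", topics = "my-topic", batch = "true")
    public void listen(List<MyType> myObjects) {
        myObjects.forEach(System.out::println);
    }

}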
@@ -102,7 +105,7 @@ public BatchMessagingMessageConverter(@Nullable RecordMessageConverter recordCon

    /**
     * Generate {@link Message} {@code ids} for produced messages. If set to {@code false},
-      * will try to use a default value. By default set to {@code false}.
+      * will try to use a default value. By default, set to {@code false}.
     * @param generateMessageId true if a message id should be generated
     */
    public void setGenerateMessageId(boolean generateMessageId) {
@@ -111,7 +114,7 @@ public void setGenerateMessageId(boolean generateMessageId) {

    /**
     * Generate {@code timestamp} for produced messages. If set to {@code false}, -1 is
-      * used instead. By default set to {@code false}.
+      * used instead. By default, set to {@code false}.
     * @param generateTimestamp true if a timestamp should be generated
     */
    public void setGenerateTimestamp(boolean generateTimestamp) {
@@ -147,8 +150,8 @@ public void setRawRecordHeader(boolean rawRecordHeader) {
    public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, @Nullable Acknowledgment acknowledgment,
            Consumer<?, ?> consumer, Type type) {

-         KafkaMessageHeaders kafkaMessageHeaders = new KafkaMessageHeaders(this.generateMessageId,
-                 this.generateTimestamp);
+         KafkaMessageHeaders kafkaMessageHeaders =
+                 new KafkaMessageHeaders(this.generateMessageId, this.generateTimestamp);

        Map<String, Object> rawHeaders = kafkaMessageHeaders.getRawHeaders();
        List<Object> payloads = new ArrayList<>();
@@ -169,16 +172,18 @@ public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, @Nullable Acknow

        String listenerInfo = null;
        for (ConsumerRecord<?, ?> record : records) {
-             addRecordInfo(record, type, payloads, keys, topics, partitions, offsets, timestampTypes, timestamps, conversionFailures);
-             if (this.headerMapper != null && record.headers() != null) {
-                 Map<String, Object> converted = convertHeaders(record.headers(), convertedHeaders);
+             addRecordInfo(record, type, payloads, keys, topics, partitions, offsets, timestampTypes, timestamps,
+                     conversionFailures);
+             Headers recordHeaders = record.headers();
+             if (this.headerMapper != null && recordHeaders != null) {
+                 Map<String, Object> converted = convertHeaders(recordHeaders, convertedHeaders);
                Object obj = converted.get(KafkaHeaders.LISTENER_INFO);
-                 if (obj instanceof String) {
-                     listenerInfo = (String) obj;
+                 if (obj instanceof String info) {
+                     listenerInfo = info;
                }
            }
            else {
-                 natives.add(record.headers());
+                 natives.add(recordHeaders);
            }
            if (this.rawRecordHeader) {
                raws.add(record);
@@ -198,6 +203,7 @@ public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, @Nullable Acknow

    private void addToRawHeaders(Map<String, Object> rawHeaders, List<Map<String, Object>> convertedHeaders,
            List<Headers> natives, List<ConsumerRecord<?, ?>> raws, List<ConversionException> conversionFailures) {
+
        if (this.headerMapper != null) {
            rawHeaders.put(KafkaHeaders.BATCH_CONVERTED_HEADERS, convertedHeaders);
        }
@@ -211,16 +217,18 @@ private void addToRawHeaders(Map<String, Object> rawHeaders, List<Map<String, Ob
    }

    private void addRecordInfo(ConsumerRecord<?, ?> record, Type type, List<Object> payloads, List<Object> keys,
-             List<String> topics, List<Integer> partitions, List<Long> offsets, List<String> timestampTypes,
-             List<Long> timestamps, List<ConversionException> conversionFailures) {
+             List<String> topics, List<Integer> partitions, List<Long> offsets, List<String> timestampTypes,
+             List<Long> timestamps, List<ConversionException> conversionFailures) {
+
        payloads.add(obtainPayload(type, record, conversionFailures));
        keys.add(record.key());
        topics.add(record.topic());
        partitions.add(record.partition());
        offsets.add(record.offset());
        timestamps.add(record.timestamp());
-         if (record.timestampType() != null) {
-             timestampTypes.add(record.timestampType().name());
+         TimestampType timestampType = record.timestampType();
+         if (timestampType != null) {
+             timestampTypes.add(timestampType.name());
        }
    }

@@ -264,24 +272,29 @@ protected Object extractAndConvertValue(ConsumerRecord<?, ?> record, Type type)
    protected Object convert(ConsumerRecord<?, ?> record, Type type, List<ConversionException> conversionFailures) {
        try {
            Object payload = this.recordConverter
-                     .toMessage(record, null, null, ((ParameterizedType) type).getActualTypeArguments()[0]).getPayload();
+                     .toMessage(record, null, null, ((ParameterizedType) type).getActualTypeArguments()[0]).getPayload();
            conversionFailures.add(null);
            return payload;
        }
        catch (ConversionException ex) {
            byte[] original = null;
-             if (record.value() instanceof byte[]) {
-                 original = (byte[]) record.value();
+             if (record.value() instanceof byte[] bytes) {
+                 original = bytes;
            }
-             else if (record.value() instanceof Bytes) {
-                 original = ((Bytes) record.value()).get();
+             else if (record.value() instanceof Bytes bytes) {
+                 original = bytes.get();
            }
-             else if (record.value() instanceof String) {
-                 original = ((String) record.value()).getBytes(StandardCharsets.UTF_8);
+             else if (record.value() instanceof String string) {
+                 original = string.getBytes(StandardCharsets.UTF_8);
            }
            if (original != null) {
                SerializationUtils.deserializationException(record.headers(), original, ex, false);
                conversionFailures.add(ex);
+                 this.logger.warn(ex,
+                         LogMessage.format("Could not convert message for topic=%s, partition=%d, offset=%d",
+                                 record.topic(),
+                                 record.partition(),
+                                 record.offset()));
                return null;
            }
            throw new ConversionException("The batch converter can only report conversion failures to the listener "
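As a usage note (an assumption, not asserted by this commit): per-record conversion failures tracked above surface to batch listeners as null payload elements plus a parallel list of exceptions, which Spring Kafka exposes through the KafkaHeaders.CONVERSION_FAILURES header. A variant of the hypothetical listener sketched earlier could inspect them like this (@Header comes from org.springframework.messaging.handler.annotation):

    // Hypothetical example: MyType and the topic are placeholders as before.
    @KafkaListener(id = "batchWithFailures", topics = "my-topic", batch = "true")
    public void listen(List<MyType> myObjects,
            @Header(KafkaHeaders.CONVERSION_FAILURES) List<ConversionException> failures) {

        for (int i = 0; i < myObjects.size(); i++) {
            if (failures.get(i) == null) {
                System.out.println(myObjects.get(i));
            }
            else {
                // Failed record: payload element is null, exception is in the parallel list.
                System.err.println("Record " + i + " failed conversion: " + failures.get(i).getMessage());
            }
        }
    }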
@@ -296,8 +309,8 @@ else if (record.value() instanceof String) {
     * @return true if the conditions are met.
     */
    private boolean containerType(Type type) {
-         return type instanceof ParameterizedType
-                 && ((ParameterizedType) type).getActualTypeArguments().length == 1;
+         return type instanceof ParameterizedType parameterizedType
+                 && parameterizedType.getActualTypeArguments().length == 1;
    }

}
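For orientation only, a sketch (under assumptions, not part of this commit) of how this converter is typically wired into a batch listener container factory so that the List<MyType> signature above works. The bean name and the JSON record converter are illustrative choices, and the exact factory setter may differ between Spring Kafka versions:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.support.converter.BatchMessagingMessageConverter;
import org.springframework.kafka.support.converter.JsonMessageConverter;

@Configuration
class BatchConverterConfig {

    @Bean
    ConcurrentKafkaListenerContainerFactory<String, String> batchFactory(
            ConsumerFactory<String, String> consumerFactory) {

        // Delegate per-record conversion to a JSON record converter (illustrative choice).
        BatchMessagingMessageConverter converter =
                new BatchMessagingMessageConverter(new JsonMessageConverter());
        converter.setGenerateMessageId(true);   // otherwise a default id is attempted
        converter.setGenerateTimestamp(true);   // otherwise -1 is used

        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setBatchListener(true);
        factory.setBatchMessageConverter(converter);
        return factory;
    }

}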