Skip to content
This repository was archived by the owner on Mar 30, 2023. It is now read-only.

Commit 0ceb761

Browse files
artembilan authored and garyrussell committed
Apply SI-Kotlin-DSL in tests
1 parent c9d35c4 commit 0ceb761

File tree

2 files changed

+59
-54
lines changed

2 files changed

+59
-54
lines changed

build.gradle

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,9 @@ ext {
5959
jacksonVersion = '2.10.0'
6060
junitJupiterVersion = '5.5.2'
6161
log4jVersion = '2.12.1'
62-
springIntegrationVersion = '5.2.0.RELEASE'
63-
springKafkaVersion = '2.3.0.RELEASE'
62+
springIntegrationVersion = '5.2.1.BUILD-SNAPSHOT'
63+
springIntegrationKotlinVersion = '0.0.2.BUILD-SNAPSHOT'
64+
springKafkaVersion = '2.3.1.BUILD-SNAPSHOT'
6465

6566
idPrefix = 'kafka'
6667

@@ -102,11 +103,13 @@ dependencies {
102103
compile "org.springframework.kafka:spring-kafka:$springKafkaVersion"
103104

104105
testCompile "org.springframework.kafka:spring-kafka-test:$springKafkaVersion"
106+
testCompile "org.springframework.integration:spring-integration-kotlin-dsl:$springIntegrationKotlinVersion"
105107
testCompile 'org.springframework.integration:spring-integration-test'
106108
testCompile "com.willowtreeapps.assertk:assertk-jvm:$assertkVersion"
107109
testCompile 'org.jetbrains.kotlin:kotlin-reflect'
108110
testCompile 'org.jetbrains.kotlin:kotlin-stdlib-jdk8'
109111
testCompile 'org.junit.jupiter:junit-jupiter-api'
112+
110113
testRuntime 'org.junit.jupiter:junit-jupiter-engine'
111114
testRuntime 'org.junit.platform:junit-platform-launcher'
112115

src/test/kotlin/org/springframework/integration/kafka/dsl/KafkaDslKotlinTests.kt

Lines changed: 54 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -37,8 +37,10 @@ import org.springframework.integration.MessageRejectedException
3737
import org.springframework.integration.channel.QueueChannel
3838
import org.springframework.integration.config.EnableIntegration
3939
import org.springframework.integration.dsl.IntegrationFlow
40-
import org.springframework.integration.dsl.IntegrationFlows
4140
import org.springframework.integration.dsl.Pollers
41+
import org.springframework.integration.dsl.kotlin.filterReified
42+
import org.springframework.integration.dsl.kotlin.integrationFlow
43+
import org.springframework.integration.dsl.kotlin.split
4244
import org.springframework.integration.handler.advice.ErrorMessageSendingRecoverer
4345
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter
4446
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler
@@ -87,7 +89,7 @@ import java.util.stream.Stream
8789
@SpringJUnitConfig
8890
@DirtiesContext
8991
@EmbeddedKafka(topics = [KafkaDslKotlinTests.TEST_TOPIC1, KafkaDslKotlinTests.TEST_TOPIC2,
90-
KafkaDslKotlinTests.TEST_TOPIC3, KafkaDslKotlinTests.TEST_TOPIC4, KafkaDslKotlinTests.TEST_TOPIC5])
92+
KafkaDslKotlinTests.TEST_TOPIC3, KafkaDslKotlinTests.TEST_TOPIC4, KafkaDslKotlinTests.TEST_TOPIC5])
9193
class KafkaDslKotlinTests {
9294

9395
companion object {
@@ -159,8 +161,8 @@ class KafkaDslKotlinTests {
159161
assertThat(receive!!.payload).isEqualTo("FOO")
160162
val headers = receive.headers
161163
assertThat(headers.containsKey(KafkaHeaders.ACKNOWLEDGMENT)).isTrue()
162-
val acknowledgment = headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)
163-
acknowledgment?.acknowledge()
164+
val acknowledgment = headers[KafkaHeaders.ACKNOWLEDGMENT] as Acknowledgment
165+
acknowledgment.acknowledge()
164166
assertThat(headers[KafkaHeaders.RECEIVED_TOPIC]).isEqualTo(TEST_TOPIC1)
165167
assertThat(headers[KafkaHeaders.RECEIVED_MESSAGE_KEY]).isEqualTo(i + 1)
166168
assertThat(headers[KafkaHeaders.RECEIVED_PARTITION_ID]).isEqualTo(0)
@@ -176,8 +178,8 @@ class KafkaDslKotlinTests {
176178
assertThat(receive!!.payload).isEqualTo("FOO")
177179
val headers = receive.headers
178180
assertThat(headers.containsKey(KafkaHeaders.ACKNOWLEDGMENT)).isTrue()
179-
val acknowledgment = headers.get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment::class.java)
180-
acknowledgment?.acknowledge()
181+
val acknowledgment = headers[KafkaHeaders.ACKNOWLEDGMENT] as Acknowledgment
182+
acknowledgment.acknowledge()
181183
assertThat(headers[KafkaHeaders.RECEIVED_TOPIC]).isEqualTo(TEST_TOPIC2)
182184
assertThat(headers[KafkaHeaders.RECEIVED_MESSAGE_KEY]).isEqualTo(i + 1)
183185
assertThat(headers[KafkaHeaders.RECEIVED_PARTITION_ID]).isEqualTo(0)
@@ -232,7 +234,7 @@ class KafkaDslKotlinTests {
232234
@Bean
233235
fun consumerFactory(): ConsumerFactory<Int, String> {
234236
val props = KafkaTestUtils.consumerProps("test1", "false", this.embeddedKafka)
235-
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
237+
props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest"
236238
return DefaultKafkaConsumerFactory(props)
237239
}
238240

@@ -241,7 +243,7 @@ class KafkaDslKotlinTests {
241243

242244
@Bean
243245
fun topic1ListenerFromKafkaFlow() =
244-
IntegrationFlows.from(
246+
integrationFlow(
245247
Kafka.messageDrivenChannelAdapter(consumerFactory(),
246248
KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC1)
247249
.configureListenerContainer {
@@ -251,50 +253,52 @@ class KafkaDslKotlinTests {
251253
.recoveryCallback(ErrorMessageSendingRecoverer(errorChannel(),
252254
RawRecordHeaderErrorMessageStrategy()))
253255
.retryTemplate(RetryTemplate())
254-
.filterInRetry(true))
255-
.filter(Message::class.java, { m -> m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer::class.java)!! < 101 },
256-
{ f -> f.throwExceptionOnRejection(true) })
257-
.transform<String, String> { it.toUpperCase() }
258-
.channel { c -> c.queue("listeningFromKafkaResults1") }
259-
.get()
256+
.filterInRetry(true)) {
257+
it.filterReified<Message<*>>(
258+
{ m -> (m.headers[KafkaHeaders.RECEIVED_MESSAGE_KEY] as Int) < 101 },
259+
{ f -> f.throwExceptionOnRejection(true) })
260+
.transform<String, String> { it.toUpperCase() }
261+
.channel { c -> c.queue("listeningFromKafkaResults1") }
262+
}
260263

261264
@Bean
262265
fun topic2ListenerFromKafkaFlow() =
263-
IntegrationFlows.from(
266+
integrationFlow(
264267
Kafka.messageDrivenChannelAdapter(consumerFactory(),
265268
KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC2)
266269
.configureListenerContainer { it.ackMode(ContainerProperties.AckMode.MANUAL) }
267270
.recoveryCallback(ErrorMessageSendingRecoverer(errorChannel(),
268271
RawRecordHeaderErrorMessageStrategy()))
269272
.retryTemplate(RetryTemplate())
270-
.filterInRetry(true))
271-
.filter(Message::class.java,
272-
{ m -> m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer::class.java)!! < 101 },
273-
{ it.throwExceptionOnRejection(true) })
274-
.transform<String, String> { it.toUpperCase() }
275-
.channel { c -> c.queue("listeningFromKafkaResults2") }
276-
.get()
273+
.filterInRetry(true)) {
274+
it.filterReified<Message<*>>(
275+
{ m -> (m.headers[KafkaHeaders.RECEIVED_MESSAGE_KEY] as Int) < 101 },
276+
{ it.throwExceptionOnRejection(true) })
277+
.transform<String, String> { it.toUpperCase() }
278+
.channel { c -> c.queue("listeningFromKafkaResults2") }
279+
}
277280

278281
@Bean
279282
fun producerFactory(): DefaultKafkaProducerFactory<Int, String> {
280283
val props = KafkaTestUtils.producerProps(this.embeddedKafka)
281-
props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "10000")
284+
props[ProducerConfig.MAX_BLOCK_MS_CONFIG] = "10000"
282285
return DefaultKafkaProducerFactory(props)
283286
}
284287

285288
@Bean
286289
fun sendToKafkaFlow() =
287-
IntegrationFlow { f ->
288-
f.split<String>({ p -> Stream.generate { p }.limit(101) }, null)
289-
.publishSubscribeChannel { c ->
290-
c.subscribe { sf ->
291-
sf.handle(
292-
kafkaMessageHandler(producerFactory(), TEST_TOPIC1)
293-
.timestampExpression("T(Long).valueOf('1487694048633')")
294-
) { it.id("kafkaProducer1") }
295-
}
296-
.subscribe { sf ->
297-
sf.handle(
290+
IntegrationFlow {
291+
it.split<String>({ p -> Stream.generate { p }.limit(101) })
292+
.publishSubscribeChannel {
293+
it
294+
.subscribe {
295+
it.handle(
296+
kafkaMessageHandler(producerFactory(), TEST_TOPIC1)
297+
.timestampExpression("T(Long).valueOf('1487694048633')")
298+
) { it.id("kafkaProducer1") }
299+
}
300+
.subscribe {
301+
it.handle(
298302
kafkaMessageHandler(producerFactory(), TEST_TOPIC2)
299303
.timestamp<Any> { 1487694048644L }
300304
) { it.id("kafkaProducer2") }
@@ -310,21 +314,20 @@ class KafkaDslKotlinTests {
310314
.messageKey<Any> { m -> m.headers[IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER] }
311315
.headerMapper(mapper())
312316
.sync(true)
313-
.partitionId<Any> { _ -> 0 }
317+
.partitionId<Any> { 0 }
314318
.topicExpression("headers[kafka_topic] ?: '$topic'")
315-
.configureKafkaTemplate { t -> t.id("kafkaTemplate:$topic") }
319+
.configureKafkaTemplate { it.id("kafkaTemplate:$topic") }
316320

317321

318322
@Bean
319323
fun sourceFlow() =
320-
IntegrationFlows
321-
.from(Kafka.inboundChannelAdapter(consumerFactory(), ConsumerProperties(TEST_TOPIC3)))
322-
{ e -> e.poller(Pollers.fixedDelay(100)) }
323-
.handle { p ->
324-
this.fromSource = p.getPayload()
325-
this.sourceFlowLatch.countDown()
326-
}
327-
.get()
324+
integrationFlow(Kafka.inboundChannelAdapter(consumerFactory(), ConsumerProperties(TEST_TOPIC3)),
325+
{ e -> e.poller(Pollers.fixedDelay(100)) }) {
326+
it.handle { m ->
327+
this.fromSource = m.payload
328+
this.sourceFlowLatch.countDown()
329+
}
330+
}
328331

329332
@Bean
330333
fun replyingKafkaTemplate() =
@@ -335,10 +338,10 @@ class KafkaDslKotlinTests {
335338

336339
@Bean
337340
fun outboundGateFlow() =
338-
IntegrationFlows.from(Gate::class.java)
339-
.handle(Kafka.outboundGateway(replyingKafkaTemplate())
340-
.sync(true))
341-
.get()
341+
integrationFlow<Gate> {
342+
it.handle(Kafka.outboundGateway(replyingKafkaTemplate())
343+
.sync(true))
344+
}
342345

343346
private fun replyContainer(): GenericMessageListenerContainer<Int, String> {
344347
val containerProperties = ContainerProperties(TEST_TOPIC5)
@@ -359,10 +362,9 @@ class KafkaDslKotlinTests {
359362

360363
@Bean
361364
fun serverGateway() =
362-
IntegrationFlows.from(
363-
Kafka.inboundGateway(consumerFactory(), containerProperties(), producerFactory()))
364-
.transform<String, String> { it.toUpperCase() }
365-
.get()
365+
integrationFlow(Kafka.inboundGateway(consumerFactory(), containerProperties(), producerFactory())) {
366+
it.transform<String, String> { it.toUpperCase() }
367+
}
366368

367369
private fun containerProperties() =
368370
ContainerProperties(TEST_TOPIC4)

0 commit comments

Comments (0)