@@ -16,6 +16,7 @@ import org.scalatest.Suite
16
16
17
17
import scala .collection .JavaConversions .mapAsJavaMap
18
18
import scala .collection .mutable
19
+ import scala .collection .mutable .ListBuffer
19
20
import scala .concurrent .duration ._
20
21
import scala .concurrent .{ExecutionContext , TimeoutException }
21
22
import scala .language .{higherKinds , postfixOps }
@@ -210,10 +211,23 @@ sealed trait EmbeddedKafkaSupport {
210
211
ProducerConfig .RETRY_BACKOFF_MS_CONFIG -> 1000 .toString
211
212
)
212
213
214
/**
  * Builds the consumer [[java.util.Properties]] shared by the consume* helpers.
  *
  * Configures a fixed test group id, the broker address derived from the
  * implicit config's Kafka port, offset reset to "earliest", and auto-commit
  * disabled by default (callers override "enable.auto.commit" as needed).
  *
  * @param config an implicit [[EmbeddedKafkaConfig]] providing the Kafka port
  * @return a fresh, caller-owned [[java.util.Properties]] instance
  */
private def baseConsumerConfig(implicit config: EmbeddedKafkaConfig): Properties = {
  val props = new Properties()
  // Plain literal: the original used a pointless `s` interpolator here
  // (no interpolation); only bootstrap.servers actually needs one.
  props.put("group.id", "embedded-kafka-spec")
  props.put("bootstrap.servers", s"localhost:${config.kafkaPort}")
  props.put("auto.offset.reset", "earliest")
  props.put("enable.auto.commit", "false")
  props
}
222
+
213
223
/**
  * Consumes the first message available in `topic`, deserialized as a String.
  *
  * @param topic the topic to consume a message from
  * @param autoCommit whether the consumer should auto-commit offsets; defaults to false
  * @param config an implicit [[EmbeddedKafkaConfig]]
  * @return the first message consumed from the given topic, as a String
  */
def consumeFirstStringMessageFrom(topic: String, autoCommit: Boolean = false)(
    implicit config: EmbeddedKafkaConfig): String = {
  val stringDeserializer = new StringDeserializer()
  consumeFirstMessageFrom(topic, autoCommit)(config, stringDeserializer)
}
216
226
227
/**
  * Consumes the first `number` messages available in `topic`, each
  * deserialized as a String, and returns them as a List.
  *
  * @param topic the topic to consume messages from
  * @param number how many messages to consume
  * @param autoCommit whether the consumer should auto-commit offsets; defaults to false
  * @param config an implicit [[EmbeddedKafkaConfig]]
  * @return the first `number` messages consumed from the given topic, as Strings
  */
def consumeNumberStringMessagesFrom(topic: String, number: Int, autoCommit: Boolean = false)(
    implicit config: EmbeddedKafkaConfig): List[String] = {
  val stringDeserializer = new StringDeserializer()
  consumeNumberMessagesFrom(topic, number, autoCommit)(config, stringDeserializer)
}
230
+
217
231
/**
218
232
* Consumes the first message available in a given topic, deserializing it as a String.
219
233
*
@@ -238,10 +252,7 @@ sealed trait EmbeddedKafkaSupport {
238
252
239
253
import scala .collection .JavaConversions ._
240
254
241
- val props = new Properties ()
242
- props.put(" group.id" , s " embedded-kafka-spec " )
243
- props.put(" bootstrap.servers" , s " localhost: ${config.kafkaPort}" )
244
- props.put(" auto.offset.reset" , " earliest" )
255
+ val props = baseConsumerConfig
245
256
props.put(" enable.auto.commit" , autoCommit.toString)
246
257
247
258
val consumer =
@@ -271,6 +282,69 @@ sealed trait EmbeddedKafkaSupport {
271
282
}.get
272
283
}
273
284
285
/**
  * Consumes the first `number` messages available in a given topic,
  * deserializing each as type [[T]], and returns them as a List.
  *
  * Only the messages that are returned are committed if autoCommit is false.
  * If autoCommit is true then all messages that were polled will be committed.
  *
  * @param topic the topic to consume messages from
  * @param number the number of messages to consume in a batch
  * @param autoCommit if false, only the offsets of the consumed messages will be committed.
  *                   if true, the offset for the last polled message will be committed instead.
  *                   Defaulted to false.
  * @param config an implicit [[EmbeddedKafkaConfig]]
  * @param deserializer an implicit [[org.apache.kafka.common.serialization.Deserializer]] for the type [[T]]
  * @return the first `number` messages consumed from the given topic, each with type [[T]]
  * @throws TimeoutException if unable to consume a message within 5 seconds
  * @throws KafkaUnavailableException if unable to connect to Kafka
  */
def consumeNumberMessagesFrom[T](topic: String, number: Int, autoCommit: Boolean = false)(
    implicit config: EmbeddedKafkaConfig,
    deserializer: Deserializer[T]): List[T] = {

  import scala.collection.JavaConverters._

  val props = baseConsumerConfig
  props.put("enable.auto.commit", autoCommit.toString)

  val consumer =
    new KafkaConsumer[String, T](props, new StringDeserializer, deserializer)

  val messages = Try {
    val messagesBuffer = ListBuffer.empty[T]
    var messagesRead = 0
    consumer.subscribe(List(topic).asJava)
    // Forces a metadata fetch up front so the first poll is less likely to
    // time out against a freshly started embedded broker.
    consumer.partitionsFor(topic)

    while (messagesRead < number) {
      val records = consumer.poll(5000)
      if (records.isEmpty) {
        throw new TimeoutException(
          "Unable to retrieve a message from Kafka in 5000ms")
      }

      val recordIter = records.iterator()
      while (recordIter.hasNext && messagesRead < number) {
        val record = recordIter.next()
        messagesBuffer += record.value()
        // Fix: commit manually only when auto-commit is disabled. The
        // original committed every record via commitSync regardless of
        // `autoCommit`, contradicting the documented contract above —
        // with autoCommit = true, the consumer's own auto-commit is
        // responsible for the offsets of everything that was polled.
        if (!autoCommit) {
          // Commit record.offset() + 1: the committed offset is the
          // position of the NEXT message to read.
          val tp = new TopicPartition(record.topic(), record.partition())
          val om = new OffsetAndMetadata(record.offset() + 1)
          consumer.commitSync(Map(tp -> om).asJava)
        }
        messagesRead += 1
      }
    }
    messagesBuffer.toList
  }

  // close() runs whether the Try succeeded or failed; the failure (if any)
  // is surfaced afterwards by .get.
  consumer.close()
  messages.recover {
    case ex: KafkaException => throw new KafkaUnavailableException(ex)
  }.get
}
346
+
347
+
274
348
object aKafkaProducer {
275
349
private [this ] var producers = Vector .empty[KafkaProducer [_, _]]
276
350
0 commit comments