2626import org .slf4j .Logger ;
2727import org .slf4j .LoggerFactory ;
2828
29- import java .io .IOException ;
3029import java .nio .ByteBuffer ;
3130import java .util .Iterator ;
31+ import java .util .NoSuchElementException ;
3232import java .util .Properties ;
3333import java .util .concurrent .TimeUnit ;
3434
3535
3636/**
 * A class which reads the fetch results from Kafka.
3838 */
39- public class KafkaReader {
40- private static final Logger LOG = LoggerFactory .getLogger (KafkaReader .class );
39+ final class Kafka10Reader implements KafkaReader {
40+ private static final Logger LOG = LoggerFactory .getLogger (Kafka10Reader .class );
4141 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0 ];
4242
4343 // index of context
@@ -50,61 +50,58 @@ public class KafkaReader {
5050
5151
5252 /**
53- * Construct using the json representation of the kafka request
53+ * Construct a reader based on the given {@link KafkaRequest}.
5454 */
55- public KafkaReader (KafkaRequest request ) {
55+ Kafka10Reader (KafkaRequest request ) {
5656 kafkaRequest = request ;
57- currentOffset = request .getOffset ();
58- lastOffset = request .getLastOffset ();
57+ currentOffset = request .getStartOffset ();
58+ lastOffset = request .getEndOffset ();
5959
6060 // read data from queue
6161 Properties properties = new Properties ();
6262 properties .putAll (request .getConf ());
6363 consumer = new KafkaConsumer <>(properties , new ByteArrayDeserializer (), new ByteArrayDeserializer ());
64- fetch ();
6564 }
6665
67- public boolean hasNext () throws IOException {
66+ @ Override
67+ public boolean hasNext () {
6868 if (currentOffset >= lastOffset ) {
6969 return false ;
7070 }
7171 if (messageIter != null && messageIter .hasNext ()) {
7272 return true ;
73- } else {
74- return fetch ();
7573 }
74+ return fetch ();
7675 }
7776
7877 /**
79- * Fetches the next Kafka message and stuffs the results into the key and value.
78+ * Fetches the next Kafka message. The message key will be set into the given {@link KafkaKey} object, and the message
79+ * payload will be returned.
8080 */
81- public KafkaMessage getNext (KafkaKey kafkaKey ) throws IOException {
82- if (hasNext ()) {
83- ConsumerRecord <byte [], byte []> consumerRecord = messageIter .next ();
84-
85- byte [] keyBytes = consumerRecord .key ();
86- byte [] value = consumerRecord .value ();
87- if (value == null ) {
88- LOG .warn ("Received message with null message.payload with topic {} and partition {}" ,
89- kafkaKey .getTopic (), kafkaKey .getPartition ());
90- }
91-
92- ByteBuffer payload = value == null ? ByteBuffer .wrap (EMPTY_BYTE_ARRAY ) : ByteBuffer .wrap (value );
93- ByteBuffer key = keyBytes == null ? ByteBuffer .wrap (EMPTY_BYTE_ARRAY ) : ByteBuffer .wrap (keyBytes );
94-
95- kafkaKey .clear ();
96- kafkaKey .set (kafkaRequest .getTopic (), kafkaRequest .getPartition (), currentOffset ,
97- consumerRecord .offset () + 1 );
98- kafkaKey .setMessageSize (value == null ? -1 : value .length );
99- currentOffset = consumerRecord .offset () + 1 ; // increase offset
100- return new KafkaMessage (payload , key );
101- } else {
102- return null ;
81+ @ Override
82+ public KafkaMessage getNext (KafkaKey kafkaKey ) {
83+ if (!hasNext ()) {
84+ throw new NoSuchElementException ("No message is available" );
10385 }
86+
87+ ConsumerRecord <byte [], byte []> consumerRecord = messageIter .next ();
88+
89+ byte [] keyBytes = consumerRecord .key ();
90+ byte [] value = consumerRecord .value ();
91+
92+ ByteBuffer key = keyBytes == null ? ByteBuffer .wrap (EMPTY_BYTE_ARRAY ) : ByteBuffer .wrap (keyBytes );
93+ ByteBuffer payload = value == null ? ByteBuffer .wrap (EMPTY_BYTE_ARRAY ) : ByteBuffer .wrap (value );
94+
95+ kafkaKey .set (currentOffset , consumerRecord .offset () + 1 ,
96+ consumerRecord .serializedKeySize () + consumerRecord .serializedValueSize (), consumerRecord .checksum ());
97+ currentOffset = consumerRecord .offset () + 1 ; // increase offset
98+ return new KafkaMessage (payload , key );
10499 }
105100
106101 /**
107- * Creates a fetch request.
102+ * Fetch messages from Kafka.
103+ *
104+ * @return {@code true} if there is some messages available, {@code false} otherwise
108105 */
109106 private boolean fetch () {
110107 if (currentOffset >= lastOffset ) {
@@ -124,9 +121,10 @@ private boolean fetch() {
124121 }
125122
126123 /**
127- * Closes this context
124+ * Closes this reader.
128125 */
129- public void close () throws IOException {
126+ @ Override
127+ public void close () {
130128 if (consumer != null ) {
131129 consumer .close ();
132130 }
0 commit comments