Friday, July 1, 2016

Sample Kafka Producer and Consumer

Applies to: Apache Kafka version 0.9.x


Producer 



import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class Producer {

    private void generateMessages() {
        String topic = "myTopic";

        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        // Wait for all in-sync replicas to acknowledge each record.
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("client.id", "test");

        KafkaProducer<String, String> producer = null;
        try {
            producer = new KafkaProducer<>(props);
            producer.send(new ProducerRecord<String, String>(topic, "test msg"));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (producer != null) {
                // Give buffered records up to 100 ms to be sent before closing.
                producer.close(100, TimeUnit.MILLISECONDS);
            }
        }
    }

    public static void main(String[] args) {
        Producer producer = new Producer();
        producer.generateMessages();
    }
}
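
The send() above is fire-and-forget: a broker-side failure would only surface if you inspected the returned Future. KafkaProducer.send() also accepts a Callback that is invoked when the broker acknowledges the record or the send fails. Below is a minimal sketch, reusing the producer, topic and message from the snippet above:

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;

// Reusing the KafkaProducer<String, String> producer built above.
producer.send(new ProducerRecord<String, String>("myTopic", "test msg"), new Callback() {
    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        // exception is non-null if the send failed; metadata is non-null on success.
        if (exception != null) {
            exception.printStackTrace();
        } else {
            System.out.println("sent to " + metadata.topic() + "-" + metadata.partition()
                    + " at offset " + metadata.offset());
        }
    }
});
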
Consumer


import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class Listener {

    public void start() {
        String topic = "myTopic";
        List<String> topics = Arrays.asList(topic);

        Properties props = new Properties();
        props.put("bootstrap.servers", "aukk1.leightonobrien.com:9092");
        props.put("group.id", "test");
        // Commit offsets automatically every second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("heartbeat.interval.ms", "10000");
        // Start from the earliest offset when the group has no committed offset.
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        props.put("fetch.min.bytes", 1);
        props.put("receive.buffer.bytes", "10000");
        props.put("max.partition.fetch.bytes", "10000");
        props.put("request.timeout.ms", "40000");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        try {
            consumer.subscribe(topics);
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(3000);
                System.out.println("polled messages : " + records.count());
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("kafka record : " + record.value());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }

    public static void main(String[] args) {
        Listener listener = new Listener();
        listener.start();
    }
}
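
The consumer above relies on auto-commit, so offsets are committed on a timer whether or not the polled records were fully processed. For at-least-once processing, a common variant is to turn auto-commit off and commit manually after each batch. A minimal sketch, assuming the same props as above but with enable.auto.commit set to "false":

props.put("enable.auto.commit", "false");

KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
try {
    consumer.subscribe(Arrays.asList("myTopic"));
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(3000);
        for (ConsumerRecord<String, String> record : records) {
            System.out.println("kafka record : " + record.value());
        }
        // Commit the offsets returned by the last poll() only after the
        // records have been processed.
        consumer.commitSync();
    }
} finally {
    consumer.close();
}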
