
Kafka Producer and Consumer Java Examples (with Avro Serialization)

2015-12-28 13:57
This article presents Java examples of producing and consuming plain string messages on Kafka, followed by examples that serialize the payload with Avro. Together they cover the basics needed to start developing Kafka producers and consumers.

1. Plain string messages

Producer example:

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class KafkaProducer {

    private final Producer<String, String> producer;
    public final static String TOPIC = "TEST-TOPIC";

    private KafkaProducer() {
        Properties props = new Properties();
        // Kafka broker list (host:port)
        props.put("metadata.broker.list", "192.168.193.148:9092");

        // Serializer class for message values
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        // Serializer class for message keys
        props.put("key.serializer.class", "kafka.serializer.StringEncoder");

        // request.required.acks
        //  0: the producer never waits for an acknowledgement from the broker.
        //     Lowest latency but weakest durability (data is lost if a server fails).
        //  1: the producer gets an acknowledgement after the leader replica has
        //     received the data. Better durability; only messages written to a
        //     now-dead leader but not yet replicated are lost.
        // -1: the producer gets an acknowledgement after all in-sync replicas have
        //     received the data. Best durability; no messages are lost as long as
        //     at least one in-sync replica remains.
        props.put("request.required.acks", "-1");

        producer = new Producer<String, String>(new ProducerConfig(props));
    }

    void produce() {
        int messageNo = 1000;
        final int COUNT = 10000;

        while (messageNo < COUNT) {
            String key = String.valueOf(messageNo);
            String data = "hello kafka message " + key;
            producer.send(new KeyedMessage<String, String>(TOPIC, key, data));
            System.out.println(data);
            messageNo++;
        }
    }

    public static void main(String[] args) {
        new KafkaProducer().produce();
    }
}

Consumer example:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

public class KafkaConsumer {

    private final ConsumerConnector consumer;

    private KafkaConsumer() {
        Properties props = new Properties();
        // ZooKeeper connection string
        props.put("zookeeper.connect", "192.168.193.148:2181");

        // Consumer group id
        props.put("group.id", "jd-group");

        // ZooKeeper session timeout and sync time
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        // Start from the earliest available offset when no offset is committed
        props.put("auto.offset.reset", "smallest");

        ConsumerConfig config = new ConsumerConfig(props);

        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
    }

    void consume() {
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(KafkaProducer.TOPIC, 1);

        // Decoders for message keys and values (plain strings in this example)
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(KafkaProducer.TOPIC).get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        while (it.hasNext())
            System.out.println(it.next().message());
    }

    public static void main(String[] args) {
        new KafkaConsumer().consume();
    }
}
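Note that it.hasNext() blocks indefinitely when no new messages arrive, so consume() never returns on its own. One way to make the loop exit, sketched below against the class above (this is not part of the original example), is the old high-level consumer's consumer.timeout.ms setting, which makes hasNext() throw kafka.consumer.ConsumerTimeoutException after that much time without a message:

// In the constructor, alongside the other properties:
props.put("consumer.timeout.ms", "10000"); // give up after 10s of silence

// In consume(), wrap the iteration so the timeout ends it cleanly:
try {
    while (it.hasNext())
        System.out.println(it.next().message());
} catch (kafka.consumer.ConsumerTimeoutException e) {
    // No message arrived within consumer.timeout.ms; stop consuming.
    consumer.shutdown();
}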
2. Serializing with Avro

Producer example:

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Properties;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.commons.codec.binary.Hex;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class ProducerTest {

    void producer(Schema schema) throws IOException {
        Properties props = new Properties();
        props.put("metadata.broker.list", "0:9092");
        // DefaultEncoder passes the message value through as a raw byte[]
        props.put("serializer.class", "kafka.serializer.DefaultEncoder");
        props.put("request.required.acks", "1");
        ProducerConfig config = new ProducerConfig(props);
        Producer<String, byte[]> producer = new Producer<String, byte[]>(config);

        // Step 1: create a GenericRecord from the schema
        GenericRecord payload1 = new GenericData.Record(schema);
        // Step 2: put data into the GenericRecord
        payload1.put("desc", "'testdata'");
        payload1.put("name", "dbevent1");
        payload1.put("id", 111);
        System.out.println("Original Message : " + payload1);

        // Step 3: serialize the record to a byte array
        DatumWriter<GenericRecord> writer = new SpecificDatumWriter<GenericRecord>(schema);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        writer.write(payload1, encoder);
        encoder.flush();
        out.close();

        byte[] serializedBytes = out.toByteArray();
        System.out.println("Sending message in bytes : " + Hex.encodeHexString(serializedBytes));
        KeyedMessage<String, byte[]> message =
                new KeyedMessage<String, byte[]>("page_views", serializedBytes);
        producer.send(message);
        producer.close();
    }

    public static void main(String[] args) throws IOException {
        ProducerTest test = new ProducerTest();
        Schema schema = new Schema.Parser().parse(new File("src/test_schema.avsc"));
        test.producer(schema);
    }
}
Contents of the test_schema.avsc schema file:
{
    "namespace": "xyz.test",
    "type": "record",
    "name": "payload",
    "fields": [
        { "name": "name", "type": "string" },
        { "name": "id",   "type": ["int", "null"] },
        { "name": "desc", "type": ["string", "null"] }
    ]
}
Notes:

1. You must set props.put("serializer.class", "kafka.serializer.DefaultEncoder") so that the message value is transmitted as a raw byte[].

2. The type of any field that may be empty must be a union that includes "null" (as with id and desc above). Without the null branch, serialization throws an exception whenever such a field is left unset.
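To see note 2 in action, here is a minimal, self-contained round-trip sketch. It does not touch Kafka, it inlines a schema equivalent to test_schema.avsc as a string, and the class name NullFieldRoundTrip is ours, not from the original example. Leaving the nullable desc field unset serializes and deserializes cleanly; with a plain "string" type the write would fail:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class NullFieldRoundTrip {

    // Same shape as test_schema.avsc: "id" and "desc" are unions with "null"
    private static final String SCHEMA_JSON =
        "{\"namespace\":\"xyz.test\",\"type\":\"record\",\"name\":\"payload\","
      + "\"fields\":["
      + "{\"name\":\"name\",\"type\":\"string\"},"
      + "{\"name\":\"id\",\"type\":[\"int\",\"null\"]},"
      + "{\"name\":\"desc\",\"type\":[\"string\",\"null\"]}]}";

    public static void main(String[] args) throws IOException {
        Schema schema = new Schema.Parser().parse(SCHEMA_JSON);

        GenericRecord record = new GenericData.Record(schema);
        record.put("name", "dbevent1");
        record.put("id", 111);
        // "desc" is never set; it stays null, which the union permits.
        // With "type": "string" instead, the write below would throw.

        // Serialize to a byte array
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();

        // Deserialize and verify the null survived the round trip
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericRecord decoded = new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
        System.out.println("desc after round trip: " + decoded.get("desc")); // prints null
    }
}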

Consumer example:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class ConsumerGroupExample {

    private final ConsumerConnector consumer;
    private final String topic;
    private ExecutorService executor;

    public ConsumerGroupExample(String a_zookeeper, String a_groupId, String a_topic) {
        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
                createConsumerConfig(a_zookeeper, a_groupId));
        this.topic = a_topic;
    }

    private static ConsumerConfig createConsumerConfig(String a_zookeeper, String a_groupId) {
        Properties props = new Properties();
        props.put("zookeeper.connect", a_zookeeper);
        props.put("group.id", a_groupId);
        props.put("zookeeper.session.timeout.ms", "400");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");

        return new ConsumerConfig(props);
    }

    public void shutdown() {
        if (consumer != null) consumer.shutdown();
        if (executor != null) executor.shutdown();
        try {
            if (executor != null
                    && !executor.awaitTermination(5000, TimeUnit.MILLISECONDS)) {
                System.out.println("Timed out waiting for consumer threads to shut down, exiting uncleanly");
            }
        } catch (InterruptedException e) {
            System.out.println("Interrupted during shutdown, exiting uncleanly");
        }
    }

    public void run(int a_numThreads) {
        // Map each topic to the number of threads that should consume it
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, a_numThreads);
        // Create the message streams for the topic
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCountMap);
        List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

        // Initialize the thread pool and launch one consumer task per stream
        executor = Executors.newFixedThreadPool(a_numThreads);
        int threadNumber = 0;
        for (final KafkaStream<byte[], byte[]> stream : streams) {
            executor.submit(new ConsumerTest(stream, threadNumber));
            threadNumber++;
        }
    }

    public static void main(String[] args) {
        String zooKeeper = args[0];
        String groupId = args[1];
        String topic = args[2];
        int threads = Integer.parseInt(args[3]);

        ConsumerGroupExample example = new ConsumerGroupExample(zooKeeper, groupId, topic);
        example.run(threads);

        // Let the consumers run for a while, then shut down
        try {
            Thread.sleep(10000);
        } catch (InterruptedException ie) {
            // ignore and proceed to shutdown
        }
        example.shutdown();
    }
}
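For example, to consume the page_views topic written by ProducerTest with two threads, assuming ZooKeeper at 192.168.193.148:2181 as in the earlier examples and the compiled classes on the classpath (ConsumerTest, the worker each thread runs, is shown below):

java ConsumerGroupExample 192.168.193.148:2181 avro-group page_views 2

The group id avro-group is arbitrary; consumers sharing a group id divide the topic's partitions among themselves.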

ConsumerTest decodes each received byte[] back into a GenericRecord using the same schema file:

import java.io.File;
import java.io.IOException;

import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.commons.codec.binary.Hex;

public class ConsumerTest implements Runnable {

    private KafkaStream<byte[], byte[]> m_stream;
    private int m_threadNumber;

    public ConsumerTest(KafkaStream<byte[], byte[]> a_stream, int a_threadNumber) {
        m_threadNumber = a_threadNumber;
        m_stream = a_stream;
    }

    public void run() {
        Schema schema;
        try {
            // Parse the schema once, rather than once per message
            schema = new Schema.Parser().parse(new File("src/test_schema.avsc"));
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
        DatumReader<GenericRecord> reader = new SpecificDatumReader<GenericRecord>(schema);

        ConsumerIterator<byte[], byte[]> it = m_stream.iterator();
        while (it.hasNext()) {
            try {
                byte[] received_message = it.next().message();
                System.out.println("Encoded message received : " + Hex.encodeHexString(received_message));
                Decoder decoder = DecoderFactory.get().binaryDecoder(received_message, null);
                GenericRecord payload2 = reader.read(null, decoder);
                System.out.println("Message received : " + payload2);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}