Writing Log4j Logs to Kafka in Real Time
2018-01-29 18:42
1. Create a Maven project and add the following dependencies
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>0.11.0.0</version>
</dependency>
<dependency>
    <groupId>log4j</groupId>
    <artifactId>log4j</artifactId>
    <version>1.2.16</version>
</dependency>
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-log4j12</artifactId>
    <version>1.7.21</version>
    <scope>test</scope>
</dependency>
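A quick note on the first artifact: kafka_2.11 is the Scala 2.11 build of the broker-side jar, and it pulls in kafka-clients (which provides the producer API used by the appender below) transitively, so no separate kafka-clients dependency is needed here.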
2. Create log4j.properties
Writing Log4j logs to Kafka in real time requires a core appender class: kafka.producer.KafkaLog4jAppender. Its availability differs across versions: Kafka 0.7 and 0.8 ship this class, but the version used here, 0.11.0.0, does not (it exists in the source tree but not in the compiled jar), so it cannot be used directly. Instead, copy the class below into your project.
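As an aside: from Kafka 0.9 onward the appender is published as a separate Maven artifact, org.apache.kafka:kafka-log4j-appender (package org.apache.kafka.log4jappender). If you would rather depend on that artifact than copy the class, something like the following should work, with the appender class name in log4j.properties adjusted to the new package:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-log4j-appender</artifactId>
    <version>0.11.0.0</version>
</dependency>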
package com.bigdata.kafka;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;

import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * A log4j appender that produces log messages to Kafka
 */
public class KafkaLog4jAppender extends AppenderSkeleton {

    private static final String BOOTSTRAP_SERVERS_CONFIG = ProducerConfig.BOOTSTRAP_SERVERS_CONFIG;
    private static final String COMPRESSION_TYPE_CONFIG = ProducerConfig.COMPRESSION_TYPE_CONFIG;
    private static final String ACKS_CONFIG = ProducerConfig.ACKS_CONFIG;
    private static final String RETRIES_CONFIG = ProducerConfig.RETRIES_CONFIG;
    private static final String KEY_SERIALIZER_CLASS_CONFIG = ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
    private static final String VALUE_SERIALIZER_CLASS_CONFIG = ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
    private static final String SECURITY_PROTOCOL = CommonClientConfigs.SECURITY_PROTOCOL_CONFIG;
    private static final String SSL_TRUSTSTORE_LOCATION = SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG;
    private static final String SSL_TRUSTSTORE_PASSWORD = SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG;
    private static final String SSL_KEYSTORE_TYPE = SslConfigs.SSL_KEYSTORE_TYPE_CONFIG;
    private static final String SSL_KEYSTORE_LOCATION = SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG;
    private static final String SSL_KEYSTORE_PASSWORD = SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG;

    // These fields are populated by log4j from the log4j.appender.kafka.* properties
    private String brokerList = null;
    private String topic = null;
    private String compressionType = null;
    private String securityProtocol = null;
    private String sslTruststoreLocation = null;
    private String sslTruststorePassword = null;
    private String sslKeystoreType = null;
    private String sslKeystoreLocation = null;
    private String sslKeystorePassword = null;
    private int retries = 0;
    private int requiredNumAcks = Integer.MAX_VALUE;
    private boolean syncSend = false;
    private Producer<byte[], byte[]> producer = null;

    public Producer<byte[], byte[]> getProducer() { return producer; }
    public String getBrokerList() { return brokerList; }
    public void setBrokerList(String brokerList) { this.brokerList = brokerList; }
    public int getRequiredNumAcks() { return requiredNumAcks; }
    public void setRequiredNumAcks(int requiredNumAcks) { this.requiredNumAcks = requiredNumAcks; }
    public int getRetries() { return retries; }
    public void setRetries(int retries) { this.retries = retries; }
    public String getCompressionType() { return compressionType; }
    public void setCompressionType(String compressionType) { this.compressionType = compressionType; }
    public String getTopic() { return topic; }
    public void setTopic(String topic) { this.topic = topic; }
    public boolean getSyncSend() { return syncSend; }
    public void setSyncSend(boolean syncSend) { this.syncSend = syncSend; }
    public String getSslTruststorePassword() { return sslTruststorePassword; }
    public String getSslTruststoreLocation() { return sslTruststoreLocation; }
    public String getSecurityProtocol() { return securityProtocol; }
    public void setSecurityProtocol(String securityProtocol) { this.securityProtocol = securityProtocol; }
    public void setSslTruststoreLocation(String sslTruststoreLocation) { this.sslTruststoreLocation = sslTruststoreLocation; }
    public void setSslTruststorePassword(String sslTruststorePassword) { this.sslTruststorePassword = sslTruststorePassword; }
    public void setSslKeystorePassword(String sslKeystorePassword) { this.sslKeystorePassword = sslKeystorePassword; }
    public void setSslKeystoreType(String sslKeystoreType) { this.sslKeystoreType = sslKeystoreType; }
    public void setSslKeystoreLocation(String sslKeystoreLocation) { this.sslKeystoreLocation = sslKeystoreLocation; }
    public String getSslKeystoreLocation() { return sslKeystoreLocation; }
    public String getSslKeystoreType() { return sslKeystoreType; }
    public String getSslKeystorePassword() { return sslKeystorePassword; }

    @Override
    public void activateOptions() {
        // check for config parameter validity
        Properties props = new Properties();
        if (brokerList != null)
            props.put(BOOTSTRAP_SERVERS_CONFIG, brokerList);
        if (props.isEmpty())
            throw new ConfigException("The bootstrap servers property should be specified");
        if (topic == null)
            throw new ConfigException("Topic must be specified by the Kafka log4j appender");
        if (compressionType != null)
            props.put(COMPRESSION_TYPE_CONFIG, compressionType);
        if (requiredNumAcks != Integer.MAX_VALUE)
            props.put(ACKS_CONFIG, Integer.toString(requiredNumAcks));
        if (retries > 0)
            props.put(RETRIES_CONFIG, retries);
        if (securityProtocol != null && sslTruststoreLocation != null && sslTruststorePassword != null) {
            props.put(SECURITY_PROTOCOL, securityProtocol);
            props.put(SSL_TRUSTSTORE_LOCATION, sslTruststoreLocation);
            props.put(SSL_TRUSTSTORE_PASSWORD, sslTruststorePassword);
            if (sslKeystoreType != null && sslKeystoreLocation != null && sslKeystorePassword != null) {
                props.put(SSL_KEYSTORE_TYPE, sslKeystoreType);
                props.put(SSL_KEYSTORE_LOCATION, sslKeystoreLocation);
                props.put(SSL_KEYSTORE_PASSWORD, sslKeystorePassword);
            }
        }
        props.put(KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put(VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        this.producer = getKafkaProducer(props);
        LogLog.debug("Kafka producer connected to " + brokerList);
        LogLog.debug("Logging for topic: " + topic);
    }

    protected Producer<byte[], byte[]> getKafkaProducer(Properties props) {
        return new KafkaProducer<byte[], byte[]>(props);
    }

    @Override
    protected void append(LoggingEvent event) {
        String message = subAppend(event);
        LogLog.debug("[" + new Date(event.getTimeStamp()) + "]" + message);
        // The send is asynchronous; with syncSend enabled we block until the broker responds
        Future<RecordMetadata> response = producer.send(new ProducerRecord<byte[], byte[]>(topic, message.getBytes()));
        if (syncSend) {
            try {
                response.get();
            } catch (InterruptedException ex) {
                throw new RuntimeException(ex);
            } catch (ExecutionException ex) {
                throw new RuntimeException(ex);
            }
        }
    }

    private String subAppend(LoggingEvent event) {
        return (this.layout == null) ? event.getRenderedMessage() : this.layout.format(event);
    }

    public void close() {
        if (!this.closed) {
            this.closed = true;
            producer.close();
        }
    }

    public boolean requiresLayout() {
        return true;
    }
}
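One design choice worth noting: syncSend defaults to false, so append() hands the record to the producer and returns immediately. Enabling syncSend makes every log call block on response.get() until the broker acknowledges, which protects better against message loss but couples your application's latency to Kafka.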
Then add the following configuration to log4j.properties:
log4j.rootLogger=INFO,stdout,kafka

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} [%t] [%c] [%p] - %m%n

log4j.appender.kafka=com.bigdata.kafka.KafkaLog4jAppender
log4j.appender.kafka.topic=log4jtest
log4j.appender.kafka.brokerList=hadoop000:9092
log4j.appender.kafka.layout=org.apache.log4j.PatternLayout
log4j.appender.kafka.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} [%t] [%c] [%p] - %m%n
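log4j binds each log4j.appender.kafka.* key to the matching setter on the appender class, so the other setters shown above can also be driven from configuration. A hypothetical example of optional tuning (the values are illustrative only, not recommendations):

log4j.appender.kafka.syncSend=true
log4j.appender.kafka.requiredNumAcks=1
log4j.appender.kafka.compressionType=gzip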
3. Create the KafkaLog4jApp class
package com.bigdata.kafka;

import org.apache.log4j.Logger;

/**
 * Simulates log generation: emits one INFO message per second.
 */
public class KafkaLog4jApp {

    private static Logger logger = Logger.getLogger(KafkaLog4jApp.class.getName());

    public static void main(String[] args) throws Exception {
        int index = 0;
        while (true) {
            Thread.sleep(1000);
            logger.info("value is: " + index++);
        }
    }
}
Once the above is in place, start Kafka (start ZooKeeper first).
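Assuming you use the ZooKeeper bundled with the Kafka distribution (a standalone ZooKeeper works just as well), it can be started with:

zookeeper-server-start.sh $KAFKA_HOME/config/zookeeper.properties

Then start the broker: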
kafka-server-start.sh $KAFKA_HOME/config/server.properties
4. Create the topic
[hadoop@Master ~]$ kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic log4jtest
Created topic "log4jtest".
[hadoop@Master ~]$ kafka-topics.sh --describe --zookeeper localhost:2181 --topic log4jtest
Topic:log4jtest    PartitionCount:1    ReplicationFactor:1    Configs:
    Topic: log4jtest    Partition: 0    Leader: 0    Replicas: 0    Isr: 0
5. Start a Kafka console consumer, run the project, and watch the consumer's output
[hadoop@Master ~]$ kafka-console-consumer.sh --zookeeper localhost:2181 --topic log4jtest
2017-09-28 17:52:15,178 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 0
2017-09-28 17:52:16,331 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 1
2017-09-28 17:52:17,332 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 2
2017-09-28 17:52:18,333 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 3
2017-09-28 17:52:19,336 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 4
2017-09-28 17:52:20,337 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 5
2017-09-28 17:52:21,338 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 6
2017-09-28 17:52:22,339 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 7
2017-09-28 17:52:23,354 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 8
2017-09-28 17:52:24,355 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 9
2017-09-28 17:52:25,418 [main] [com.bigdata.kafka.KafkaLog4jApp] [INFO] - value is: 10
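The console consumer is enough for verification, but the same topic can also be read programmatically. Below is a minimal sketch using the 0.11-era Java consumer API; the class name Log4jTopicConsumer and the group id log4j-demo are made-up names, and the broker address is taken from the log4j.properties above:

package com.bigdata.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Collections;
import java.util.Properties;

public class Log4jTopicConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hadoop000:9092"); // broker from log4j.properties above
        props.put("group.id", "log4j-demo");              // hypothetical consumer group
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("log4jtest"));
        while (true) {
            // poll(long) is the 0.11-era signature; poll(Duration) replaced it in 2.0
            ConsumerRecords<String, String> records = consumer.poll(1000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}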