您的位置:首页 > 大数据 > Hadoop

Flume使用大全之kafka source-kafka channel-hdfs(SSL加密)

2017-08-07 16:37 483 查看
# ---------- Agent component topology ----------
# One Kafka source feeds one HDFS sink through a Kafka channel.
agent.sources = kafkaSource1

agent.channels = kafkaChannel

agent.sinks = hdfsSink

# Wire source -> channel -> sink.
agent.sources.kafkaSource1.channels = kafkaChannel

agent.sinks.hdfsSink.channel = kafkaChannel

# ---------- kafkaSource1: Kafka source over SSL ----------
agent.sources.kafkaSource1.type=org.apache.flume.source.kafka.KafkaSource

# NOTE(review): zookeeperConnect is the deprecated pre-1.7 bootstrap mechanism;
# kafka.bootstrap.servers below is what the new-style consumer actually uses.
# Kept for backward compatibility only.
agent.sources.kafkaSource1.zookeeperConnect=node1:2181

# Comma-separated list of topics to consume.
# Fixed: "topic" is the deprecated property name; Flume 1.7+ (which this config
# targets, given the kafka.consumer.* properties below) uses "kafka.topics".
agent.sources.kafkaSource1.kafka.topics=bpu_sensor_router,bpu_sensor_record_present,bpu_group_status_present,bpu_gateway_heartbeat,bpu_gateway_router,bpu_sensor_heartbeat

# Fixed: the consumer-group property must carry the "kafka.consumer." prefix;
# without it the setting is ignored (the default group happens to also be "flume").
agent.sources.kafkaSource1.kafka.consumer.group.id=flume

agent.sources.kafkaSource1.kafka.consumer.security.protocol=SSL

# Adjust the keystore/truststore paths and passwords to match your environment.
agent.sources.kafkaSource1.kafka.consumer.ssl.truststore.location =/home/client/client.truststore.jks

agent.sources.kafkaSource1.kafka.consumer.ssl.truststore.password=test1234

agent.sources.kafkaSource1.kafka.consumer.ssl.keystore.location=/home/client/client.keystore.jks

agent.sources.kafkaSource1.kafka.consumer.ssl.keystore.password=pdas202

agent.sources.kafkaSource1.kafka.consumer.ssl.key.password=pdas202

agent.sources.kafkaSource1.kafka.consumer.timeout.ms = 100

# SSL listener port of the broker.
agent.sources.kafkaSource1.kafka.bootstrap.servers = node7:9093

# Max events pulled per batch / max wait before a partial batch is delivered.
agent.sources.kafkaSource1.batchSize=15000

agent.sources.kafkaSource1.batchDurationMillis=2000

# ---------- kafkaChannel: Kafka-backed channel over SSL ----------
agent.channels.kafkaChannel.type = org.apache.flume.channel.kafka.KafkaChannel

agent.channels.kafkaChannel.kafka.bootstrap.servers = node7:9093

# NOTE(review): zookeeperConnect is deprecated for the Kafka channel; retained
# for backward compatibility, kafka.bootstrap.servers above is authoritative.
agent.channels.kafkaChannel.zookeeperConnect=node1:2181

# Topic used as the channel's backing store.
agent.channels.kafkaChannel.kafka.topic = flume-kafkaChannel

# Fixed: the consumer-group property must carry the "kafka.consumer." prefix;
# without it the setting is silently ignored.
agent.channels.kafkaChannel.kafka.consumer.group.id = flume-consumer

agent.channels.kafkaChannel.kafka.producer.security.protocol=SSL

# Adjust the keystore/truststore paths and passwords to match your environment.
agent.channels.kafkaChannel.kafka.producer.ssl.truststore.location =/home/client/client.truststore.jks

agent.channels.kafkaChannel.kafka.producer.ssl.truststore.password=test1234

agent.channels.kafkaChannel.kafka.producer.ssl.keystore.location=/home/client/client.keystore.jks

agent.channels.kafkaChannel.kafka.producer.ssl.keystore.password=pdas202

agent.channels.kafkaChannel.kafka.producer.ssl.key.password=pdas202

agent.channels.kafkaChannel.kafka.producer.timeout.ms =2000

# NOTE(review): batchSize/batchDurationMillis are Kafka *source* properties, not
# documented KafkaChannel properties — they appear to be ignored here. Kept in
# place to avoid changing behavior; confirm against your Flume version and drop.
agent.channels.kafkaChannel.batchSize=15000

agent.channels.kafkaChannel.batchDurationMillis=2000

# Adjust the keystore/truststore paths and passwords to match your environment.
agent.channels.kafkaChannel.kafka.consumer.security.protocol=SSL

agent.channels.kafkaChannel.kafka.consumer.ssl.truststore.location =/home/client/client.truststore.jks

agent.channels.kafkaChannel.kafka.consumer.ssl.truststore.password=test1234

agent.channels.kafkaChannel.kafka.consumer.ssl.keystore.location=/home/client/client.keystore.jks

agent.channels.kafkaChannel.kafka.consumer.ssl.keystore.password=pdas202

agent.channels.kafkaChannel.kafka.consumer.ssl.key.password=pdas202

agent.channels.kafkaChannel.kafka.consumer.timeout.ms = 2000

# ---------- hdfsSink: HDFS sink configuration ----------
agent.sinks.hdfsSink.type = hdfs

# Output path partitioned by source topic and local date; events land under a
# per-topic warehouse directory.
agent.sinks.hdfsSink.hdfs.path = hdfs://nameservice1/user/hive/warehouse/%{topic}/%Y/%m/%d

agent.sinks.hdfsSink.hdfs.writeFormat = TEXT

agent.sinks.hdfsSink.hdfs.fileType = DataStream

# Roll files at ~120 MB or every 60 s; rollCount=0 disables count-based rolls.
agent.sinks.hdfsSink.hdfs.rollSize = 120000000

agent.sinks.hdfsSink.hdfs.rollInterval=60

agent.sinks.hdfsSink.hdfs.rollCount = 0

agent.sinks.hdfsSink.hdfs.batchSize = 15000

# Round the event timestamp down to 1-day boundaries so the %Y/%m/%d escapes
# in hdfs.path produce one directory per day.
agent.sinks.hdfsSink.hdfs.round = true

agent.sinks.hdfsSink.hdfs.roundUnit = day

# Fixed: was "agent.sinks.hdfsSink1.hdfs.roundValue" — a typo ("hdfsSink1"),
# so the setting never applied to this sink.
agent.sinks.hdfsSink.hdfs.roundValue = 1

agent.sinks.hdfsSink.hdfs.threadsPoolSize = 25

# Use the agent's local clock for the path escapes (no timestamp header needed).
agent.sinks.hdfsSink.hdfs.useLocalTimeStamp = true

agent.sinks.hdfsSink.hdfs.minBlockReplicas = 1

# Close files that receive no writes for 30 s.
agent.sinks.hdfsSink.hdfs.idleTimeout = 30

agent.sinks.hdfsSink.hdfs.filePrefix= %{topic}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: