使用kafka作为生产者生产数据到HBase

配置文件

# Flume agent component declarations
agent.sources = r1
agent.sinks = k1
agent.channels = c1

# --- Source: consume events from Kafka ---
agent.sources.r1.type = org.apache.flume.source.kafka.KafkaSource
agent.sources.r1.kafka.bootstrap.servers = 192.168.80.128:9092,192.168.80.129:9092,192.168.80.130:9092
agent.sources.r1.kafka.topics = 1713
agent.sources.r1.migrateZookeeperOffsets = false
agent.sources.r1.kafka.consumer.timeout.ms = 1000
# agent.sources.r1.kafka.consumer.group.id = consumer-group

# --- Channel: in-memory event buffer ---
agent.channels.c1.type = memory
agent.channels.c1.capacity = 1000
agent.channels.c1.transactionCapacity = 100
agent.channels.c1.byteCapacityBufferPercentage = 60
# NOTE(review): byteCapacity is measured in bytes; 1280 bytes looks very small
# for a channel holding up to 1000 events and will likely reject puts under
# load -- confirm the intended value.
agent.channels.c1.byteCapacity = 1280
agent.channels.c1.keep-alive = 60

# --- Sink: write events to HBase ---
# FIX: the original lines carried "//..." inline comments; properties files
# have no inline-comment syntax, so the comment text became part of each value
# (e.g. sink type was literally "asynchbase//sink类型到hbase"), breaking the
# sink. Comments now live on their own lines.
# sink type: AsyncHBaseSink
agent.sinks.k1.type = asynchbase
# target HBase table
agent.sinks.k1.table = tb_words3
# column family
agent.sinks.k1.columnFamily = words
# column qualifier that receives the event payload
agent.sinks.k1.serializer.payloadColumn = wd
agent.sinks.k1.serializer = org.apache.flume.sink.hbase.SimpleAsyncHbaseEventSerializer

# FIX: removed a duplicate re-declaration of channel c1 (repeated type/capacity
# with identical values) that also misspelled the channel name in
# "agent.channels.ca.transactionCapacity" -- channel c1 is fully configured
# above, so the block was redundant and the "ca" key configured nothing.

# Bind the source and sink to the channel
agent.sources.r1.channels = c1
agent.sinks.k1.channel = c1

posted @ 2019-11-13 14:19  张先森🌛  阅读(285)  评论(0)    收藏  举报