package test_kafka;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
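/**
 * Simple test producer built on the old Scala-based producer API
 * (kafka.javaapi.producer.Producer). It publishes 50 keyed messages
 * to the topic "test_008_kafka" on the broker at 192.168.137.131:6667.
 */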
public class KafkaProducer {
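    /**
     * Builds the producer configuration. All values are passed as strings,
     * as required by the old producer's ProducerConfig.
     */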
    public Properties initProperties(String broker) {
        Properties properties = new Properties();
        // Bootstrap broker list used to fetch topic metadata.
        properties.put("metadata.broker.list", broker);
        // -1: refresh topic metadata only after a send failure, never on a timer.
        properties.put("topic.metadata.refresh.interval.ms", "-1");
        // -1: block instead of dropping messages when the async send queue is full.
        properties.put("queue.enqueue.timeout.ms", "-1");
        // Wait up to 10 seconds for the broker to acknowledge a request.
        properties.put("request.timeout.ms", "10000");
        // -1: require acknowledgement from all in-sync replicas.
        properties.put("request.required.acks", "-1");
        // Back off 500 ms before retrying a failed send.
        properties.put("retry.backoff.ms", "500");
        // Compress message batches with Snappy.
        properties.put("compression.codec", "snappy");
        return properties;
    }
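    /** Entry point: creates a producer instance and runs the send loop. */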
    public static void main(String[] args) {
        new KafkaProducer().start();
    }
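    /**
     * Sends 50 messages synchronously, printing each payload to stdout,
     * then closes the producer.
     */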
    public void start() {
        String topic = "test_008_kafka";
        Properties props = initProperties("192.168.137.131:6667");
        Producer<byte[], byte[]> producer = new Producer<byte[], byte[]>(new ProducerConfig(props));
        AtomicInteger sendCount = new AtomicInteger(0);
        try {
            while (true) {
                int size = sendCount.incrementAndGet();
                // Key and value are plain byte arrays; the key also determines the target partition.
                byte[] key = ("msg_" + size).getBytes();
                byte[] message = ("Test_Kafka_Message_" + size).getBytes();
                producer.send(new KeyedMessage<byte[], byte[]>(topic, key, message));
                System.out.println(new String(message));
                if (size >= 50) {
                    break;
                }
            }
        } finally {
            // Release network resources once the 50 test messages have been sent.
            producer.close();
        }
    }
}