# Input stage: consume two application log topics from the same Kafka cluster.
# decorate_events => true is required so the Kafka record metadata
# ([@metadata][kafka][key]) is available to the mutate filter below.
input {
kafka{
# NOTE(review): first broker is 18.3.10.53 while every other broker in both
# lists is 19.3.10.x -- looks like a typo for 19.3.10.53; confirm against the
# actual cluster before changing.
bootstrap_servers => ["18.3.10.53:9092,18.3.10.54:9092,19.3.10.55:9092,19.3.10.56:9092,19.3.10.57:9092,19.3.10.91:9092,19.3.10.92:9092,19.3.10.93:9092"]
# Start from the newest offset when no committed offset exists.
auto_offset_reset => "latest"
consumer_threads => 5
decorate_events => true
topics => ["mips_monitor_log"]
# Tag used by the output stage to route this stream to its own index.
type => "mips_monitor_log"
}
kafka{
# NOTE(review): 19.3.100.56 deviates from the 19.3.10.x pattern used by every
# other broker -- presumably a typo for 19.3.10.56; verify.
bootstrap_servers => ["19.3.10.53:9092,19.3.10.54:9092,19.3.10.55:9092,19.3.100.56:9092,19.3.10.57:9092,19.3.10.91:9092,19.3.10.92:9092,19.3.10.93:9092"]
auto_offset_reset => "latest"
consumer_threads => 5
decorate_events => true
topics => ["mips_info_log"]
type => "mips_info_log"
}
}
# Filter stage: stitch multi-line log entries together and extract the source
# IP from the Kafka record key.
filter {
# Join any line that does NOT start with a "YYYY-M-D H:M:S" timestamp onto the
# previous event (classic stack-trace / continuation-line handling).
# NOTE(review): the multiline *filter* plugin is not safe with multiple
# pipeline workers / consumer_threads => 5 -- events from interleaved streams
# can be merged incorrectly. The recommended replacement is the multiline
# *codec* configured on each kafka input; confirm plugin availability on this
# Logstash 6.5.1 deployment before relying on this filter.
multiline {
pattern => "^\d{4}-\d{1,2}-\d{1,2}\s\d{1,2}:\d{1,2}:\d{1,2}"
negate => true
what => "previous"
}
mutate {
# Take the Kafka record key (exposed via decorate_events) and split it on commas.
split => ["[@metadata][kafka][key]", ","]
add_field => {
# Store the first element of the split key in the "ip" field.
# NOTE(review): the original comment said the value goes into an "index"
# field, but the code writes "ip" -- presumably the key's first segment is a
# source IP address; verify against the Kafka producer.
"ip" => "%{[@metadata][kafka][key][0]}"
}
}
}
# Output stage: route each event to its own daily Elasticsearch index based on
# the type tag assigned by the input stage.
#
# Fixes:
#  * The conditionals previously compared [type] against "mi_info_log" and
#    "monitor_log", which never match the actual types "mips_info_log" /
#    "mips_monitor_log" set by the kafka inputs -- so NO events were ever
#    written to Elasticsearch. Conditions now match the input type tags.
#  * Bareword string values (user =>admin, password =>xxxxx, ...) are not
#    valid Logstash config syntax; string values are now quoted. (The literal
#    placeholder values themselves are unchanged.)
output {
  if [type] == "mips_info_log" {
    elasticsearch {
      user => "admin"
      password => "xxxxx"
      ssl => true
      # NOTE(review): certificate verification is disabled, which permits
      # man-in-the-middle interception; enable it once the truststore below
      # contains the cluster CA certificate.
      ssl_certificate_verification => false
      truststore => "/cslc/dip002/elk_data/logstash-6.5.1/config/truststore.jks"
      truststore_password => "1deadxxxxxxxxxxxxxx2"
      hosts => ["19.3.10.91:9200","19.3.10.92:9200","19.3.10.93:9200"]
      # Daily index rollover keyed on the event timestamp.
      index => "info_log-%{+YYYY.MM.dd}"
    }
  }
  if [type] == "mips_monitor_log" {
    elasticsearch {
      user => "admin"
      password => "xxxxxx"
      ssl => true
      # NOTE(review): same certificate-verification concern as above.
      ssl_certificate_verification => false
      truststore => "/cslc/dip002/elk_data/logstash-6.5.1/config/truststore.jks"
      truststore_password => "1xxxxxxxxxxxxxxxxxxxxx"
      hosts => ["19.3.10.91:9200","19.3.10.92:9200","19.3.10.93:9200"]
      index => "monitor_log-%{+YYYY.MM.dd}"
    }
  }
}