ELK log collection: gathering multiple log files with Filebeat

Important reference: the Chinese-language ELK guide

https://elkguide.elasticsearch.cn/logstash/plugins/input/

1. Preparation

rpm -ivh http://192.168.130.150/ELK/filebeat-6.8.5-x86_64.rpm   # install from the internal repository
curl -XDELETE 'http://192.168.120.81:9200/*-2019.11*'           # delete the old November 2019 indices
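
Before deleting anything, it helps to list the indices that actually match the pattern (a quick check against the same Elasticsearch node, using the standard _cat API):

curl -s 'http://192.168.120.81:9200/_cat/indices/*-2019.11*?v'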
[root@java38 ~]# cat /etc/filebeat/filebeat.yml
filebeat.inputs:
- type: log
  enabled: true
  paths:
  - /testlvnksc/*/*.log
  tags: ["dygabase-app-log"]
  # note: document_type, spool_size and idle_timeout are 5.x options that were removed in
  # filebeat 6.x; use tags (above) or a custom "fields" entry if you need to route in logstash
  # append indented "at ..." / "..." continuation lines and "Caused by:" lines to the previous event
  multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:'
  multiline.negate: false
  multiline.match: after
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 3
output.logstash:
  hosts: ["192.168.120.81:5044"]
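
The multiline pattern above matches the Java stack-trace example from the Filebeat documentation: indented "at ..." / "..." continuation lines and "Caused by:" lines are appended to the preceding event. An illustrative (made-up) entry that would be kept as a single document:

Exception in thread "main" java.lang.IllegalStateException: boom
        at com.example.App.run(App.java:42)
        at com.example.App.main(App.java:10)
Caused by: java.lang.NullPointerException
        at com.example.Dao.load(Dao.java:17)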

A commonly used multiline configuration: merge every line that does not start with "[" into the previous line

[root@java38 ~]# cat /etc/filebeat/filebeat.yml
filebeat.inputs:
- type: log
  enabled: true
  paths:
  - /testlvnksc/*/*.log
  tags: ["dygabase-app-log"]
  # (document_type, spool_size and idle_timeout dropped here as well; removed in filebeat 6.x)
  # every line that does not start with "[" is appended to the previous event
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 3
output.logstash:
  hosts: ["192.168.120.81:5044"]
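
With negate: true and match: after, any line that does not start with "[" is appended to the previous event, so a (made-up) entry like the one below stays together as one document:

[2019-11-07 20:01:31] [ main:0 ]  [ ERROR ]  [request failed]
java.lang.NullPointerException
        at com.example.App.handle(App.java:42)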

 

 

Log paths

/testlvnksc/dygabase-app/spring.log
/testlvnksc/dygabase-service/spring.log

systemctl restart filebeat
ps -ef | grep filebeat
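
To confirm the shipper is actually running and harvesting, check the service status; when installed from the rpm, filebeat's own log normally ends up under /var/log/filebeat/:

systemctl status filebeat
tail -f /var/log/filebeat/filebeat   # look for harvester start messages for your log paths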

Log collection on the ES host (Logstash)

vi file.conf   # when the logs come from different hosts, you can tell them apart with a type

input {
  beats {
    type => "dygabase-app"
    host => "192.168.120.81"
    port => 5044
  }
  beats {
    type => "dygabase-service"
    host => "192.168.120.81"
    # each beats input needs its own port; point each host's filebeat output at the matching one
    port => 5045
  }
}
output {
  if [type] == "dygabase-app" {
    elasticsearch {
      hosts => ["192.168.120.81:9200"]
      index => "dygabase-app-%{+YYYY.MM.dd}"
    }
  }
  if [type] == "dygabase-service" {
    elasticsearch {
      hosts => ["192.168.120.81:9200"]
      index => "dygabase-service-%{+YYYY.MM.dd}"
    }
  }
}
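
If you would rather keep a single beats port, another option is to route on the tags already set in filebeat (e.g. "dygabase-app-log" above) instead of the input type; a minimal sketch, not from the original setup:

output {
  if "dygabase-app-log" in [tags] {
    elasticsearch {
      hosts => ["192.168.120.81:9200"]
      index => "dygabase-app-%{+YYYY.MM.dd}"
    }
  }
}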

 

nohup /usr/share/logstash/bin/logstash -f /root/file.conf &
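
Before backgrounding Logstash, the pipeline file can be syntax-checked with the standard --config.test_and_exit flag:

/usr/share/logstash/bin/logstash -f /root/file.conf --config.test_and_exit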

Splitting fields

filter {
  mutate {
    # split the raw line on "|" so that "message" becomes an array
    split => ["message", "|"]
  }
  mutate {
    add_field => {
      "userId" => "%{[message][1]}"
      "vissit" => "%{[message][2]}"
    }
  }
  mutate {
    convert => {
      "userId" => "string"
      "vissit" => "string"
    }
  }
}
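
As an illustration (the line below is made up, not from the original logs), a message such as

app|1001|/index/home

would end up with userId = "1001" and vissit = "/index/home"; the indices are zero-based, so %{[message][0]} is the part before the first "|".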

 

Logstash configuration file

 

[root@java134 ~]# cat file.conf
input {
  beats {
    host => "192.168.130.134"
    port => 5044
  }
}
output {
  elasticsearch {
    hosts => ["192.168.130.134:9200"]
    index => "dylog-%{+YYYY.MM.dd}"
  }
}
nohup /usr/share/logstash/bin/logstash -f /root/file.conf &

 

Filebeat configuration

[root@java134 filebeat]# cat /etc/filebeat/filebeat.yml
filebeat.inputs:
- type: log
  enabled: true
  paths:
  - /logstest/*log2019-*
  multiline.pattern: '^\[[0-9]{4}-[0-9]{2}-[0-9]{2}'   # a new event starts at each line beginning with a [YYYY-MM-DD timestamp; everything else is merged
  multiline.negate: true
  multiline.match: after
  multiline.max_lines: 2000   # lines beyond 2000 in a single event are dropped (default 500)
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 3
output.logstash:
  hosts: ["192.168.130.134:5044"]
/usr/bin/filebeat -e -c filebeat.yml -d "publish"   # run in the foreground with publish debug output to check what is collected
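
Filebeat 6.x also ships test subcommands that validate the configuration and the connection to the Logstash output:

/usr/bin/filebeat test config -c /etc/filebeat/filebeat.yml
/usr/bin/filebeat test output -c /etc/filebeat/filebeat.yml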

 

Log format

[2019-11-07 20:01:30] [ main:0 ]  [ DEBUG ]  [Adding PropertySource 'servletConfigInitParams' with lowest search precedence]
[2019-11-07 20:01:30] [ main:3 ]  [ DEBUG ]  [Adding PropertySource 'servletContextInitParams' with lowest search precedence]
[2019-11-07 20:01:30] [ main:11 ]  [ DEBUG ]  [Adding PropertySource 'systemProperties' with lowest search precedence]
[2019-11-07 20:01:30] [ main:12 ]  [ DEBUG ]  [Adding PropertySource 'systemEnvironment' with lowest search precedence]
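
To break these lines into structured fields on the Logstash side, a grok filter along these lines could be added to the pipeline (a sketch, not part of the original setup; the field names log_time, thread, level and msg are illustrative):

filter {
  grok {
    match => {
      "message" => "\[%{TIMESTAMP_ISO8601:log_time}\] \[ %{DATA:thread} \]\s+\[ %{LOGLEVEL:level} \]\s+\[%{GREEDYDATA:msg}\]"
    }
  }
  date {
    # parse the bracketed timestamp into @timestamp
    match => ["log_time", "yyyy-MM-dd HH:mm:ss"]
  }
}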

 
