ELK Log Collection in Practice

I. Project Logging Conventions

  • Format all log messages through a single shared helper method, using Log4j2
  • Keep the fixed parameters (module names, business types) in constants
  • Exceptions must be logged with their stack trace: logger.error("msg", throwable) (see the sketch below)
  • Do not fill methods with large volumes of useless log output
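
  The stack-trace rule in particular means passing the throwable as its own argument rather than concatenating its message into the string. A minimal sketch (the service class is hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OrderService {

    private static final Logger log = LoggerFactory.getLogger(OrderService.class);

    public void createOrder() {
        try {
            // ... business logic ...
        } catch (Exception e) {
            // Wrong: only the exception message survives, the stack trace is lost
            // log.error("create order failed: " + e.getMessage());

            // Right: passing the throwable as the last argument prints the full stack trace
            log.error("create order failed", e);
        }
    }
}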

  The documents that ultimately land in Elasticsearch look like this:

  {
    "_index" : "log-info-2019.05.14",
    "_type" : "doc",
    "_id" : "zg2gtWoBmQkJD4TynpqH",
    "_score" : 1.0,
    "_source" : {
      "source" : "/home/devops/app.log",
      "moduleName" : "订单模块",
      "businessType" : "其他日志",
      "offset" : 9834,
      "level" : "INFO",
      "thread" : "[http-nio-8080-exec-7]",
      "type" : "log",
      "fields" : {
        "type" : "owinfo-service-metrics",
        "host" : "192.168.0.16"
      },
      "msg" : "生成订单成功",
      "class" : "com.owinfo.metrics.controller.OrderController.getOrderDetail():34",
      "@timestamp" : "2019-05-14T17:15:54.227Z",
      "desc" : "生成订单"
    }
  }

1. Log format configuration

  Log messages are formatted through the shared helper method, for example:

log.error(LoggerUtil.fommatMsg(LoggerConstent.OPERATION, LoggerConstent.ORDER,
"获取订单列表", "获取订单列表失败 "), e);

  With Spring Boot 2.0.6, pull in Log4j2 by excluding the default logging starter and adding the Log4j2 starter:

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
    <exclusions>
        <exclusion>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-logging</artifactId>
        </exclusion>
    </exclusions>
</dependency>

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-log4j2</artifactId>
</dependency>

  log4j2.xml then defines the rolling policy and the PatternLayout:

<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="info" name="MyApp" packages="">
    <Properties>
        <Property name="baseDir">/home/devops</Property>
        <!-- date, level, class.method:line, [thread], message -->
        <Property name="pattern">%d %p %c.%M:%L [%t] %m%n</Property>
    </Properties>
    <Appenders>
        <Console name="Console">
            <PatternLayout pattern="${pattern}"/>
        </Console>

        <!-- Roll over at midnight every day; delete archives older than 60 days -->
        <RollingFile name="RollingFile" fileName="${baseDir}/app.log"
                     filePattern="${baseDir}/$${date:yyyy-MM}/app-%d{yyyy-MM-dd}.log.gz">
            <PatternLayout pattern="${pattern}" charset="UTF-8"/>
            <CronTriggeringPolicy schedule="0 0 0 * * ?"/>
            <DefaultRolloverStrategy>
                <Delete basePath="${baseDir}" maxDepth="2">
                    <IfFileName glob="*/app-*.log.gz"/>
                    <IfLastModified age="60d"/>
                </Delete>
            </DefaultRolloverStrategy>
        </RollingFile>
    </Appenders>
    <Loggers>
        <Root level="info">
            <AppenderRef ref="Console"/>
        </Root>

        <!-- additivity="false" keeps application logs out of the root Console appender -->
        <Logger level="info" name="com.owinfo.metrics" additivity="false">
            <AppenderRef ref="RollingFile"/>
        </Logger>
    </Loggers>
</Configuration>

  With this in place, an exception produces a formatted log message like the following:

2019-05-15 09:44:15,103 ERROR com.owinfo.metrics.controller.OrderController.getOrderList:26 [http-nio-8080-exec-2]  ===> 操作日志 订单模块 获取订单列表 获取订单列表失败 
java.lang.ArithmeticException: / by zero
at com.owinfo.metrics.controller.OrderController.getException(OrderController.java:50) ~[classes/:?]
at com.owinfo.metrics.controller.OrderController.getOrderList(OrderController.java:24) [classes/:?]
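
  For reference, the stack trace implies a controller roughly like the one below. This is a hypothetical reconstruction: only the class name, the method names, and the division by zero are taken from the trace; the request mapping is assumed.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class OrderController {

    private static final Logger log = LoggerFactory.getLogger(OrderController.class);

    @GetMapping("/orders") // the actual mapping is not shown in the article
    public String getOrderList() {
        try {
            getException(); // blows up with ArithmeticException: / by zero
        } catch (Exception e) {
            log.error(LoggerUtil.fommatMsg(LoggerConstent.OPERATION, LoggerConstent.ORDER,
                    "获取订单列表", "获取订单列表失败 "), e);
        }
        return "error";
    }

    private int getException() {
        int zero = 0;
        return 1 / zero; // deliberately trigger the exception
    }
}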

II. Log Collection

1. Filebeat collects the logs and merges multi-line entries

  Looking at the log format above, any line that does not start with a date (a stack-trace line, for example) must be merged into the previous event, so Filebeat needs the multiline settings below. The input path and the Kafka output are not shown in the original; they are inferred from the log4j2 baseDir and from the Logstash input in the next step (the fields.type/fields.host values seen in the Elasticsearch document would be added here with Filebeat's fields option):

filebeat.inputs:
- type: log
  paths:
    - /home/devops/app.log
  multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
  multiline.negate: true
  multiline.match: after
output.kafka:
  hosts: ["192.168.0.16:9092"]
  topic: "log"

2. Logstash consumes the log topic from Kafka and ships it to Elasticsearch

input{
   # Read the JSON events that Filebeat published to the Kafka "log" topic
   kafka{
        codec => "json"
        auto_offset_reset => "latest"
        topics => ["log"]
        bootstrap_servers => "192.168.0.16:9092"
        type => "log"
   }

}

filter{
   if [type] == "log" {
          # Drop the Filebeat/Logstash bookkeeping fields we do not need in ES
          mutate{
            remove_field => ["@version", "beat", "input", "prospector", "host"]
          }

          # Split the formatted message into timestamp, level, class and thread,
          # plus the four business fields behind the "===>" marker
          grok{
            match => {
                "message" => "(?<timestamp>(\d*-\d*-\d* \d*:\d*:\d*,\d*))\s*(?<level>([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo|INFO|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?))\s*(?<class>[a-zA-Z.():]*\d*)\s*(?<thread>\[[A-Za-z0-9_.-]+\])\s*===>\s*(?<businessType>[^\s]*)\s*(?<moduleName>[^\s]*)\s*(?<desc>[^\s]*)\s*(?<msg>(.)*)"
            }
          }

          # Use the log's own timestamp (parsed as UTC here) as @timestamp,
          # then drop the raw fields that are no longer needed
          date{
            match => ["timestamp", "yyyy-MM-dd HH:mm:ss,SSS"]
            timezone => "+00:00"
            target => "@timestamp"
          }

          mutate {
            remove_field => ["timestamp", "message"]
          }
   }
}

output {

  # Every event goes to the daily log-info index; events whose businessType
  # matches are additionally written to a monthly log-operation index
  if [type] == "log" {
        elasticsearch {
                index => "log-info-%{+YYYY.MM.dd}"
                hosts => ["http://192.168.0.16:9200"]
                user => "elastic"
                password => "devops123"
                template_overwrite => true
                template => "/home/devops/elk/logstash-6.5.1/configFile/log.json"
        }

        if [businessType] == "其他日志" {
              elasticsearch {
                index => "log-operation-%{+YYYY.MM}"
                hosts => ["http://192.168.0.16:9200"]
                user => "elastic"
                password => "devops123"
                template_overwrite => true
                template => "/home/devops/elk/logstash-6.5.1/configFile/log.json"
              }
        }
  }
}      
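
  As a quick sanity check of the grok expression, the same named-group extraction can be reproduced with a plain Java regex. This sketch simplifies the level alternation to \w+, which is sufficient for the sample line above:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GrokCheck {
    public static void main(String[] args) {
        // Simplified form of the grok expression from the Logstash filter
        Pattern p = Pattern.compile(
                "(?<timestamp>\\d+-\\d+-\\d+ \\d+:\\d+:\\d+,\\d+)\\s*"
              + "(?<level>\\w+)\\s*"
              + "(?<clazz>[a-zA-Z.():]*\\d*)\\s*"
              + "(?<thread>\\[[A-Za-z0-9_.-]+\\])\\s*===>\\s*"
              + "(?<businessType>\\S*)\\s*(?<moduleName>\\S*)\\s*"
              + "(?<desc>\\S*)\\s*(?<msg>.*)");

        String line = "2019-05-15 09:44:15,103 ERROR "
                + "com.owinfo.metrics.controller.OrderController.getOrderList:26 "
                + "[http-nio-8080-exec-2]  ===> 操作日志 订单模块 获取订单列表 获取订单列表失败";

        Matcher m = p.matcher(line);
        if (m.matches()) {
            System.out.println("level        = " + m.group("level"));        // ERROR
            System.out.println("class        = " + m.group("clazz"));
            System.out.println("thread       = " + m.group("thread"));
            System.out.println("businessType = " + m.group("businessType")); // 操作日志
            System.out.println("moduleName   = " + m.group("moduleName"));   // 订单模块
            System.out.println("desc         = " + m.group("desc"));         // 获取订单列表
            System.out.println("msg          = " + m.group("msg"));          // 获取订单列表失败
        }
    }
}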

3. Elasticsearch static template (Logstash uploads it automatically via the template and template_overwrite options above)

{
   "template": "log-*",
   "order": 0,
   "settings": {
      "number_of_replicas": 0,
      "number_of_shards": 1,
      "refresh_interval": "30s"
   },
  "mappings": {
        "doc": {
          "properties": {
                "desc": {
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "type": {
                  "type": "keyword"
                },
                "offset": {
                  "type": "long"
                },
                "source": {
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "fields": {
                  "properties": {
                        "host": {
                          "type": "keyword"
                        },
                        "type": {
                          "type": "keyword"
                        }
                  }
                },
                "class": {
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "thread": {
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "moduleName": {
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "msg": {
                  "type": "text",
                  "analyzer": "ik_max_word",
                  "doc_values": false
                },
                "businessType":{
                  "type": "text",
                  "analyzer": "ik_max_word"
                },
                "@timestamp": {
                     "type": "date"
                },
                "level": {
                  "type": "keyword"
                }
          }
        }
  }
} 