ElasticSearch 5.5.0 - 05: nginx_access, nginx_error, and system_secure logs

1. Configure nginx logging in JSON format

        # nginx json
        log_format access '{"@timestamp":"$time_iso8601",'
                          '"@version":"1",'
                          '"client_ip":"$remote_addr",'
                          '"server_ip":"$server_addr",'
                          '"request_method":"$request_method",'
                          '"domain":"$host",'
                          '"url":"$uri",'
                          '"request_uri":"$scheme://$http_host$request_uri",'
                          '"status":"$status",'
                          '"size_bytes":$body_bytes_sent,'
                          '"http_referer":"$http_referer",'
                          '"http_x_forwarded_for":"$http_x_forwarded_for",'
                          '"http_version":"$server_protocol",'
                          '"user_agent":"$http_user_agent",'
                          '"response_time":$request_time,'
                          '"upstream_response_time":$upstream_response_time,'
                          '"upstream_addr":"$upstream_addr",'
                          '"upstream_status":"$upstream_status",'
                          '"upstream_cache_status":"$upstream_cache_status"'
                          '}';
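
With this log_format, nginx writes each request as a single-line JSON object. A made-up example entry, pretty-printed here for readability (all values are illustrative):

        {
          "@timestamp": "2017-07-25T11:02:36+08:00",
          "@version": "1",
          "client_ip": "203.0.113.10",
          "server_ip": "10.1.8.21",
          "request_method": "GET",
          "domain": "www.example.com",
          "url": "/index.html",
          "request_uri": "http://www.example.com/index.html?id=1",
          "status": "200",
          "size_bytes": 612,
          "http_referer": "-",
          "http_x_forwarded_for": "-",
          "http_version": "HTTP/1.1",
          "user_agent": "Mozilla/5.0",
          "response_time": 0.012,
          "upstream_response_time": 0.010,
          "upstream_addr": "10.1.8.22:8080",
          "upstream_status": "200",
          "upstream_cache_status": "MISS"
        }

Note that size_bytes, response_time and upstream_response_time are written unquoted; for a request served without an upstream, nginx logs "-" for $upstream_response_time, which makes that line invalid JSON, so either quote those fields or expect the occasional _jsonparsefailure tag in Logstash.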

2. Configure the filebeat.yml file

# vim /etc/filebeat/filebeat.yml
#=========================== Filebeat prospectors =============================

filebeat.prospectors:
# ================================nginx www_access_log=========================
# www_access.log from nginx
- input_type: log
  paths:
    - /usr/local/nginx/logs/www_access.log
  document_type: lb01-www-access
# ================================nginx admin_access_log=======================

# admin_access.log from nginx
- input_type: log
  paths:
    - /usr/local/nginx/logs/admin_access.log
  document_type: lb01-admin-access
# ==============================nginx error_log================================

# error.log from nginx
- input_type: log
  paths:
    - /usr/local/nginx/logs/error.log
  document_type: lb01-nginx-error
# ===============================system secure log==============================

# system secure log (/var/log/secure)
- input_type: log
  paths:
    - /var/log/secure
  document_type: lb01-secure-login
  include_lines: ["Accepted", "Failed"]
# ==============================================================================


#----------------------------- Logstash output --------------------------------
# Load-balancing mode: https://www.elastic.co/guide/en/beats/filebeat/current/load-balancing.html
output.logstash:
  # The Logstash hosts
  hosts: ["10.1.8.34:5044"]
  #hosts: ["10.1.8.33:5044","10.1.8.34:5044"]
  #loadbalance: true
  #worker: 2

#------------------------------drop fields-------------------------------------
# Reference:
# https://www.elastic.co/guide/en/beats/packetbeat/current/drop-fields.html
# Drop the fields below; the @timestamp and type fields cannot be dropped
processors:
- drop_fields:
    fields: ["input_type","beat.hostname","beat.name","beat.version","offset"]
#================================ Logging =====================================

logging.level: info
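
After editing, the configuration can be syntax-checked and the service restarted. A minimal sketch, assuming Filebeat 5.x installed from the RPM with the filebeat binary on PATH (the hostname lb01 is illustrative; newer versions replace -configtest with "filebeat test config"):

[root@lb01 ~]# filebeat -c /etc/filebeat/filebeat.yml -configtest -e
[root@lb01 ~]# systemctl restart filebeat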

3. Logstash configuration - nginx access log

[root@logstash02 ~]# vim /etc/logstash/conf.d/lb01-web-access.conf 
# Date: 2017-07-25
# Logs are shipped here by Filebeat running on each application server
# Conditional operators: == (equals), != (not equal), < (less than), > (greater than), <= (less than or equal), >= (greater than or equal), =~ (regex match), !~ (regex non-match), in, not in, and, or, nand, xor, () (compound expression), !() (negate a compound expression)
# -------------------------------------------input---------------------------------------------------------
input {
        beats {
                port => "5044"
                #codec => "json"
        }
}

# -------------------------------------------filter---------------------------------------------------------
# Log processing and parsing
filter {
        # The commented-out blocks below are for logs that are not in JSON format; use grok to parse them
        #grok {
                #patterns_dir => ["/etc/logstash/patterns/nginx"]
                #match => { "message" => "%{NGINXACCESS}" }
        #}
        # For JSON logs with nested JSON objects, see: http://udn.yyuap.com/doc/logstash-best-practice-cn/filter/json.html
        #if [type] =~ "(test|test01)" {
                #json {
                        #source => "message"
                        #target => "jsoncontent"
                #}
        #}

        # Check the log type, parse the JSON, do a GeoIP lookup (adding coordinates), convert data types, remove fields, and parse the user agent
        if [type] =~ "(lb01-www-access|lb01-admin-access)" {
                # 1. Parse the message as JSON
                json {
                        source => "message"
                }

                # 2. Do the GeoIP lookup only when the client address is not loopback or RFC1918 private space
                if [client_ip] !~ "^127\.|^192\.168\.|^172\.1[6-9]\.|^172\.2[0-9]\.|^172\.3[01]\.|^10\." {
                # Resolve the IP to a location: source field, target field (geoip), optional database path, plus longitude and latitude added to [geoip][coordinates]
                geoip {
                        source => "client_ip"
                        target => "geoip"
                        #database => "/etc/logstash/GeoLiteCity.dat"
                        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
                        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
                        }
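                # The two add_field calls above build [geoip][coordinates] as an
                # array of [longitude, latitude]; to plot it on a Kibana map the
                # field must be mapped as geo_point in the index template
                # (assumption: a template covering the filebeat-* indices exists)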
                # 3. Data modification: type conversion (float/integer), field replacement, removal
                mutate {
                        convert => [ "[geoip][coordinates]", "float" ]
                        #convert => [ "response","integer" ]
                        #convert => [ "bytes","integer" ]
                        #replace => { "type" => "nginx_access" }
                        }
                }

                # 4. Remove the raw message field
                mutate {
                        remove_field => "message"
                }

                # 5. If user_agent is not empty, parse the user-agent string
                if [user_agent] != "-" {
                        useragent {
                        source => "user_agent"
                        target => "ua"
                        }
                }
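                # With target => "ua", the useragent filter stores the parsed
                # browser and OS details in fields such as [ua][name], [ua][os]
                # and [ua][device], leaving the raw user_agent string in place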
        }
}
# ------------------------------------------output-----------------------------------------------------------
# Output logs to Elasticsearch
output {
        if [type] == "lb01-www-access" {
                elasticsearch {
                        hosts => ["10.18.45:9200","10.1.8.46:9200"]
                        #user => "elastic"
                        #password => "changeme"
                        index => "filebeat-lb01-www-access-%{+YYYY.MM.dd}"
                }
                # Standard output, for debugging
                #stdout { codec => "rubydebug" }        
        }

        if [type] == "lb01-admin-access" {
                elasticsearch {
                        hosts => ["10.18.45:9200","10.1.8.46:9200"]
                        #user => "elastic"
                        #password => "changeme"
                        index => "filebeat-lb01-admin-access-%{+YYYY.MM.dd}"
                }
                # Standard output, for debugging
                #stdout { codec => "rubydebug" }        
        }
}
# -------------------------------------------------------------------------------------------------------
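
Before reloading Logstash, each pipeline file can be checked for syntax errors. A minimal sketch, assuming the default Logstash 5.x install path (adjust for your setup); the same check applies to the lb01-nginx-error.conf and lb01-secure-login.conf files in the following sections:

[root@logstash02 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/lb01-web-access.conf --config.test_and_exit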
            

4. Logstash configuration - nginx error log

[root@logstash02 ~]# vim /etc/logstash/conf.d/lb01-nginx-error.conf
# Date: 2017-07-25
# Logs are shipped here by Filebeat running on each application server
# Conditional operators: == (equals), != (not equal), < (less than), > (greater than), <= (less than or equal), >= (greater than or equal), =~ (regex match), !~ (regex non-match), in, not in, and, or, nand, xor, () (compound expression), !() (negate a compound expression)
# -------------------------------------------input---------------------------------------------------------
input {
        beats {
                port => "5044"
                #codec => "json"
        }
}

# -------------------------------------------filter---------------------------------------------------------
# Log processing and parsing
filter {
        if [type] =~ "(lb01-nginx-error)" {
                grok {
                        match => { "message" => "(?<datetime>\d\d\d\d/\d\d/\d\d \d\d:\d\d:\d\d) \[(?<err_severity>\w+)\] \S+: \*\d+ (?<err_message>[^,]+), (?<errinfo>.*)$" }
                        #match => { "message" => "(?<datetime>\d\d\d\d/\d\d/\d\d \d\d:\d\d:\d\d) \[(?<errtype>\w+)\] \S+: \*\d+ (?<errmsg>[^,]+), (?<errinfo>.*)$" }
                }
                mutate {
                        rename => [ "host", "fromhost" ]
                        gsub => [ "err_message", "too large body: \d+ bytes", "too large body" ]
                }
                if [errinfo]
                {
                        ruby {
                        code => "new_event = LogStash::Event.new(Hash[event.get('errinfo').split(', ').map{|l| l.split(': ')}])
                                new_event.remove('@timestamp')
                                event.append(new_event)"
                        }
                }
                mutate {
                        remove_field => "message"
                        remove_field => "errinfo"
                        remove_field => "datetime"
                }
                #grok {
                        #match => { "request" => '"%{WORD:verb} %{URIPATH:urlpath}(?:\?%{NGX_URIPARAM:urlparam})?(?: HTTP/%{NUMBER:httpversion})"' }
                        #match => { 
                                #"message" => "(?<time>\d{4}/\d{2}/\d{2}\s{1,}\d{2}:\d{2}:\d{2})\s{1,}\[%{DATA:err_severity}\]\s{1,}(%{NUMBER:pid:int}#%{NUMBER}:\s{1,}\*%{NUMBER}|\*%{NUMBER}) %{DATA:err_message}(?:,\s{1,}client:\s{1,}(?<client_ip>%{IP}|%{HOSTNAME}))(?:,\s{1,}server:\s{1,}%{IPORHOST:server})(?:, request: %{QS:request})?(?:, host: %{QS:client_ip})?(?:, referrer: \"%{URI:referrer})?"
                                #}
                        #patterns_dir => ["/etc/logstash/patterns"]
                        #remove_field => [ "message", "errinfo" ]
                        #remove_field => [ "message", "errinfo", "request" ]
                #}
        }
}
# ------------------------------------------output-----------------------------------------------------------
# Output logs to Elasticsearch
output {
        if [type] == "lb01-nginx-error" {
                elasticsearch {
                        hosts => ["10.18.45:9200","10.1.8.46:9200"]
                        #user => "elastic"
                        #password => "changeme"
                        index => "filebeat-lb01-nginx-error-%{+YYYY.MM.dd}"
                }
                # Standard output, for debugging
                #stdout { codec => "rubydebug" }        
        }
}
# -------------------------------------------------------------------------------------------------------
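
To illustrate the filter above, here is a made-up error-log line and, roughly, what it turns into (all values are illustrative):

# Input line:
2017/07/25 14:21:03 [error] 13263#0: *12345 open() "/usr/local/nginx/html/favicon.ico" failed (2: No such file or directory), client: 203.0.113.10, server: www.example.com, request: "GET /favicon.ico HTTP/1.1", host: "www.example.com"

# grok extracts (approximately):
#   err_severity => "error"
#   err_message  => open() "/usr/local/nginx/html/favicon.ico" failed (2: No such file or directory)
#   errinfo      => client: 203.0.113.10, server: www.example.com, request: "GET /favicon.ico HTTP/1.1", host: "www.example.com"
# The ruby filter then splits errinfo on ", " and ": ", turning client, server,
# request and host into separate event fields, and the final mutate removes the
# message, errinfo and datetime fields.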

5. Logstash configuration - system secure (login) log

# Date: 2017-07-25
# Logs are shipped here by Filebeat running on each application server
# -------------------------------------------input---------------------------------------------------------
input {
        beats {
                port => "5044"
                #codec => "json"
        }
}

# -------------------------------------------filter---------------------------------------------------------
# Log processing and parsing
filter {
        # The secure log is not in JSON format, so use grok to parse it
        if [type] =~ "(lb01-secure-login)" {
                grok {
                        patterns_dir => ["/etc/logstash/patterns/grok"]
                        match => { "message" => "%{SECURELOG}" }
                        #match => { "message" => "%{SYSLOGBASE2}" }
                        #match => { "message" => "%{SYSLOGPAMSESSION}" }
                }
        }
        geoip {
                source => "IP"
                fields => ["city_name"]
        }

        # Add tags
        if ([status] == "Accepted") {
                mutate {
                        add_tag => ["Success"]
                }
        }
        else if ([status] == "Failed") {
                mutate {
                        add_tag => ["Failed"]
                }
        }
}
# ------------------------------------------output-----------------------------------------------------------
# Output logs to Elasticsearch
output {
        if [type] =~ "(lb01-secure-login)" {
                elasticsearch {
                        hosts => ["10.18.45:9200","10.1.8.46:9200"]
                        #user => "elastic"
                        #password => "changeme"
                        index => "filebeat-lb01-secure-login-%{+YYYY.MM.dd}"
                }
                # Standard output, for debugging
                #stdout { codec => "rubydebug" }        
        }
}
# -------------------------------------------------------------------------------------------------------
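
As a sanity check, here is a made-up /var/log/secure line and the fields the SECURELOG pattern from section 6 extracts from it (IP and hostname are illustrative):

# Input line:
Jul 31 16:34:05 zabbix sshd[22729]: Failed password for root from 203.0.113.25 port 16597 ssh2

# SECURELOG extracts (approximately):
#   program => "sshd"
#   pid     => "22729"
#   status  => "Failed"
#   USER    => "root"
#   IP      => "203.0.113.25"
# The filter then adds the "Failed" tag, and geoip adds geoip.city_name when
# the IP is found in the GeoIP database.

The pattern can also be exercised in isolation with a throwaway pipeline on stdin (assuming the install paths used elsewhere in this post):

[root@logstash02 ~]# echo 'Jul 31 16:34:05 zabbix sshd[22729]: Failed password for root from 203.0.113.25 port 16597 ssh2' | \
  /usr/share/logstash/bin/logstash -e 'input { stdin {} } filter { grok { patterns_dir => ["/etc/logstash/patterns/grok"] match => { "message" => "%{SECURELOG}" } } } output { stdout { codec => rubydebug } }'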

6. Logstash grok pattern file

[root@logstash02 ~]# vim /etc/logstash/patterns/grok

# grok patterns for the login (secure) log; sample lines to parse:
# Jul 31 16:34:05 zabbix sshd[22729]: Accepted password for root from 202.105.145.83 port 16597 ssh2
# Jul 31 16:34:05 zabbix sshd[22729]: Failed password for root from 202.105.145.82 port 16597 ssh2
# Pattern currently in use:
SECURELOG %{WORD:program}\[%{DATA:pid}\]: %{WORD:status} password for ?(invalid user)? %{WORD:USER} from %{DATA:IP} port
SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource}+(?: %{SYSLOGPROG}:|)
# Sample line:
# Aug  2 12:15:04 zabbix sshd[3436]: pam_unix(sshd:session): session closed for user root
SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})?

# MySQL slow-query log
# https://www.elastic.co/guide/en/beats/filebeat/current/exported-fields-mysql.html#_mysql_error_thread_id
MYSQLSLOW (?m)^# User@Host: %{USER:user}\[[^\]]+\] @ (?:(?<clienthost>\S*) )?\[(?:%{IP:clientip})?\]\s*Id: %{NUMBER:id:int}\s+# Query_time: %{NUMBER:query_time:float}\s+Lock_time: %{NUMBER:lock_time:float}\s+Rows_sent: %{NUMBER:rows_sent:int}\s+Rows_examined: %{NUMBER:rows_examined:int}\s*(?:use %{DATA:database};\s*)?SET timestamp=%{NUMBER:timestamp};\s*(?<query>(?<action>\w+)\s+.*)

# nginx error log

 
