kafka学习之综合运用elk日志系统搭建
使用kafka、elasticsearch搭建一个elk日志管理平台:
流程图:

分别安装:jdk、zookeeper、kafka、logstash、es、kibana
logstash安装和配置:
https://www.cnblogs.com/codehello/articles/16467021.html
logstash配置文件:logstash_kafka.conf
# Sample Logstash configuration for creating a simple # Beats -> Logstash -> Elasticsearch pipeline. input { kafka { bootstrap_servers=>"127.0.0.1:9092" topics=>["kafka-log"] } } output { stdout { codec => rubydebug} elasticsearch { hosts => ["http://localhost:9200"] index => "kafka-log" #user => "elastic" #password => "changeme" } }
启动:进入Logstash的bin目录执行命令
bin\logstash.bat -f config\logstash_kafka.conf
启动报错:
报错:Logstash could not be started because there is already another instance using the configured data directory. If you wish to run multiple instances, you must change the "path.data" setting.
原因:之前运行的instance有缓冲,保存在path.data里面有.lock文件,删除掉就可以。
解决:在 logstash.yml 文件中找到 Data path 的路径(默认在安装目录的data目录下)
1,pom.xml
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <parent> <artifactId>spring-boot</artifactId> <groupId>com.bsoft</groupId> <version>1.0-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> <artifactId>springboot-elk</artifactId> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>1.8</source> <target>1.8</target> </configuration> </plugin> </plugins> </build> <properties> <maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.target>1.8</maven.compiler.target> </properties> <dependencies> <!--kafka--> <dependency> <groupId>org.springframework.kafka</groupId> <artifactId>spring-kafka</artifactId> </dependency> <!--es--> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-data-elasticsearch</artifactId> </dependency> </dependencies> </project>
2,配置文件application.yml
spring: elasticsearch: rest: uris: 127.0.0.1:9200 jackson: time-zone: GMT+8 date-format: yyyy-MM-dd HH:mm:ss kafka: bootstrap-servers: 127.0.0.1:9092 producer: batch-size: 300 retries: 1 buffer-memory: 33554432 key-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer acks: 1 consumer: auto-offset-reset: latest #是否开启自动提交,消费之后自动提交offset enable-auto-commit: false key-deserializer: org.apache.kafka.common.serialization.StringDeserializer #value的解码方式 value-deserializer: org.apache.kafka.common.serialization.StringDeserializer group-id: bsoft_group max-poll-records: 200 #批量消费消息条数 listener: concurrency: 10 #线程数 missing-topics-fatal: false #默认false,如果不存在topic,项目启动报错 ack-mode: manual server: port: 9999
3,kafka配置文件 KafkaConfig.java
package com.bsoft.elk.config;

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Declares the Kafka topic that application logs are published to and that
 * Logstash subscribes to.
 */
@Configuration
public class KafkaConfig {

    /** Topic name shared with the Logstash input configuration. */
    private static final String LOG_TOPIC = "kafka-log";
    /** Number of partitions for the log topic. */
    private static final int PARTITION_COUNT = 3;
    /** Replication factor (single-broker setup). */
    private static final short REPLICATION_FACTOR = 1;

    /**
     * Registers the log topic so Spring Kafka creates it on startup if absent.
     *
     * @return the topic definition
     */
    @Bean
    public NewTopic newTopic() {
        return new NewTopic(LOG_TOPIC, PARTITION_COUNT, REPLICATION_FACTOR);
    }
}
4,ES工具类 ElasticSearchUtils.java
package com.bsoft.elk.util;

import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * Thin CRUD/search wrapper around the Elasticsearch {@link RestHighLevelClient}.
 * The client is built from the comma-separated "host:port" list configured in
 * {@code spring.elasticsearch.rest.uris}.
 */
@Slf4j
@Component
public class ElasticSearchUtils {

    @Value("${spring.elasticsearch.rest.uris}")
    private String uris;

    // For a secured cluster, also inject credentials:
    // @Value("${spring.elasticsearch.rest.username}") private String username;
    // @Value("${spring.elasticsearch.rest.password}") private String password;

    private RestHighLevelClient restHighLevelClient;

    /**
     * Builds the REST client after dependency injection has completed.
     */
    @PostConstruct
    private void init() {
        try {
            if (restHighLevelClient != null) {
                restHighLevelClient.close();
            }
            if (StringUtils.isBlank(uris)) {
                log.error("spring.elasticsearch.rest.uris is blank");
                return;
            }
            // Parse "host:port[,host:port...]" into HttpHost entries. A list is used
            // so skipped entries never leave null holes (the original fixed-size
            // array could contain nulls and break RestClient.builder()).
            String[] uriArr = uris.split(",");
            List<HttpHost> httpHosts = new ArrayList<>(uriArr.length);
            for (String uri : uriArr) {
                // FIX: the original tested the whole `uris` string here instead of
                // the current entry, so blank entries were never actually skipped.
                if (StringUtils.isBlank(uri)) {
                    continue;
                }
                try {
                    // Split out host and port.
                    String[] split = uri.split(":");
                    String host = split[0];
                    String port = split[1];
                    httpHosts.add(new HttpHost(host, Integer.parseInt(port), "http"));
                } catch (Exception e) {
                    log.error(e.getMessage());
                }
            }
            RestClientBuilder builder = RestClient.builder(httpHosts.toArray(new HttpHost[0]));
            // For basic-auth clusters, uncomment:
            // CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            // credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
            // builder.setHttpClientConfigCallback(f -> f.setDefaultCredentialsProvider(credentialsProvider));
            restHighLevelClient = new RestHighLevelClient(builder);
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    /**
     * FIX: close the underlying HTTP client on shutdown — the original never
     * released it (connection/thread leak on context close).
     */
    @PreDestroy
    private void destroy() {
        if (restHighLevelClient != null) {
            try {
                restHighLevelClient.close();
            } catch (IOException e) {
                log.error(e.getMessage());
            }
        }
    }

    /**
     * Creates an index.
     *
     * @param index index name
     * @return true if the cluster acknowledged the creation; false if it already exists
     * @throws IOException on transport failure
     */
    public boolean createIndex(String index) throws IOException {
        if (isIndexExist(index)) {
            log.error("Index is exits!");
            return false;
        }
        CreateIndexRequest request = new CreateIndexRequest(index);
        CreateIndexResponse response = restHighLevelClient.indices()
                .create(request, RequestOptions.DEFAULT);
        return response.isAcknowledged();
    }

    /**
     * Checks whether an index exists.
     *
     * @param index index name
     * @return true if the index exists
     * @throws IOException on transport failure
     */
    public boolean isIndexExist(String index) throws IOException {
        GetIndexRequest request = new GetIndexRequest(index);
        return restHighLevelClient.indices().exists(request, RequestOptions.DEFAULT);
    }

    /**
     * Deletes an index.
     *
     * @param index index name
     * @return true if the cluster acknowledged the deletion; false if it does not exist
     * @throws IOException on transport failure
     */
    public boolean deleteIndex(String index) throws IOException {
        if (!isIndexExist(index)) {
            log.error("Index is not exits!");
            return false;
        }
        DeleteIndexRequest request = new DeleteIndexRequest(index);
        AcknowledgedResponse delete = restHighLevelClient.indices()
                .delete(request, RequestOptions.DEFAULT);
        return delete.isAcknowledged();
    }

    /**
     * Inserts or updates a document: adds with a random id when {@code id} is null,
     * updates when the id already exists, otherwise adds with the given id.
     *
     * @param object data to store (serialized as JSON)
     * @param index  target index
     * @param id     document id, may be null
     * @return the resulting document id
     * @throws IOException on transport failure
     */
    public String submitData(Object object, String index, String id) throws IOException {
        if (null == id) {
            return addData(object, index);
        }
        if (this.existsById(index, id)) {
            return this.updateDataByIdNoRealTime(object, index, id);
        } else {
            return addData(object, index, id);
        }
    }

    /**
     * Adds a document with a caller-supplied id (falls back to a random id when
     * null, and to an update when the id already exists).
     *
     * @param object data to store (serialized as JSON)
     * @param index  target index
     * @param id     document id, may be null
     * @return the resulting document id
     * @throws IOException on transport failure
     */
    public String addData(Object object, String index, String id) throws IOException {
        if (null == id) {
            return addData(object, index);
        }
        if (this.existsById(index, id)) {
            return this.updateDataByIdNoRealTime(object, index, id);
        }
        IndexRequest request = new IndexRequest(index);
        request.id(id);
        request.timeout(TimeValue.timeValueSeconds(1));
        // Serialize the payload as a JSON source.
        request.source(JSON.toJSONString(object), XContentType.JSON);
        IndexResponse response = restHighLevelClient.index(request, RequestOptions.DEFAULT);
        log.info("添加数据成功 索引为: {}, response 状态: {}, id为: {}",
                index, response.status().getStatus(), response.getId());
        return response.getId();
    }

    /**
     * Adds a document under a freshly generated random id.
     *
     * @param object data to store
     * @param index  target index
     * @return the generated document id
     * @throws IOException on transport failure
     */
    public String addData(Object object, String index) throws IOException {
        return addData(object, index, UUID.randomUUID().toString().replaceAll("-", "").toUpperCase());
    }

    /**
     * Deletes a document by id.
     *
     * @param index target index
     * @param id    document id
     * @return the id reported by the delete response
     * @throws IOException on transport failure
     */
    public String deleteDataById(String index, String id) throws IOException {
        DeleteRequest request = new DeleteRequest(index, id);
        DeleteResponse deleteResponse = restHighLevelClient.delete(request, RequestOptions.DEFAULT);
        return deleteResponse.getId();
    }

    /**
     * Updates a document by id (no refresh guarantee).
     *
     * @param object new data (serialized as JSON)
     * @param index  target index
     * @param id     document id
     * @return the id reported by the update response
     * @throws IOException on transport failure
     */
    public String updateDataById(Object object, String index, String id) throws IOException {
        UpdateRequest updateRequest = new UpdateRequest(index, id);
        updateRequest.timeout("1s");
        updateRequest.doc(JSON.toJSONString(object), XContentType.JSON);
        UpdateResponse updateResponse = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
        log.info("索引为: {}, id为: {},updateResponseID:{}, 更新数据成功", index, id, updateResponse.getId());
        return updateResponse.getId();
    }

    /**
     * Updates a document by id, waiting for the refresh so the change is
     * immediately visible to searches ("wait_for" refresh policy).
     *
     * @param object new data (serialized as JSON)
     * @param index  target index
     * @param id     document id
     * @return the id reported by the update response
     * @throws IOException on transport failure
     */
    public String updateDataByIdNoRealTime(Object object, String index, String id) throws IOException {
        UpdateRequest updateRequest = new UpdateRequest(index, id);
        // Block until the change is searchable.
        updateRequest.setRefreshPolicy("wait_for");
        updateRequest.timeout("1s");
        updateRequest.doc(JSON.toJSONString(object), XContentType.JSON);
        UpdateResponse updateResponse = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
        log.info("索引为: {}, id为: {},updateResponseID:{}, 实时更新数据成功", index, id, updateResponse.getId());
        return updateResponse.getId();
    }

    /**
     * Fetches a document by id.
     *
     * @param index  target index
     * @param id     document id
     * @param fields comma-separated fields to return; null/empty returns all fields
     * @return the document source as a map (null if not found)
     * @throws IOException on transport failure
     */
    public Map<String, Object> searchDataById(String index, String id, String fields) throws IOException {
        GetRequest request = new GetRequest(index, id);
        if (StringUtils.isNotEmpty(fields)) {
            // Restrict the returned _source to the requested fields only.
            request.fetchSourceContext(new FetchSourceContext(true, fields.split(","), Strings.EMPTY_ARRAY));
        }
        GetResponse response = restHighLevelClient.get(request, RequestOptions.DEFAULT);
        return response.getSource();
    }

    /**
     * Checks whether a document exists, without fetching its source.
     *
     * @param index target index
     * @param id    document id
     * @return true if the document exists
     * @throws IOException on transport failure
     */
    public boolean existsById(String index, String id) throws IOException {
        GetRequest request = new GetRequest(index, id);
        // Skip _source and stored fields — existence check only.
        request.fetchSourceContext(new FetchSourceContext(false));
        request.storedFields("_none_");
        return restHighLevelClient.exists(request, RequestOptions.DEFAULT);
    }

    /**
     * Bulk-inserts documents.
     *
     * <p>NOTE: preserves the original contract — returns {@code hasFailures()},
     * i.e. {@code false} means every item succeeded and {@code true} means at
     * least one item failed (or the request was sent and reported failures).
     *
     * @param index   target index
     * @param objects documents to insert (each serialized as JSON)
     * @return true if the bulk response reports failures; false on full success
     *         or when the request itself threw an IOException
     */
    public boolean bulkPost(String index, List<?> objects) {
        BulkRequest bulkRequest = new BulkRequest();
        BulkResponse response = null;
        // Callers should keep batches well below ~200k items.
        for (Object object : objects) {
            IndexRequest request = new IndexRequest(index);
            request.source(JSON.toJSONString(object), XContentType.JSON);
            bulkRequest.add(request);
        }
        try {
            response = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null != response && response.hasFailures();
    }

    /**
     * Exposes the low-level REST client for raw requests.
     *
     * @return the underlying {@link RestClient}
     */
    public RestClient getLowLevelClient() {
        return restHighLevelClient.getLowLevelClient();
    }

    /**
     * Converts search hits to maps, substituting the highlighted fragment for the
     * original value of {@code highlightField} when present.
     * (Map-to-object conversion: JSONObject.parseObject(JSONObject.toJSONString(map), Content.class))
     *
     * @param searchResponse the search response to unpack
     * @param highlightField the field that was highlighted
     * @return one map per hit, with the highlight substituted in
     */
    private List<Map<String, Object>> setSearchResponse(SearchResponse searchResponse, String highlightField) {
        ArrayList<Map<String, Object>> list = new ArrayList<>();
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            Map<String, HighlightField> high = hit.getHighlightFields();
            HighlightField title = high.get(highlightField);
            Map<String, Object> sourceAsMap = hit.getSourceAsMap();
            if (title != null) {
                // Stitch the highlight fragments together and replace the raw value.
                Text[] texts = title.fragments();
                StringBuilder nTitle = new StringBuilder();
                for (Text text : texts) {
                    nTitle.append(text);
                }
                sourceAsMap.put(highlightField, nTitle.toString());
            }
            list.add(sourceAsMap);
        }
        return list;
    }

    /**
     * Runs a (possibly paged) search with highlighting on one field.
     *
     * @param index          index name
     * @param query          prepared query/source builder (paging set by caller)
     * @param highlightField field to highlight
     * @return hits as maps with the highlight substituted, or null on a non-200 status
     * @throws IOException on transport failure
     */
    public List<Map<String, Object>> searchListData(String index, SearchSourceBuilder query,
                                                    String highlightField) throws IOException {
        SearchRequest request = new SearchRequest(index);
        HighlightBuilder highlight = new HighlightBuilder();
        highlight.field(highlightField);
        // Highlight only the queried field, not every matching one.
        highlight.requireFieldMatch(false);
        highlight.preTags("<span style='color:red'>");
        highlight.postTags("</span>");
        query.highlighter(highlight);
        // To return only counts (no documents): query.fetchSource(false);
        request.source(query);
        SearchResponse response = restHighLevelClient.search(request, RequestOptions.DEFAULT);
        log.info("totalHits:" + response.getHits().getTotalHits());
        if (response.status().getStatus() == 200) {
            return setSearchResponse(response, highlightField);
        }
        return null;
    }
}
5,基于AOP拦截服务器日志信息(Logstash订阅主题并输出到ES)
实现思路:主要利用AOP的“前置通知” 和 “后置通知” 将请求和响应日志封装成JSON,并推送到Kafka中。
1)LogContainer.java
package com.bsoft.elk.aop;

import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;

/****
 * Buffers log messages and ships them to Kafka on a dedicated background
 * thread, so request handling never blocks on the broker.
 */
@Component
public class LogContainer {

    /** In-memory buffer between producers (request threads) and the sender thread. */
    private static final BlockingDeque<String> LOG_DEQUE = new LinkedBlockingDeque<>();

    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Starts the sender thread once the bean is fully wired.
     *
     * <p>FIX: the original started the thread inside the constructor, i.e. before
     * Spring injected {@code kafkaTemplate} — any message dequeued in that window
     * hit a NullPointerException. {@code @PostConstruct} runs after injection.
     */
    @PostConstruct
    private void startSender() {
        LogThreadKafka sender = new LogThreadKafka();
        // Daemon so the sender never keeps the JVM alive on shutdown.
        sender.setDaemon(true);
        sender.start();
    }

    /**
     * Enqueues a log message for asynchronous delivery.
     *
     * @param log serialized log entry (JSON)
     */
    public void put(String log) {
        LOG_DEQUE.offer(log);
    }

    /** Background thread that drains the buffer into the "kafka-log" topic. */
    class LogThreadKafka extends Thread {
        @Override
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    // FIX: take() blocks until a message arrives; the original
                    // poll() in a tight while(true) busy-spun a full CPU core
                    // whenever the queue was empty.
                    String log = LOG_DEQUE.take();
                    if (StringUtils.isNotEmpty(log)) {
                        // Publish to the topic Logstash subscribes to.
                        kafkaTemplate.send("kafka-log", log);
                    }
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and exit cleanly.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }
}
2)日志格式RequestPojo.java
package com.bsoft.elk.pojo;

import lombok.Data;

import java.util.Date;

/***
 * Log record payload: one instance is serialized to JSON and pushed to Kafka
 * per intercepted service call (see AopLogAspect).
 * NOTE(review): the java.util.Date import is unused — requestTime is a
 * pre-formatted String.
 */
@Data
public class RequestPojo {
    // Full request URL
    private String url;
    // HTTP method (GET/POST/...)
    private String method;
    // Intercepted method signature (JoinPoint signature string)
    private String signature;
    // Method arguments, rendered via Arrays.toString
    private String args;
    // Formatted as "yyyy-MM-dd HH:mm:ss" by the aspect
    private String requestTime;
    // Server "ip:port" that handled the request
    private String address;
    // Exception description, set only by the after-throwing advice
    private String error;
}
3)AopLogAspect.java
package com.bsoft.elk.aop;

import com.alibaba.fastjson.JSONObject;
import com.bsoft.elk.pojo.RequestPojo;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;

import javax.servlet.http.HttpServletRequest;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;

/***
 * Logging aspect: captures request, response and exception details around
 * service-layer methods and hands them to {@link LogContainer} for
 * asynchronous delivery to Kafka.
 */
@Aspect
@Component
public class AopLogAspect {

    @Value("${server.port}")
    private String serverPort;

    @Autowired
    private LogContainer logContainer;

    /**
     * Pointcut over all service-layer methods.
     *
     * <p>FIX: the original expression targeted {@code com.xiaojie.elk.service},
     * a package that does not exist in this project (root is
     * {@code com.bsoft.elk}), so none of the advices ever fired.
     */
    @Pointcut("execution(* com.bsoft.elk.service.*.*(..))")
    private void serviceAspect() {
    }

    /**
     * Before advice: snapshots the incoming request and pushes it to Kafka.
     *
     * @param joinPoint the intercepted service call
     */
    @Before(value = "serviceAspect()")
    public void methodBefore(JoinPoint joinPoint) {
        ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder
                .getRequestAttributes();
        if (requestAttributes == null) {
            // FIX: not invoked within an HTTP request (e.g. a scheduled task) —
            // the original dereferenced null here and failed the service call.
            return;
        }
        HttpServletRequest request = requestAttributes.getRequest();
        RequestPojo requestPojo = new RequestPojo();
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // timestamp format
        requestPojo.setRequestTime(df.format(new Date()));
        requestPojo.setUrl(request.getRequestURL().toString());
        requestPojo.setMethod(request.getMethod());
        requestPojo.setSignature(joinPoint.getSignature().toString());
        requestPojo.setArgs(Arrays.toString(joinPoint.getArgs()));
        // "ip:port" lets a log entry be traced back to the exact server instance.
        requestPojo.setAddress(getIpAddr(request) + ":" + serverPort);
        // Hand the JSON payload to the async Kafka sender.
        String log = JSONObject.toJSONString(requestPojo);
        logContainer.put(log);
    }

    /**
     * After-returning advice: records the response body and timestamp.
     *
     * @param o the value returned by the service method
     */
    @AfterReturning(returning = "o", pointcut = "serviceAspect()")
    public void methodAfterReturing(Object o) {
        ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder
                .getRequestAttributes();
        if (requestAttributes == null) {
            // Outside an HTTP request — nothing to attribute the response to.
            return;
        }
        HttpServletRequest request = requestAttributes.getRequest();
        JSONObject respJSONObject = new JSONObject();
        JSONObject jsonObject = new JSONObject();
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // timestamp format
        jsonObject.put("response_time", df.format(new Date()));
        jsonObject.put("response_content", JSONObject.toJSONString(o));
        // FIX: key was misspelled "ip_addres" — documents indexed under the old
        // key predate this change.
        jsonObject.put("ip_address", getIpAddr(request) + ":" + serverPort);
        respJSONObject.put("response", jsonObject);
        logContainer.put(respJSONObject.toJSONString());
    }

    /**
     * After-throwing advice: records the failed request together with the
     * exception text.
     *
     * @param joinPoint the intercepted service call
     * @param e         the exception thrown by the service method
     */
    @AfterThrowing(pointcut = "serviceAspect()", throwing = "e")
    public void serviceAspect(JoinPoint joinPoint, Exception e) {
        ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder
                .getRequestAttributes();
        if (requestAttributes == null) {
            // Outside an HTTP request — skip rather than NPE.
            return;
        }
        HttpServletRequest request = requestAttributes.getRequest();
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // timestamp format
        RequestPojo requestPojo = new RequestPojo();
        requestPojo.setRequestTime(df.format(new Date()));
        requestPojo.setUrl(request.getRequestURL().toString());
        requestPojo.setMethod(request.getMethod());
        requestPojo.setSignature(joinPoint.getSignature().toString());
        requestPojo.setArgs(Arrays.toString(joinPoint.getArgs()));
        requestPojo.setAddress(getIpAddr(request) + ":" + serverPort);
        requestPojo.setError(e.toString());
        String log = JSONObject.toJSONString(requestPojo);
        logContainer.put(log);
    }

    /***
     * Resolves the client's original IP, looking through common proxy headers
     * first; "ip:port" in the log makes it possible to locate the exact server
     * when diagnosing issues such as JVM memory leaks.
     *
     * @param request the current HTTP request
     * @return best-effort client IP address
     */
    public static String getIpAddr(HttpServletRequest request) {
        // X-Forwarded-For (XFF) carries the original client IP when the request
        // passed through an HTTP proxy or load balancer.
        String ipAddress = request.getHeader("x-forwarded-for");
        if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
            ipAddress = request.getHeader("Proxy-Client-IP");
        }
        if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
            ipAddress = request.getHeader("WL-Proxy-Client-IP");
        }
        if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
            ipAddress = request.getRemoteAddr();
            if (ipAddress.equals("127.0.0.1") || ipAddress.equals("0:0:0:0:0:0:0:1")) {
                // Loopback: substitute the NIC-configured host address.
                try {
                    ipAddress = InetAddress.getLocalHost().getHostAddress();
                } catch (UnknownHostException e) {
                    // FIX: the original dereferenced a null InetAddress after this
                    // exception; keep the loopback address instead.
                    e.printStackTrace();
                }
            }
        }
        // When the request crossed several proxies the header holds a comma-
        // separated list; the first entry is the real client IP.
        if (ipAddress != null && ipAddress.length() > 15) { // "***.***.***.***".length() = 15
            if (ipAddress.indexOf(",") > 0) {
                ipAddress = ipAddress.substring(0, ipAddress.indexOf(","));
            }
        }
        return ipAddress;
    }
}
6,控制类案例
package com.bsoft.elk.controller;

import com.bsoft.elk.service.IndexService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Demo endpoint: each call goes through the service layer so the AOP logging
 * pipeline (aspect -> Kafka -> Logstash -> Elasticsearch) fires.
 */
@RestController
public class IndexController {

    @Autowired
    private IndexService indexService;

    /**
     * Invokes the service (triggering the logging aspect) and echoes the user
     * name with the password masked.
     *
     * @param name user name from the query string
     * @param pwd  password from the query string (never echoed back)
     * @return a confirmation string with the password masked
     */
    @GetMapping("/index")
    public String index(String name, String pwd) {
        indexService.index(name, pwd);
        StringBuilder reply = new StringBuilder();
        reply.append("用户名称:").append(name).append("密码是:************");
        return reply.toString();
    }
}
7,实现方法
package com.bsoft.elk.service;

import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Demo service whose method executions are intercepted by the logging aspect
 * (its package matches the aspect's service-layer pointcut).
 */
@Service
@Slf4j
public class IndexService {
    // Logs the received credentials.
    // NOTE(review): this writes the raw password to the log/Kafka/ES pipeline —
    // confirm this is intentional for the demo; never do this in production.
    public void index(String name,String pwd){
        log.info("接收到用户名:{}>>>>>>>>>>>>>密码{}",name,pwd);
    }
}
8,启动工程
package com.bsoft.elk;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Spring Boot entry point for the ELK logging demo.
 */
@SpringBootApplication
public class ElkApp {
    public static void main(String[] args) {
        // FIX: forward command-line args — the original dropped them, so
        // externalized config such as --server.port=... was silently ignored.
        SpringApplication.run(ElkApp.class, args);
    }
}
请求接口查看日志:

参考资料:
浙公网安备 33010602011771号