Collecting Elasticsearch thread pool stats with Python
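
The script below pulls thread-pool stats from the Elasticsearch _cat/thread_pool API and writes one row per node and pool into MySQL. First, the table that stores the samples: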

 

CREATE TABLE `tb_es_thread_pool_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `host` varchar(64) DEFAULT NULL,
  `name` varchar(64) DEFAULT NULL,
  `type` varchar(64) DEFAULT NULL,
  `size` int(11) DEFAULT NULL,
  `queue` int(11) DEFAULT NULL,
  `queue_size` int(11) DEFAULT NULL,
  `active` int(11) DEFAULT NULL,
  `rejected` int(11) DEFAULT NULL,
  `completed` int(11) DEFAULT NULL,
  `min` int(11) DEFAULT NULL,
  `max` int(11) DEFAULT NULL,
  `keep_alive` varchar(32) DEFAULT NULL,
  `largest` int(11) DEFAULT NULL,
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
  PRIMARY KEY (`id`),
  KEY `idx_host_name_time` (`host`,`name`,`create_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
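
Once a few snapshots have accumulated, the table can answer questions such as which pools are rejecting work. A minimal example query (the one-hour window is an arbitrary choice, not part of the original script; rejected is a cumulative per-node counter, so the latest value in the window is what matters):

SELECT host, name, MAX(rejected) AS rejected
FROM tb_es_thread_pool_info
WHERE create_time >= NOW() - INTERVAL 1 HOUR
GROUP BY host, name
HAVING MAX(rejected) > 0;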

 

 

#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from elasticsearch import Elasticsearch
import time
import pymysql


# MySQL connection settings
gl_mysql_server = "192.168.1.14"
gl_user_name = "hxl"
gl_password = "mysql"
gl_db_name = "db_cmdbtest"
gl_port = 3306


# Elasticsearch connection settings
gl_es_username = 'elastic'
gl_es_passwd = "elastic"
gl_url = 'http://192.168.1.134:19200'

def utc2bjtime(utc_time):
    # Convert an epoch timestamp in milliseconds to a local-time string;
    # yields Beijing time when the host timezone is UTC+8. Not called by
    # this script, but handy for ES fields that report epoch millis.
    localtime = time.localtime(utc_time/1000)
    bj_time = time.strftime("%Y-%m-%d %H:%M:%S", localtime)
    return bj_time
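# Example (hypothetical value; output assumes the host timezone is UTC+8):
#   utc2bjtime(1700000000000)  ->  '2023-11-15 06:13:20'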

def insert_data(thread_pool):
    # One row per node/pool sample; values come straight from the cat API dict.
    db = pymysql.connect(host=gl_mysql_server, user=gl_user_name, password=gl_password,
                         db=gl_db_name, port=gl_port, use_unicode=True, charset="utf8")
    cursor = db.cursor()

    # Includes the completed column, which the original query fetched but never
    # inserted, and uses placeholders so pymysql handles quoting and escaping.
    insert_sql = ("insert into tb_es_thread_pool_info"
                  "(host,name,type,size,queue,queue_size,active,rejected,completed,"
                  "min,max,keep_alive,largest) "
                  "values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
    params = (thread_pool["host"], thread_pool["name"], thread_pool["type"],
              thread_pool["size"], thread_pool["queue"], thread_pool["queue_size"],
              thread_pool["active"], thread_pool["rejected"], thread_pool["completed"],
              thread_pool["min"], thread_pool["max"], thread_pool["keep_alive"],
              thread_pool["largest"])
    try:
        cursor.execute(insert_sql, params)
        db.commit()
    except Exception as err:
        # Roll back the transaction if the insert fails.
        print("SQL execution error:", err)
        db.rollback()
    cursor.close()
    db.close()
    return 0

def get_cat_thread():
    # http_auth is the 7.x client style; newer clients use basic_auth instead.
    es = Elasticsearch([gl_url], http_auth=(gl_es_username, gl_es_passwd))
    try:
        # format="json" returns a list of dicts, one per node/thread-pool pair;
        # h= selects the columns, matching the MySQL table above.
        thread_pool_info = es.cat.thread_pool(
            thread_pool_patterns="generic,get,search,write,index",
            format="json",
            h="id,host,name,type,size,queue,queue_size,min,max,active,rejected,completed,keep_alive,largest")
        for thread_pool in thread_pool_info:
            insert_data(thread_pool)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    get_cat_thread()
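
Each run stores one snapshot, so schedule the script to build a time series; for example, a hypothetical once-a-minute cron entry (the path is illustrative):

* * * * * /usr/bin/python /path/to/es_thread_pool_info.py

create_time and modify_time need no handling in the script; MySQL fills them from the column defaults.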

 
