(1) Use Python to call slowlog_get to query the Redis slow log, run it as a job once a minute, fetch 200 entries each time, and store them in MySQL for later analysis:
#!/usr/bin/env python
import datetime, time
import redis
import mysql.connector
import pytz

pystime = time.time()
tz = pytz.timezone('Asia/Shanghai')
redis_ips = ['10.x.x.x', '6379', '391axxx']


def slow_redis():
    Redis = redis.StrictRedis(host=redis_ips[0], port=redis_ips[1],
                              password=redis_ips[2], socket_timeout=1)
    results = Redis.slowlog_get(200)  # fetch up to 200 slowlog entries per run
    print(results)
    if not results:  # nothing in the slowlog yet
        return
    session_id = results[0]['id']
    # convert the epoch start_time into a readable datetime string
    i_start_time = datetime.datetime.fromtimestamp(int(results[0]['start_time'])).strftime('%Y-%m-%d %H:%M:%S')
    duration = round(int(results[0]['duration']) / 1000, 2)  # microseconds -> milliseconds
    command = results[0]['command']
    if duration >= 10:  # the dev team only cares about slow commands over 10 ms
        i_host = redis_ips[0]
        s_results = {
            "session_id": session_id, "host_ip": i_host, "start_time": i_start_time,
            "duration": duration, "cmd": command, 'insert_time': datetime.datetime.now(tz)
        }
        # print(s_results)
        conn = mysql.connector.connect(host='192.168.x.x', port=3306, user='devops',
                                       passwd='xxxxx', db='devops', charset='utf8mb4')
        cur = conn.cursor(buffered=True)
        try:
            # purge rows older than 7 days
            sql_d = "delete from devops.redis_log where insert_time <= DATE_SUB(CURDATE(), INTERVAL 7 DAY)"
            cur.execute(sql_d)
            conn.commit()
        except Exception:
            conn.rollback()
            print('\adevops.redis_log table delete failed. Transaction rolled back')
            raise
        try:
            sql_i = (
                f"insert ignore into devops.redis_log ({', '.join(s_results.keys())}) "
                f"values (%({')s, %('.join(s_results.keys())})s)")
            cur.execute(sql_i, s_results)
            conn.commit()
            # inserted = cur.rowcount
        except Exception:
            conn.rollback()
            print('\adevops.redis_log table ops failed. Transaction rolled back')
            raise
        cur.close()
        conn.close()


slow_redis()
pyetime = time.time()
print('Script runtime:', round(float(pyetime - pystime), 3), 's')
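Note that the script above only persists the first entry returned by slowlog_get. If you want to record every slow command returned in a run, a minimal sketch along the following lines could iterate over the whole result list and batch-insert it with executemany; the host, credentials and the 10 ms threshold below are the same placeholders used above, not new values.

# Sketch: persist every slowlog entry returned in one run (assumes the same
# devops.redis_log table and the placeholder credentials from the script above).
import datetime
import redis
import mysql.connector

def dump_all_slowlog_entries(host='10.x.x.x', port=6379, password='391axxx'):
    r = redis.StrictRedis(host=host, port=port, password=password, socket_timeout=1)
    rows = []
    for entry in r.slowlog_get(200):
        duration_ms = round(entry['duration'] / 1000, 2)   # microseconds -> milliseconds
        if duration_ms < 10:                                # same 10 ms threshold as above
            continue
        cmd = entry['command']
        if isinstance(cmd, bytes):                          # redis-py returns bytes by default
            cmd = cmd.decode('utf-8', 'replace')
        rows.append((entry['id'], host,
                     datetime.datetime.fromtimestamp(entry['start_time']),
                     duration_ms, cmd, datetime.datetime.now()))
    if not rows:
        return
    conn = mysql.connector.connect(host='192.168.x.x', port=3306, user='devops',
                                   passwd='xxxxx', db='devops', charset='utf8mb4')
    cur = conn.cursor()
    # insert ignore plus the unique key on session_id keeps reruns idempotent
    cur.executemany(
        "insert ignore into devops.redis_log "
        "(session_id, host_ip, start_time, duration, cmd, insert_time) "
        "values (%s, %s, %s, %s, %s, %s)", rows)
    conn.commit()
    cur.close()
    conn.close()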
Note: first create the table structure in MySQL:
CREATE TABLE `redis_log` (
  `session_id` int DEFAULT NULL,
  `host_ip` varchar(100) COLLATE utf8mb4_0900_as_cs DEFAULT NULL,
  `start_time` datetime DEFAULT NULL,
  `duration` decimal(10,0) DEFAULT NULL,
  `cmd` varchar(5000) COLLATE utf8mb4_0900_as_cs DEFAULT NULL,
  `insert_time` timestamp(3) NULL DEFAULT NULL,
  `remark` varchar(100) COLLATE utf8mb4_0900_as_cs DEFAULT NULL,
  UNIQUE KEY `redis_slog_pk` (`session_id`),
  KEY `idx_start_time` (`start_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_as_cs;
(2) Build the script into a Docker image and run it once a minute with a Kubernetes CronJob.
The Dockerfile is as follows:
FROM python:3.7-alpine

# create a non-root user and group
RUN addgroup -S zwgroup && adduser -S zhanwei -G zwgroup

# create the work dir
WORKDIR /app

# copy the python script and requirements into the container folder /app
COPY redis_sl_db.py /app/redis_sl_db.py
COPY ./requirements.txt /app/requirements.txt
RUN chmod +x /app/redis_sl_db.py
RUN pip install --no-cache-dir -r requirements.txt

# run as the non-root user created above
USER zhanwei

ENTRYPOINT ["python", "/app/redis_sl_db.py"]
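The requirements.txt copied by the Dockerfile is not shown above; based on the script's imports it only needs the Redis client, the MySQL connector and pytz, for example (exact version pins are up to you):

redis
mysql-connector-python
pytz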
The Kubernetes YAML is as follows:
apiVersion: batch/v1beta1
kind: CronJob
metadata:
  name: dc-csharp-redis-sl
spec:
  schedule: "*/1 * * * *"
  jobTemplate:
    spec:
      backoffLimit: 5
      template:
        spec:
          containers:
          - name: dc-csharp-redis-sl
            image: dockerhub.xxxxx.com/ops/dc_csharp_redis_sl:0.2
            imagePullPolicy: IfNotPresent
            command: ["/app/redis_sl_db.py"]
          imagePullSecrets:
          - name: dockerhub
          restartPolicy: OnFailure
Finally, just query the devops.redis_log table in MySQL to analyze the collected data.
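As a quick illustration (reusing the placeholder MySQL credentials from the collector script; the "slowest commands in the last 24 hours" condition is just an example), the data can be pulled back with the same mysql.connector client:

# Sketch: list the 20 slowest commands recorded in the last 24 hours.
import mysql.connector

conn = mysql.connector.connect(host='192.168.x.x', port=3306, user='devops',
                               passwd='xxxxx', db='devops', charset='utf8mb4')
cur = conn.cursor()
cur.execute(
    "select session_id, host_ip, start_time, duration, cmd "
    "from devops.redis_log "
    "where start_time >= DATE_SUB(NOW(), INTERVAL 1 DAY) "
    "order by duration desc limit 20")
for session_id, host_ip, start_time, duration, cmd in cur:
    print(session_id, host_ip, start_time, duration, 'ms', cmd)
cur.close()
conn.close()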