root@user:~/boke# cat aa.txt
~100,000 QPS; Redis 6.0 still executes commands on a single thread (6.0 only adds threaded I/O)
wget https://download.redis.io/releases/redis-6.2.5.tar.gz
wget https://download.redis.io/releases/redis-5.0.9.tar.gz
mkdir /apps/redis -p
yum install -y epel-release
dnf install jemalloc-devel gcc make
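The notes jump from download straight to make install inside the source tree; a minimal sketch of the implied extract-and-enter steps (5.0.9 assumed from the shell prompt below):
tar xf redis-5.0.9.tar.gz
cd redis-5.0.9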
[root@bogon redis-5.0.9]# make PREFIX=/apps/redis/ install
ln -s /apps/redis/bin/* /usr/bin
mkdir /apps/redis/{etc,log,data,run}
useradd -r -s /sbin/nologin redis
chown redis:redis /apps/redis/ -R
redis.conf (minimal changes):
daemonize yes
bind 0.0.0.0
redis-server /apps/redis/etc/redis.conf
[root@bogon etc]# netstat -anp|grep redis
tcp 0 0 0.0.0.0:6379 0.0.0.0:* LISTEN 42975/redis-server
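A quick sanity check that the instance answers (host and port are the defaults configured above):
redis-cli -h 127.0.0.1 -p 6379 ping
# expected reply: PONG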
All web servers share a single Redis server for session/account data; the limitation is that Redis itself becomes a single point of failure if it goes down.
browser -> proxy -> web1..webN -> redis
Use cases
Session sharing: common in web clusters where multiple Tomcat or PHP servers share sessions
Message queue: ELK log buffering; publish/subscribe for some business systems
Counters: leaderboards, product view counts and other count-based statistics (see the INCR sketch below)
Cache: database query results, e-commerce product data, news content
Social (Weibo/WeChat style): mutual friends, likes and comments
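A minimal counter sketch with INCR, as referenced in the counters item above (the key name pv:item:1001 is illustrative):
redis-cli INCR pv:item:1001    # each page view adds 1, atomically (single-threaded server)
redis-cli GET pv:item:1001     # read the current count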
logfile "/apps/redis/log/redis_6380.log"
dbfilename dump_6380.rdb
dir /apps/redis/data
cp redis_6379.conf redis_6380.conf
sed -i 's/6379/6380/g' redis_6380.conf
sed 's/6380/6381/g' redis_6380.conf > redis_6381.conf    # no -i here: -i would edit 6380's file in place and leave the redirect target empty
[root@bogon etc]# cat redis_6380.conf |grep 6380
port 6380
pidfile /var/run/redis_6380.pid
logfile "/apps/redis/log/redis_6380.log"
dbfilename dump_6380.rdb
###
chown redis:redis *
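With per-port configs and ownership in place, each extra instance starts from its own file (a sketch; paths follow the layout above):
redis-server /apps/redis/etc/redis_6380.conf
redis-server /apps/redis/etc/redis_6381.conf
netstat -anp | grep redis    # should now show 6379, 6380 and 6381 listening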
[root@bogon redis]# cat /etc/sysctl.conf
net.core.somaxconn = 1024
vm.overcommit_memory = 1
[root@bogon redis]# sysctl -p
[root@bogon etc]# chmod +x /etc/rc.d/rc.local
[root@bogon etc]# cat /etc/rc.d/rc.local
#!/bin/bash
echo never > /sys/kernel/mm/transparent_hugepage/enabled
[root@bogon etc]# cat /usr/lib/systemd/system/redis.service
[Unit]
Description=Redis persistent key-value database
After=network.target
[Service]
#ExecStart=/usr/bin/redis-server /etc/redis.conf --supervised systemd
ExecStart=/apps/redis/bin/redis-server /apps/redis/etc/redis_6379.conf --supervised systemd
ExecStop=/bin/kill -s QUIT $MAINPID
Type=notify
User=redis
Group=redis
RuntimeDirectory=redis
RuntimeDirectoryMode=0755
[Install]
WantedBy=multi-user.target
[root@bogon etc]# tail -f /var/log/messages
Sep 24 03:30:11 bogon setroubleshoot[3638]: SELinux is preventing /usr/lib/systemd/systemd from execute access on the file redis-server. For complete SELinux messages run: sealert -l bf702d58-0925-4ddb-85b4-9deb916cc4ad
Sep 24 03:30:11 bogon platform-python[3638]: SELinux is preventing
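One common way past this denial is to give the binary an executable SELinux type (or temporarily switch to permissive mode to confirm SELinux is the blocker); a sketch, not the only fix:
chcon -t bin_t /apps/redis/bin/redis-server    # label the binary as executable for systemd
# or, for a quick test only:
setenforce 0
systemctl restart redis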
[root@bogon etc]# egrep -v "(^$|^#)" /apps/redis/etc/redis_6379.conf
bind 0.0.0.0
protected-mode yes
port 6379
tcp-backlog 511
timeout 0
tcp-keepalive 300
daemonize yes
supervised no
pidfile /var/run/redis_6379.pid
loglevel notice
logfile "/apps/redis/log/redis_6379.log"
databases 16
always-show-logo yes
save 900 1
save 300 10
save 60 10000
stop-writes-on-bgsave-error yes
rdbcompression yes
rdbchecksum yes
dbfilename dump_6379.rdb
dir /apps/redis/data
replica-serve-stale-data yes
replica-read-only yes
repl-diskless-sync no
repl-diskless-sync-delay 5
repl-disable-tcp-nodelay no
replica-priority 100
lazyfree-lazy-eviction no
lazyfree-lazy-expire no
lazyfree-lazy-server-del no
replica-lazy-flush no
appendonly no
appendfilename "appendonly.aof"
appendfsync everysec
no-appendfsync-on-rewrite no
auto-aof-rewrite-percentage 100
auto-aof-rewrite-min-size 64mb
aof-load-truncated yes
aof-use-rdb-preamble yes
lua-time-limit 5000
slowlog-log-slower-than 10000
slowlog-max-len 128
latency-monitor-threshold 0
notify-keyspace-events ""
hash-max-ziplist-entries 512
hash-max-ziplist-value 64
list-max-ziplist-size -2
list-compress-depth 0
set-max-intset-entries 512
zset-max-ziplist-entries 128
zset-max-ziplist-value 64
hll-sparse-max-bytes 3000
stream-node-max-bytes 4096
stream-node-max-entries 100
activerehashing yes
client-output-buffer-limit normal 0 0 0
client-output-buffer-limit replica 256mb 64mb 60
client-output-buffer-limit pubsub 32mb 8mb 60
hz 10
dynamic-hz yes
aof-rewrite-incremental-fsync yes
rdb-save-incremental-fsync yes
slowlog-log-slower-than 10000: log commands slower than 10,000 microseconds (10 ms); slowlog-max-len 128: keep the latest 128 entries
127.0.0.1:6380> SLOWLOG LEN
(integer) 1    # number of entries currently in the slow log; 0 means nothing slow has been recorded
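To see what was slow, SLOWLOG GET returns the recorded entries (id, timestamp, duration in microseconds, command); SLOWLOG RESET clears the log:
127.0.0.1:6380> SLOWLOG GET 1
127.0.0.1:6380> SLOWLOG RESET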
RDB snapshot: the server forks a child that dumps the dataset to a temporary RDB file (so a crash or power loss cannot corrupt the previous dump), then renames it over the final RDB file.
Spinning disk: a few hundred MB/s; dumping 30 GB at ~300 MB/s takes roughly 30*1024/300 ≈ 100 s.
SSD: upwards of 1 GB/s, so the same dump finishes in well under a minute.
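A snapshot can also be forced by hand to watch this behavior (BGSAVE forks the child; LASTSAVE reports the unix time of the last successful dump):
redis-cli BGSAVE
redis-cli LASTSAVE
ls -l /apps/redis/data/dump_6379.rdb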
# cat redis_write-linux38.sh
#!/bin/bash
NUM=`seq 1 100000`
for i in ${NUM};do
    redis-cli -h 127.0.0.1 set key-${i} value-${i}
    echo "key-${i} value-${i} written"
done
echo "all 100,000 keys written to Redis"
127.0.0.1:6379> get key-500
"value-500"
[root@bogon ~]# cat test.py
#!/bin/env python3
import redis

pool = redis.ConnectionPool(host="127.0.0.1", port=6379, password="")
r = redis.Redis(connection_pool=pool)
for i in range(10000):
    r.set("k%d" % i, "v%d" % i)
    data = r.get("k%d" % i)
    print(data)
127.0.0.1:6379> get k500
"v500"
127.0.0.1:6379> CONFIG set maxmemory 1610612735    # ≈ 1.5 GB
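Verify the change and pair the limit with an eviction policy (allkeys-lru shown as one illustrative choice, not a recommendation):
127.0.0.1:6379> CONFIG GET maxmemory
127.0.0.1:6379> CONFIG SET maxmemory-policy allkeys-lru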
Copy-on-write: the forked child shares memory pages with the parent, so the snapshot is a consistent point-in-time view.
RDB snapshot
AOF: disabled by default; when both files exist, AOF is preferred at startup; near-real-time persistence, but writes accumulate redundantly in the file (hence rewrites)
appendonly no
127.0.0.1:6379> config set appendonly yes
OK
Step 2: also set appendonly yes in the config file, so the change survives a restart
appendfsync everysec: fsync once per second (the default)
appendfsync always: fsync after every write; used for financial-grade durability
flushall    # wipes every key in all databases; dangerous (see rename-command below)
bgrewriteaof    # manually trigger an AOF rewrite to compact the file
auto-aof-rewrite-min-size 64mb    # minimum AOF size before automatic rewrite kicks in
127.0.0.1:6379> KEYS *    # blocks the single thread on large datasets; avoid in production
rename-command KEYS "aa"    # redis.conf directive; takes the exact command name, "" disables the command
127.0.0.1:6379> SHUTDOWN
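A redis.conf sketch covering the dangerous commands touched above (the alias "aa" mirrors the note; a restart is needed for the directives to take effect):
rename-command KEYS "aa"      # still callable, but only as "aa"
rename-command FLUSHALL ""    # disabled entirely
rename-command SHUTDOWN ""    # disabled entirely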
SUBSCRIBE channel1 channel2
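Pub/sub round trip: one client subscribes, another publishes (channel names taken from the line above):
# terminal 1
redis-cli SUBSCRIBE channel1 channel2
# terminal 2
redis-cli PUBLISH channel1 "hello"    # returns the number of subscribers that received it
# terminal 1 then prints: message / channel1 / hello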