# Redis is a non-relational (NoSQL, "Not Only SQL") database: commonly used as a cache, operations are atomic, data lives in memory and is periodically written to disk
Install:
sudo apt-get update
sudo apt install redis-server
Start: redis-server
Connect:
Local: redis-cli -h 127.0.0.1 -p 6379
Remote:
Edit the Redis config file on the server (here a VM): /etc/redis/redis.conf
"""
如果要从外部计算机连接到Redis,可以采用以下解决方案之一:
1)通过从运行服务器的同一主机连接到Redis,只需禁用保护模式,从环回接口发送命令“CONFIG SET protected mode no”,
但是,如果要从internet公开访问Redis,请确保这样做。使用配置重写将此更改永久化。
2) 或者,您可以通过编辑Redis配置文件,将protected mode选项设置为“no”,然后重新启动服务器来禁用保护模式。
3) 如果您只是为了测试而手动启动服务器,请使用“--protected mode no”选项重新启动它。
4) 设置绑定地址或身份验证密码。
"""
bind 0.0.0.0 accept connections from any IP
requirepass password set an authentication password
Restart redis: /etc/init.d/redis-server restart
After this, remote clients must authenticate:
redis-cli connect
AUTH password authenticate
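For example, a remote session then looks like this (using the VM address from the later examples):
$ redis-cli -h 192.168.193.129 -p 6379
192.168.193.129:6379> get name
(error) NOAUTH Authentication required.
192.168.193.129:6379> AUTH password
OK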
Usage:
String operations: key -> value
set name "" EX seconds PX milliseconds NX only set if name does not already exist (otherwise ignored) XX only set if name already exists (otherwise do nothing) # store a string
get name # fetch a string
keys * # list all keys
mset # set in bulk: MSET key value [key value ...]
mget # get in bulk: MGET key [key ...]
getset # return the old value and set a new one
****************************************************************************
setbit # treat the string's bytes as a bit array and set a single bit to 0 or 1
In Python, ord("a") gives the ASCII code and bin() gives its binary form:
"w": 119 = 01110111
"h": 104 = 01101000
"y": 121 = 01111001 -> "g": 103 = 01100111
To turn "why" into "whg", flip these bits of the last byte:
bit 19 -> 0
bit 20 -> 0
bit 21 -> 1
bit 22 -> 1
getbit # read the value of a single bit of the string
bitcount # count the bits set to 1
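A minimal redis-cli session for the example above, using a scratch key word (SETBIT returns the previous value of the bit):
127.0.0.1:6379> set word "why"
OK
127.0.0.1:6379> setbit word 19 0
(integer) 1
127.0.0.1:6379> setbit word 20 0
(integer) 1
127.0.0.1:6379> setbit word 21 1
(integer) 0
127.0.0.1:6379> setbit word 22 1
(integer) 0
127.0.0.1:6379> get word
"whg"
127.0.0.1:6379> getbit word 21
(integer) 1
127.0.0.1:6379> bitcount word
(integer) 14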
127.0.0.1:6379> set age 18 ex 3
OK
127.0.0.1:6379> get age # after the 3-second expiry
(nil)
127.0.0.1:6379> GETRANGE name 1 2 # slice the string
"hy"
127.0.0.1:6379> SETRANGE name 0 "rtuiijn" # overwrite starting at position 0
(integer) 7
127.0.0.1:6379> get name
"rtuiijn"
strlen get the length in bytes
incr increment a number by 1
incrbyfloat key num increment by a float
decr decrement by 1
append append to the string
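A quick session with these commands (key names are just for illustration):
127.0.0.1:6379> set counter 10
OK
127.0.0.1:6379> incr counter
(integer) 11
127.0.0.1:6379> incrbyfloat counter 0.5
"11.5"
127.0.0.1:6379> append counter "x" # returns the new length
(integer) 5
127.0.0.1:6379> strlen counter
(integer) 5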
Hash operations: a key works like a struct, e.g. xiaoming -> {"age":18,"hobby":"girl"} xiaohong -> {"age":18,"sex":"female"}
HSET key field value
hmset key field value [field value ...]
HGETALL key get all fields and values of the hash
HLEN key get the number of fields in the hash
HEXISTS key field check whether the field exists in the hash
hscan key cursor [MATCH pattern] [COUNT count] incrementally iterate the fields; cursor is the iteration cursor
127.0.0.1:6379> HSCAN xiaoming 0 match a*
1) "0"
2) 1) "age"
2) "18"
List operations
LPUSH KEY VALUE push onto the left (head)
RPUSH KEY VALUE push onto the right (tail)
LRANGE KEY START STOP
LLEN key
LINSERT key before|after pivot value insert relative to an existing value (the pivot) in the list
LSET key index value overwrite the value at an index
LREM key count value remove count occurrences of value
LPOP
LINDEX KEY INDEX
LTRIM key start stop
RPOPLPUSH keyA keyB pop a value from the right of A and push it to B
BRPOPLPUSH keyA keyB timeout blocking version: when A is empty, wait up to timeout; anything pushed in the meantime is moved to B immediately
BLPOP key [key ...] timeout pop values one at a time from the first non-empty key; once all keys are empty, block up to timeout and pop any newly arrived value immediately
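A short session with the core list commands:
127.0.0.1:6379> LPUSH mylist a b c
(integer) 3
127.0.0.1:6379> LRANGE mylist 0 -1
1) "c"
2) "b"
3) "a"
127.0.0.1:6379> LINSERT mylist before "b" "x"
(integer) 4
127.0.0.1:6379> LPOP mylist
"c"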
Set operations (relationship tests)
SADD key value
SMEMBERS key
SCARD key number of elements
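For example (SINTER, not listed above, is the classic "relationship test" between two sets; member order in replies is not guaranteed):
127.0.0.1:6379> SADD tags python redis
(integer) 2
127.0.0.1:6379> SMEMBERS tags
1) "redis"
2) "python"
127.0.0.1:6379> SCARD tags
(integer) 2
127.0.0.1:6379> SADD tags2 python celery
(integer) 2
127.0.0.1:6379> SINTER tags tags2
1) "python"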
Sorted sets
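A sorted set stores each member with a score and returns ranges in score order; a minimal sketch:
127.0.0.1:6379> ZADD board 100 alice 90 bob
(integer) 2
127.0.0.1:6379> ZRANGE board 0 -1 WITHSCORES
1) "bob"
2) "90"
3) "alice"
4) "100"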
Pipelines
Every operation issued from Python is a separate round trip to the Redis server, which gets expensive. A pipeline fixes this:
all the operations become one transaction that is finally submitted to Redis and executed together.
import redis
# r = redis.Redis(host="192.168.193.129",port=6379,password="w000000")
# print(r.get("age"))
pool = redis.ConnectionPool(host="192.168.193.129",port=6379,password="w000000") # create a connection pool
r = redis.Redis(connection_pool=pool)
pipe = r.pipeline(transaction=True) # wrap the pipeline in a MULTI/EXEC transaction
pipe.set("year","2020") # queue the commands on the pipeline, not on r
pipe.set("month","5")
pipe.execute() # send the whole batch and run it
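pipe.execute() returns one result per queued command ([True, True] here); a quick check that the batch ran:
print(r.mget("year", "month")) # [b'2020', b'5']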
Publish/subscribe:
# pub/sub helper class
import redis
class Helper():
    def __init__(self):
        self.pool = redis.ConnectionPool(host="192.168.193.129",port=6379,password="w000000")
        self.__conn = redis.Redis(connection_pool=self.pool)
        self.pub = "fm105.8" # channel to publish on
        self.sub = "fm105.8" # channel to subscribe to
    def public(self,msg):
        self.__conn.publish(self.pub,msg)
        return True
    def subscribe(self):
        pub = self.__conn.pubsub()
        pub.subscribe(self.sub)
        pub.parse_response() # consume the subscribe confirmation message
        return pub
# publisher
from redis_helper import Helper
obj = Helper()
obj.public("www")
# subscriber (any number of subscribers receive the message at the same time)
from redis_helper import Helper
obj = Helper()
sub = obj.subscribe()
while True:
    msg = sub.parse_response()
    print(msg)
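redis-py also exposes a generator interface on the pubsub object, which avoids calling the low-level parse_response() directly; an equivalent loop:
for msg in sub.listen(): # yields dicts like {'type': 'message', 'channel': b'fm105.8', 'data': b'www'}
    print(msg)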
Celery:
Celery is a distributed asynchronous task queue written in Python.
Error "AttributeError: 'float' object has no attribute 'items'": fixed by pinning redis==2.10.6
Error "kombu.exceptions.VersionMismatch: Redis transport requires redis-py versions 3.2.0 or later. You have 2.10.6": fixed by pinning kombu==4.1.0
celery==4.1.0
celery_test.py
from celery import Celery
app = Celery("tasks",
             broker="redis://:w000000@192.168.193.129:6379/0", # message broker
             backend="redis://:w000000@192.168.193.129") # result backend: where task results are written
@app.task
def add(x,y):
    return x+y
Start it: celery -A celery_test worker --loglevel=info
In another session, open a Python shell:
>>> from celery_test import add
>>> result = add.delay(4,4)
>>> result
<AsyncResult: 4a710433-ae4b-4f05-ae4c-7385f2968f5a>
>>> result.get()
8
Using Celery in Django
1. Create the Django project
2. Create celery.py and tasks.py and modify __init__.py, laid out as follows:
django_celery
    -django_celery
        -__init__.py
        -celery.py
    -app01
        -tasks.py
celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_celery.settings') # project name
app = Celery('django_celery') # app name
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
__init__.py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ['celery_app']
tasks.py
from __future__ import absolute_import, unicode_literals
from celery import shared_task
@shared_task
def add(x, y):
    return x + y
@shared_task
def mul(x, y):
    return x * y
@shared_task
def xsum(numbers):
    return sum(numbers)
3. Edit settings.py
CELERY_BROKER_URL='redis://:w000000@192.168.193.129:6379/0'
CELERY_RESULT_BACKEND='redis://:w000000@192.168.193.129'
4. Start celery from the project directory:
celery -A django_celery worker -l info
Output:
/usr/lib/python3/dist-packages/celery/platforms.py:795: RuntimeWarning: You're running the worker with superuser privileges: this is
absolutely not recommended!
Please specify a different user using the -u option.
User information: uid=0 euid=0 gid=0 egid=0
uid=uid, euid=euid, gid=gid, egid=egid,
-------------- celery@why-virtual-machine v4.1.0 (latentcall)
---- **** -----
--- * *** * -- Linux-5.3.0-51-generic-x86_64-with-Ubuntu-18.04-bionic 2020-05-21 11:54:13
-- * - **** ---
- ** ---------- [config]
- ** ---------- .> app: celery_test:0x7f655863c5f8
- ** ---------- .> transport: redis://:**@192.168.193.129:6379/0
- ** ---------- .> results: redis://:**@192.168.193.129/
- *** --- * --- .> concurrency: 1 (prefork)
-- ******* ---- .> task events: OFF (enable -E to monitor tasks in this worker)
--- ***** -----
-------------- [queues]
.> celery exchange=celery(direct) key=celery
[tasks]
. celery_test.tasks.add
. celery_test.tasks.mul
. celery_test.tasks.xsum
. django_celery.celery.debug_task
[2020-05-21 11:54:13,966: INFO/MainProcess] Connected to redis://:**@192.168.193.129:6379/0
[2020-05-21 11:54:13,992: INFO/MainProcess] mingle: searching for neighbors
[2020-05-21 11:54:15,039: INFO/MainProcess] mingle: all alone
[2020-05-21 11:54:15,058: WARNING/MainProcess] /usr/lib/python3/dist-packages/celery/fixups/django.py:202: UserWarning: Using settings.DEBUG leads to a memory leak, never use this setting in production environments!
warnings.warn('Using settings.DEBUG leads to a memory leak, never '
[2020-05-21 11:54:15,058: INFO/MainProcess] celery@why-virtual-machine ready.
[2020-05-21 11:54:16,126: INFO/MainProcess] Received task: celery_test.tasks.add[44d461c0-5fea-4690-93e7-696d3439553c]
[2020-05-21 11:54:16,128: INFO/MainProcess] Received task: celery_test.tasks.add[5c2e933f-a926-4c5b-90a9-cbad1f1c050b]
[2020-05-21 11:54:16,129: WARNING/ForkPoolWorker-1] 4
[2020-05-21 11:54:16,130: WARNING/ForkPoolWorker-1] 4
[2020-05-21 11:54:16,134: INFO/MainProcess] Received task: celery_test.tasks.add[38bedc71-a5df-4ec6-8788-5a03fee379d0]
[2020-05-21 11:54:16,136: INFO/ForkPoolWorker-1] Task celery_test.tasks.add[44d461c0-5fea-4690-93e7-696d3439553c] succeeded in 0.007576367999718059s: 111
... (the remaining add tasks repeat the same Received/succeeded pattern)
5. Start the project
6. Get the task id: calling add.delay(...) returns an AsyncResult, and result.id is the task id
7. Fetch the task result by id
from celery.result import AsyncResult
task_id = ""
res_task = AsyncResult(id=task_id)
res = res_task.get()
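Tying steps 5-7 together, a hypothetical pair of views (names and URL wiring are illustrative, not from the original notes):
from django.http import JsonResponse
from celery.result import AsyncResult
from app01.tasks import add

def start_task(request):
    result = add.delay(4, 4) # returns immediately with an AsyncResult
    return JsonResponse({"task_id": result.id})

def task_status(request, task_id):
    res = AsyncResult(id=task_id)
    return JsonResponse({"ready": res.ready(), "result": res.result})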
Celery periodic tasks
The directory layout:
s3proj/
├── celery.py
├── __init__.py
├── __pycache__
│ ├── celery.cpython-36.pyc
│ ├── __init__.cpython-36.pyc
│ └── tasks.cpython-36.pyc
└── tasks.py
celery.py:
from celery import Celery
# create a Celery instance
broker = 'redis://:w000000@127.0.0.1:6379/0' # where tasks are queued: redis://[:password@]ip:port/db-number
backend = 'redis://:w000000@127.0.0.1' # where task results are stored
include = ['s3proj.tasks',] # modules that contain the tasks
app = Celery(broker=broker, backend=backend, include=include)
app.conf.timezone = 'Asia/Shanghai' # configure the timezone
app.conf.enable_utc = False # whether to use UTC
from datetime import timedelta
from celery.schedules import crontab
app.conf.beat_schedule = {
    # the schedule entry name is arbitrary
    'get_banner-task': {
        'task': 's3proj.tasks.get_baidu_info', # dotted path identifying which task to run
        'schedule': 10.0, # run every 10 seconds
    },
}
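The crontab import above goes unused in this snippet; for a calendar-style schedule, an entry inside app.conf.beat_schedule would instead look like this (illustrative):
    'get_baidu-cron': {
        'task': 's3proj.tasks.get_baidu_info',
        'schedule': crontab(hour=7, minute=30), # every day at 07:30
    },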
tasks.py:
from .celery import app
@app.task # the decorator is required
def get_baidu_info():
    return "success"
Start the worker:
celery -A s3proj worker -l info
Start beat:
celery -A s3proj beat
Periodic tasks with Celery and Django
1. pip3 install django-celery-beat
2. INSTALLED_APPS = (
       ...,
       'django_celery_beat',
   )
3. python3 manage.py makemigrations
4. python3 manage.py migrate
5. Open the Django admin; django_celery_beat adds these models:
Clocked
Crontabs # run at specific dates/times
Intervals # run at fixed intervals
Periodic tasks # task definitions; the tasks themselves live in tasks.py and are auto-discovered
Solar events
celery -A celery_time beat -l info -S django # start beat with the Django database scheduler
celery -A celery_time worker -l info # start the worker
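Instead of clicking through the admin, the same records can be created from code; a sketch using django-celery-beat's models (the task path app01.tasks.add is an assumption):
from django_celery_beat.models import IntervalSchedule, PeriodicTask

# run every 10 seconds
schedule, _ = IntervalSchedule.objects.get_or_create(
    every=10, period=IntervalSchedule.SECONDS)
PeriodicTask.objects.create(
    interval=schedule,
    name='add every 10s',   # must be unique
    task='app01.tasks.add', # dotted path to a @shared_task
    args='[4, 4]',          # JSON-encoded positional args
)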