import gevent.monkey
gevent.monkey.patch_all()
import os
import sys
import fcntl
import gevent
from gevent.socket import wait_read
from redis import Redis
# PID tags every message this process publishes, so readers can tell clients apart.
PID = os.getpid()
# Shared Redis connection (default port); used by both greenlets below.
red = Redis('localhost')
def echo_stdin():
    """Read lines from stdin, log them, and publish them on 'echo'.

    Each line is prefixed with this process's PID, appended to the
    'echo_log' Redis list, and published on the 'echo' channel.
    Typing 'quit' publishes that final line and then ends the loop.
    """
    # Make stdin non-blocking so gevent's wait_read can cooperate.
    # Fix: preserve the flags already set on the fd instead of
    # clobbering them (the original passed O_NONBLOCK alone to
    # F_SETFL, which drops every other status flag).
    flags = fcntl.fcntl(sys.stdin, fcntl.F_GETFL)
    fcntl.fcntl(sys.stdin, fcntl.F_SETFL, flags | os.O_NONBLOCK)
    red.publish('echo', "[%i] joined" % (PID,))
    while True:
        # Yield to the gevent hub until stdin has data to read.
        wait_read(sys.stdin.fileno())
        l = sys.stdin.readline().strip()
        s = "[%i] %s" % (PID, l)
        # save to log
        red.rpush('echo_log', s)
        # publish message
        red.publish('echo', s)
        if l == 'quit':
            break
def handler():
pubsub = red.pubsub()
# first subscribe, then print log (no race condition this way)
pubsub.subscribe('echo')
# print log
for line in red.lrange('echo_log', 0, -1):
print '.', line
# print channel
for msg in pubsub.listen():
print '>', msg['data']
# Run the subscriber in the background; block the main greenlet until
# the stdin-echo greenlet finishes (i.e. the user types 'quit').
gevent.spawn(handler)
stdin_echo = gevent.spawn(echo_stdin)
stdin_echo.join()
当然,普通的 set、get、sadd、hset 等命令也可以配合 redis 一起使用。但是没什么优势:因为 redis 只启用了一个进程来处理数据的读写,我们在程序中复用的那几个连接最终取数据时,还是要经过那个进程,还不如让它老老实实地干活,别搞多线程让它白白折腾。我这边做了压力测试,python2.7 用了 gevent 之后,批量读写并没有明显的性能提升。
>>> import geventredis
>>> redis_client = geventredis.connect('127.0.0.1', 6379)
>>> redis_client.set('foo', 'bar')
'OK'
>>> for msg in redis_client.monitor():
...     print msg