forked from vivekn/redis-simple-cache
-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathrediscache.py
412 lines (345 loc) · 14.3 KB
/
rediscache.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
"""
A simple redis-cache interface for storing python objects.
"""
from functools import wraps
import pickle
import json
import hashlib
import redis
import logging
import six
DEFAULT_EXPIRY = 60 * 60 * 24  # default key TTL: one day, in seconds
class RedisConnect(object):
    """
    A simple object to store and pass database connection information.
    This makes the Simple Cache class a little more flexible, for cases
    where redis connection configuration needs customizing.
    """
    def __init__(self, host=None, port=None, db=None, password=None, ssl=False):
        self.host = host if host else 'localhost'
        self.port = port if port else 6379
        self.db = db if db else 0
        self.password = password
        self.ssl = ssl

    def connect(self):
        """
        We cannot assume that connection will succeed, as such we use a ping()
        method in the redis client library to validate ability to contact redis.
        RedisNoConnException is raised if we fail to ping.
        :return: redis.StrictRedis Connection Object
        """
        try:
            redis.StrictRedis(host=self.host, port=self.port,
                              password=self.password, ssl=self.ssl).ping()
        except redis.ConnectionError as e:
            raise RedisNoConnException("Failed to create connection to redis",
                                       (self.host, self.port)) from e
        # BUGFIX: propagate ssl to the connection actually returned; it was
        # previously only applied to the throwaway ping probe, so an
        # SSL-configured cache silently used a plaintext connection.
        return redis.StrictRedis(host=self.host,
                                 port=self.port,
                                 db=self.db,
                                 password=self.password,
                                 ssl=self.ssl)
class CacheMissException(Exception):
    """Raised when a requested key was never stored in the cache."""
class ExpiredKeyException(Exception):
    """Raised when a tracked key exists in the key set but its value has expired."""
class RedisNoConnException(Exception):
    """Raised when a connection to the redis server cannot be established."""
class DoNotCache(Exception):
    """
    Escape hatch for cached functions: raise this inside a decorated
    function to return a value to the caller WITHOUT storing it in redis.
    The decorator catches it and uses `result` as the return value.
    """
    # Class-level default so `result` is safe to read even on a bare instance.
    _result = None

    def __init__(self, result):
        super(DoNotCache, self).__init__()
        self._result = result

    @property
    def result(self):
        """The value to hand back to the caller, uncached."""
        return self._result
class SimpleCache(object):
    """
    A size-limited, namespaced cache backed by redis.

    Raw keys are tracked in a single redis set (`get_set_name()`) so the
    cache can enforce `limit` and flush in bulk; each value is stored under
    a prefixed key built by `make_key()`.
    """

    def __init__(self,
                 limit=10000,
                 expire=DEFAULT_EXPIRY,
                 hashkeys=False,
                 host=None,
                 port=None,
                 db=None,
                 password=None,
                 ssl=False,
                 client=None,
                 namespace="SimpleCache",
                 read_only_host=None,
                 read_client=None):
        self.limit = limit  # No of json encoded strings to cache
        self.expire = expire  # Time to keys to expire in seconds
        self.prefix = namespace
        self.host = host
        self.port = port
        self.db = db
        self.password = password
        self.ssl = ssl
        self.read_only_host = read_only_host

        # An injected client wins; otherwise build a connection.  A failed
        # connection degrades to None so decorated callers can no-op.
        try:
            if client:
                self.write_connection = client
            else:
                self.write_connection = RedisConnect(host=self.host,
                                                     port=self.port,
                                                     db=self.db,
                                                     password=self.password,
                                                     ssl=self.ssl).connect()
        except RedisNoConnException:
            self.write_connection = None

        try:
            if read_client:
                self.read_connection = read_client
            else:
                # NOTE(review): when read_only_host is None this falls back
                # to RedisConnect's 'localhost' default — confirm intended.
                self.read_connection = RedisConnect(host=self.read_only_host,
                                                    port=self.port,
                                                    db=self.db,
                                                    password=self.password,
                                                    ssl=self.ssl).connect()
        except RedisNoConnException:
            self.read_connection = None

        # Should we hash keys? There is a very small risk of collision involved.
        self.hashkeys = hashkeys

    def get_pipeline(self, mode="write"):
        """Return a pipeline on the read or write connection."""
        if mode == "read":
            return self.read_connection.pipeline()
        else:
            return self.write_connection.pipeline()

    def make_key(self, key):
        """Build the namespaced redis key a value is stored under."""
        return "SimpleCache-{0}:{1}".format(self.prefix, key)

    def namespace_key(self, namespace):
        """Glob pattern matching every stored key under a sub-namespace."""
        return self.make_key(namespace + ':*')

    def get_set_name(self):
        """Name of the redis set tracking this cache's raw keys."""
        return "SimpleCache-{0}-keys".format(self.prefix)

    def store(self, key, value, expire=None):
        """
        Method stores a value after checking for space constraints and
        freeing up space if required.
        :param key: key by which to reference datum being stored in Redis
        :param value: actual value being stored under this key
        :param expire: time-to-live (ttl) for this datum; None means the
                       instance default, a non-positive int stores with no TTL
        """
        key = to_unicode(key)
        value = to_unicode(value)
        set_name = self.get_set_name()

        # Evict arbitrary members until we are back under the size limit.
        while self.write_connection.scard(set_name) >= self.limit:
            # BUGFIX: spop returns bytes under Python 3; without decoding,
            # make_key would build a mismatched "...:b'key'" name and the
            # evicted value would never actually be deleted.
            del_key = to_unicode(self.write_connection.spop(set_name))
            self.write_connection.delete(self.make_key(del_key))

        pipe = self.get_pipeline(mode="write")
        if expire is None:
            expire = self.expire

        if (isinstance(expire, int) and expire <= 0) or (expire is None):
            pipe.set(self.make_key(key), value)
        else:
            pipe.setex(name=self.make_key(key), time=expire, value=value)

        pipe.sadd(set_name, key)
        pipe.execute()

    def expire_all_in_set(self):
        """
        Method expires all keys in the namespace of this object.
        At times there is a need to invalidate cache in bulk, because a
        single change may result in all data returned by a decorated function
        to be altered.
        Method returns a tuple where first value is total number of keys in
        the set of this object's namespace and second value is a number of
        keys successfully expired.
        :return: int, int
        """
        all_members = self.keys()
        # to_unicode: set members come back as bytes under Python 3.
        keys = [self.make_key(to_unicode(k)) for k in all_members]
        if keys:  # DEL with zero arguments is a redis error
            with self.get_pipeline(mode="write") as pipe:
                pipe.delete(*keys)
                pipe.execute()
        return len(self), len(all_members)

    def expire_namespace(self, namespace):
        """
        Method expires all keys under the given sub-namespace.
        Method returns a tuple where first value is total number of keys in
        the set of this object's namespace and second value is a number of
        keys successfully expired.
        :return: int, int
        """
        namespace = self.namespace_key(namespace)
        all_members = list(self.read_connection.keys(namespace))
        if all_members:  # DEL with zero arguments is a redis error
            with self.get_pipeline(mode="write") as pipe:
                pipe.delete(*all_members)
                pipe.execute()
        return len(self), len(all_members)

    def isexpired(self, key):
        """
        Method determines whether a given key is already expired. If not expired,
        we expect to get back current ttl for the given key.
        :param key: key being looked-up in Redis
        :return: bool (True) if expired, or int representing current time-to-live (ttl) value
        """
        # NOTE(review): this probes three different key layouts and looks
        # inconsistent with how store() writes keys — left as-is to avoid
        # changing observable behavior; confirm against callers.
        ttl = self.read_connection.pttl("SimpleCache-{0}".format(key))
        if ttl == -2:  # key does not exist
            ttl = self.read_connection.pttl(self.make_key(key))
        elif ttl == -1:  # key exists but carries no TTL
            return True
        if ttl is not None:
            return ttl
        else:
            return self.read_connection.pttl("{0}:{1}".format(self.prefix, key))

    def store_json(self, key, value, expire=None):
        """Store `value` serialized as JSON."""
        self.store(key, json.dumps(value), expire)

    def store_pickle(self, key, value, expire=None):
        """Store `value` serialized with pickle."""
        # NOTE(review): pickle.dumps returns binary bytes on Python 3 which
        # store() will attempt to utf-8 decode — confirm a text-safe pickle
        # protocol before relying on this path under Python 3.
        self.store(key, pickle.dumps(value), expire)

    def get(self, key):
        """
        Fetch the raw stored value for `key`.
        :raises CacheMissException: key was never stored (not in the key set)
        :raises ExpiredKeyException: key is tracked but its value has expired
        """
        key = to_unicode(key)
        if key:  # No need to validate membership, which is an O(1) operation, but seems we can do without.
            value = self.read_connection.get(self.make_key(key))
            if value is None:  # expired key
                if key not in self:  # If key does not exist at all, it is a straight miss.
                    raise CacheMissException

                # Drop the stale member so the tracking set stays accurate.
                self.write_connection.srem(self.get_set_name(), key)
                raise ExpiredKeyException
            else:
                return value
        # NOTE(review): a falsy key silently returns None here.

    def mget(self, keys):
        """
        Method returns a dict of key/values for found keys.
        :param keys: array of keys to look up in Redis
        :return: dict of found key/values
        """
        if keys:
            cache_keys = [self.make_key(to_unicode(key)) for key in keys]
            values = self.read_connection.mget(cache_keys)

            if None in values:
                pipe = self.get_pipeline(mode="write")
                for key, value in zip(keys, values):
                    if value is None:  # non-existent or expired key
                        # BUGFIX: the tracking set stores RAW keys, but the
                        # old code srem'd the prefixed cache_key, so stale
                        # members were never actually removed.
                        pipe.srem(self.get_set_name(), to_unicode(key))
                pipe.execute()

            return {k: v for (k, v) in zip(keys, values) if v is not None}

    def get_json(self, key):
        """Fetch and JSON-decode the value stored under `key`."""
        return json.loads(self.get(key))

    def get_pickle(self, key):
        """Fetch and unpickle the value stored under `key`."""
        return pickle.loads(self.get(key))

    def mget_json(self, keys):
        """
        Method returns a dict of key/values for found keys with each value
        parsed from JSON format.
        :param keys: array of keys to look up in Redis
        :return: dict of found key/values with values parsed from JSON format
        """
        d = self.mget(keys)
        if d:
            for key in d.keys():
                d[key] = json.loads(d[key]) if d[key] else None
        return d

    def invalidate(self, key):
        """
        Method removes (invalidates) an item from the cache.
        :param key: key to remove from Redis
        """
        key = to_unicode(key)
        pipe = self.get_pipeline(mode="write")
        pipe.srem(self.get_set_name(), key)
        pipe.delete(self.make_key(key))
        pipe.execute()

    def __contains__(self, key):
        return self.read_connection.sismember(self.get_set_name(), key)

    def __iter__(self):
        # Yields "prefix:key" labels; degrades to empty when redis is down.
        if not self.read_connection:
            return iter([])
        return iter(
            ["{0}:{1}".format(self.prefix, x)
             for x in self.read_connection.smembers(self.get_set_name())
             ])

    def __len__(self):
        return self.read_connection.scard(self.get_set_name())

    def keys(self):
        """Raw members of the tracking set (bytes under Python 3)."""
        return self.read_connection.smembers(self.get_set_name())

    def flush(self):
        """Delete every cached value in this namespace plus the tracking set."""
        # BUGFIX: the tracking set holds RAW keys while values live under the
        # prefixed keys; the old code deleted the raw names, leaving every
        # stored value behind.
        keys = [self.make_key(to_unicode(k)) for k in self.keys()]
        keys.append(self.get_set_name())
        with self.get_pipeline(mode="write") as pipe:
            pipe.delete(*keys)
            pipe.execute()

    def flush_namespace(self, space):
        """Delete every cached value under sub-namespace `space`."""
        namespace = self.namespace_key(space)
        setname = self.get_set_name()
        keys = list(self.read_connection.keys(namespace))
        if not keys:  # DEL/SREM with zero members is a redis error
            return
        # BUGFIX: `pipe.srem(setname, *space)` splatted the namespace STRING
        # into individual characters.  Strip the storage prefix off the found
        # keys to recover the raw set members and remove those instead.
        prefix = "SimpleCache-{0}:".format(self.prefix)
        members = [to_unicode(k)[len(prefix):] for k in keys]
        with self.get_pipeline(mode="write") as pipe:
            pipe.delete(*keys)
            pipe.srem(setname, *members)
            pipe.execute()

    def get_hash(self, args):
        """
        Derive a cache key fragment from serialized call arguments: an md5
        hexdigest when `hashkeys` is set, otherwise the pickled args.
        """
        if self.hashkeys:
            # BUGFIX: hashlib.md5 requires bytes on Python 3, but the json
            # serializer hands us a str — encode before hashing.
            if isinstance(args, str):
                args = args.encode('utf-8')
            key = hashlib.md5(args).hexdigest()
        else:
            # NOTE(review): pickle.dumps yields bytes on Python 3, so the
            # resulting cache key embeds a b'...' repr — confirm acceptable.
            key = pickle.dumps(args)
        return key
def cache_it(limit=10000, expire=DEFAULT_EXPIRY, cache=None,
             use_json=False, namespace=None):
    """
    Decorator that caches a function's return value in redis.

    Arguments and function result must be pickleable (or JSON-serializable
    when `use_json` is True).
    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param use_json: serialize values with json instead of pickle
    :param namespace: optional extra prefix for the cache key
    :return: decorated function
    """
    cache_ = cache  ## Since python 2.x doesn't have the nonlocal keyword, we need to do this
    expire_ = expire  ## Same here.

    def decorator(function):
        cache, expire = cache_, expire_
        if cache is None:
            cache = SimpleCache(limit, expire, hashkeys=True, namespace=function.__module__)
        elif expire == DEFAULT_EXPIRY:
            # If the expire arg value is the default, set it to None so we store
            # the expire value of the passed cache object
            expire = None

        @wraps(function)
        def func(*args, **kwargs):
            ## Handle cases where caching is down or otherwise not available.
            if cache.write_connection is None:
                result = function(*args, **kwargs)
                return result

            serializer = json if use_json else pickle
            fetcher = cache.get_json if use_json else cache.get_pickle
            storer = cache.store_json if use_json else cache.store_pickle

            ## Key will be either a md5 hash or just pickle object,
            ## in the form of `function name`:`key`
            key = cache.get_hash(serializer.dumps([args, kwargs]))
            cache_key = '{func_name}:{key}'.format(func_name=function.__name__,
                                                   key=key)
            if namespace:
                cache_key = '{namespace}:{key}'.format(namespace=namespace,
                                                       key=cache_key)

            try:
                return fetcher(cache_key)
            except (ExpiredKeyException, CacheMissException):
                ## Expected misses: fall through and recompute the value.
                pass
            except Exception:
                # BUGFIX: was a bare `except:`, which also swallowed
                # SystemExit and KeyboardInterrupt.
                logging.exception("Unknown redis-simple-cache error. Please check your Redis free space.")

            try:
                result = function(*args, **kwargs)
            except DoNotCache as e:
                # Function opted out of caching for this call.
                result = e.result
            else:
                try:
                    storer(cache_key, result, expire)
                except redis.ConnectionError as e:
                    # Best-effort store: a write failure must not break the call.
                    logging.exception(e)

            return result
        return func
    return decorator
def cache_it_json(limit=10000, expire=DEFAULT_EXPIRY, cache=None, namespace=None):
    """
    Arguments and function result must be able to convert to JSON.
    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param namespace: optional extra prefix for the cache key
    :return: decorated function
    """
    # BUGFIX: `namespace` was hard-coded to None, silently discarding the
    # caller's namespace argument.
    return cache_it(limit=limit, expire=expire, use_json=True,
                    cache=cache, namespace=namespace)
def to_unicode(obj, encoding='utf-8'):
    """
    Return `obj` as text: bytes are decoded using `encoding`; everything
    else is passed through unchanged.
    """
    # BUGFIX: the old check used six.string_types, which on Python 3 is
    # (str,) — so str(obj, encoding) was called on a str and raised
    # "TypeError: decoding str is not supported".  Only bytes need decoding.
    if isinstance(obj, (bytes, bytearray)):
        obj = obj.decode(encoding)
    return obj