Skip to content

Commit a59bd8b

Browse files
committed
Implement Django-compatible cache and add testing
1 parent b08a090 commit a59bd8b

12 files changed

+1135
-190
lines changed

db.sqlite3

2 KB
Binary file not shown.

diskcache/__init__.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,13 @@
22

33
from .core import Cache, Disk, EmptyDirWarning
44
from .core import LIMITS, DEFAULT_SETTINGS, EVICTION_POLICY
5-
# from .djangocache import DjangoCache
5+
6+
# Optional backend: DjangoCache is only importable when Django itself is
# installed, so guard the import rather than making Django a hard dependency.
try:
    from .djangocache import DjangoCache
except ImportError:
    # Django not installed so ignore.
    pass
11+
612

713
__title__ = 'diskcache'
814
__version__ = '0.6.0'

diskcache/core.py

+23-1
Original file line numberDiff line numberDiff line change
@@ -454,7 +454,7 @@ def set(self, key, value, read=False, expire=None, tag=None):
454454

455455
row = sql(
456456
'SELECT version, filename FROM Cache WHERE key = ? AND raw = ?',
457-
(db_key, raw)
457+
(db_key, raw),
458458
).fetchone()
459459

460460
if row:
@@ -644,6 +644,28 @@ def __getitem__(self, key):
644644
return value
645645

646646

647+
def __contains__(self, key):
    """Membership test for the cache.

    A key counts as present only when a row exists for it, the row has a
    store time (i.e. the write completed), and the row has not expired.
    """
    db_key, raw = self._disk.put(key)

    row = self._sql(
        'SELECT store_time, expire_time FROM Cache'
        ' WHERE key = ? AND raw = ?',
        (db_key, raw),
    ).fetchone()

    # No row at all: definitely absent.
    if row is None:
        return False

    store_time, expire_time = row

    # A NULL store_time marks an incomplete/invalidated entry.
    if store_time is None:
        return False

    # No expiry means the entry lives forever.
    if expire_time is None:
        return True

    return time.time() < expire_time
667+
668+
647669
def __delitem__(self, key):
648670
sql = self._sql
649671

diskcache/djangocache.py

+29-167
Original file line numberDiff line numberDiff line change
@@ -1,192 +1,54 @@
11
"Django-compatible disk and file-based cache."
22

33
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
4-
# from django.core.files.move import file_move_safe
5-
# from django.utils.encoding import force_bytes
6-
7-
# try:
8-
# from django.utils.six.moves import cPickle as pickle
9-
# except ImportError:
10-
# import pickle
114

125
from .core import Cache
136

147

158
class DjangoCache(BaseCache):
    "Django-compatible disk and file-based cache."

    def __init__(self, directory, params):
        """Initialize backend storing entries under `directory`.

        `params` is the Django CACHES options dict handled by BaseCache.
        """
        super(DjangoCache, self).__init__(params)
        self._cache = Cache(directory)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store `value` for `key` only if `key` is not already cached.

        Returns True when the value was stored, False when the key exists.

        NOTE(review): check-then-set here is not atomic; two concurrent
        callers may both observe the key as absent and both write -- confirm
        whether the underlying Cache offers an atomic add.
        """
        if self.has_key(key, version):
            return False
        self.set(key, value, timeout, version)
        return True

    def get(self, key, default=None, version=None):
        """Return the cached value for `key`, or `default` if missing/expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._cache.get(key, default=default)

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store `value` for `key` with the given `timeout` in seconds."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        timeout = self.get_backend_timeout(timeout=timeout)
        self._cache.set(key, value, expire=timeout)

    def delete(self, key, version=None):
        """Remove `key` from the cache (no-op if absent)."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._cache.delete(key)

    def has_key(self, key, version=None):
        """Return True if `key` is cached and not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return key in self._cache

    def clear(self):
        """Remove all entries from the cache."""
        self._cache.clear()

    def close(self, **kwargs):
        """Close the underlying cache.

        Accepts (and ignores) **kwargs to stay signature-compatible with
        BaseCache.close, which Django invokes via the request_finished
        signal machinery.
        """
        self._cache.close()

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        "Return seconds to expiration."
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        elif timeout == 0:
            # ticket 21147 - avoid time.time() related precision issues
            timeout = -1
        return None if timeout is None else timeout

docs/index.rst

+20-11
Original file line numberDiff line numberDiff line change
@@ -62,21 +62,30 @@ TODO
6262
TODO
6363
----
6464

65-
0. If you use the cache in a thread, you need to close the cache in that thread
66-
1. Stress test eviction policies.
67-
2. Create and test Django interface.
68-
3. Create and test CLI interface.
65+
1. Create and test CLI interface.
6966

7067
- get, set, store, delete, expire, clear, evict, path, check, stats, show
7168

72-
4. Document SQLite database restore trick using dump command and
69+
2. Test and document stampede_barrier.
70+
3. Document SQLite database restore trick using dump command and
7371
cache.check(fix=True).
74-
5. Test and document stampede_barrier.
75-
6. Benchmark ``set`` with delete, then insert.
76-
7. Add DjangoCache to djangopackages/caching.
77-
8. Document: core.Cache objects cannot be pickled.
78-
9. Document: core.Cache objects do not survive os.fork.
79-
10. Dcoument: core.Cache objects are thread-safe.
72+
4. Add DjangoCache to djangopackages/caching.
73+
5. Document: core.Cache objects cannot be pickled.
74+
6. Document: core.Cache objects do not survive os.fork.
75+
7. Document: core.Cache objects are thread-safe, but should be closed.
76+
8. Feature Request: Use multiple caches and multiplexing to work around
77+
SQLite one-writer limitation. Writes are distributed randomly or based on
78+
key count and reads are distributed to all.
79+
9. Feature Request: Atomic increment and decrement.
80+
10. Cached things:
81+
numbers (rankings),
82+
processed text (8-128k),
83+
list of labels (1-10 labels, 6-10 characters each)
84+
cache html and javascript pages (60K, 300K)
85+
list of settings (label, value pairs)
86+
sets of numbers (dozens of integers)
87+
QuerySets
88+
8089

8190
Reference and Indices
8291
---------------------

requirements.txt

+1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
mock==1.3.0
22
nose==1.3.7
33
statistics==1.0.3.5
4+
django==1.9.2

tests/models.py

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import os

# DJANGO_SETTINGS_MODULE must be set before importing anything that touches
# django.conf.settings; setdefault lets an externally-set value win.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')

import django

# Populate the app registry so model classes below can be defined at import
# time without raising AppRegistryNotReady.
django.setup()

from django.db import models
from django.utils import timezone
12+
13+
def expensive_calculation():
14+
expensive_calculation.num_runs += 1
15+
return timezone.now()
16+
17+
18+
class Poll(models.Model):
    # Minimal model used by the cache tests (e.g. caching QuerySets).
    question = models.CharField(max_length=200)
    answer = models.CharField(max_length=200)
    # Default is a callable so expensive_calculation.num_runs counts how
    # often Django evaluates the field default.
    pub_date = models.DateTimeField('date published', default=expensive_calculation)

0 commit comments

Comments
 (0)