 "Django-compatible disk and file-based cache."

 from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
-# from django.core.files.move import file_move_safe
-# from django.utils.encoding import force_bytes
-
-# try:
-#     from django.utils.six.moves import cPickle as pickle
-# except ImportError:
-#     import pickle

 from .core import Cache


 class DjangoCache(BaseCache):
-    "Disk and file-based cache compatible with Django."
-
+    "Django-compatible disk and file-based cache."
     def __init__(self, directory, params):
         super(DjangoCache, self).__init__(params)
-
         self._cache = Cache(directory)

-
     def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
-        pass
-
-    add.__doc__ = BaseCache.add.__doc__
-
+        if self.has_key(key, version):
+            return False
+        self.set(key, value, timeout, version)
+        return True

     def get(self, key, default=None, version=None):
-        pass
-
-    get.__doc__ = BaseCache.get.__doc__
-
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+        return self._cache.get(key, default=default)

     def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
-        pass
-
-    set.__doc__ = BaseCache.set.__doc__
-
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+        timeout = self.get_backend_timeout(timeout=timeout)
+        self._cache.set(key, value, expire=timeout)

     def delete(self, key, version=None):
-        pass
-
-    delete.__doc__ = BaseCache.delete.__doc__
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+        self._cache.delete(key)

+    def has_key(self, key, version=None):
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+        return key in self._cache

     def clear(self):
-        pass
+        self._cache.clear()

-    clear.__doc__ = BaseCache.clear.__doc__
-
-
-    def close(self, **kwargs):
+    def close(self):
         self._cache.close()

-    close.__doc__ = BaseCache.close.__doc__
-
-
-# class FileBasedCache(BaseCache):
-#     cache_suffix = '.djcache'
-
-#     def __init__(self, dir, params):
-#         super(FileBasedCache, self).__init__(params)
-#         self._dir = os.path.abspath(dir)
-#         self._createdir()
-
-#     def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
-#         if self.has_key(key, version):
-#             return False
-#         self.set(key, value, timeout, version)
-#         return True
-
-#     def get(self, key, default=None, version=None):
-#         fname = self._key_to_file(key, version)
-#         if os.path.exists(fname):
-#             try:
-#                 with io.open(fname, 'rb') as f:
-#                     if not self._is_expired(f):
-#                         return pickle.loads(zlib.decompress(f.read()))
-#             except IOError as e:
-#                 if e.errno == errno.ENOENT:
-#                     pass  # Cache file was removed after the exists check
-#         return default
-
-#     def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
-#         self._createdir()  # Cache dir can be deleted at any time.
-#         fname = self._key_to_file(key, version)
-#         self._cull()  # make some room if necessary
-#         fd, tmp_path = tempfile.mkstemp(dir=self._dir)
-#         renamed = False
-#         try:
-#             with io.open(fd, 'wb') as f:
-#                 expiry = self.get_backend_timeout(timeout)
-#                 f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
-#                 f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
-#             file_move_safe(tmp_path, fname, allow_overwrite=True)
-#             renamed = True
-#         finally:
-#             if not renamed:
-#                 os.remove(tmp_path)
-
-#     def delete(self, key, version=None):
-#         self._delete(self._key_to_file(key, version))
-
-#     def _delete(self, fname):
-#         if not fname.startswith(self._dir) or not os.path.exists(fname):
-#             return
-#         try:
-#             os.remove(fname)
-#         except OSError as e:
-#             # ENOENT can happen if the cache file is removed (by another
-#             # process) after the os.path.exists check.
-#             if e.errno != errno.ENOENT:
-#                 raise
-
-#     def has_key(self, key, version=None):
-#         fname = self._key_to_file(key, version)
-#         if os.path.exists(fname):
-#             with io.open(fname, 'rb') as f:
-#                 return not self._is_expired(f)
-#         return False
-
-#     def _cull(self):
-#         """
-#         Removes random cache entries if max_entries is reached at a ratio
-#         of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
-#         that the entire cache will be purged.
-#         """
-#         filelist = self._list_cache_files()
-#         num_entries = len(filelist)
-#         if num_entries < self._max_entries:
-#             return  # return early if no culling is required
-#         if self._cull_frequency == 0:
-#             return self.clear()  # Clear the cache when CULL_FREQUENCY = 0
-#         # Delete a random selection of entries
-#         filelist = random.sample(filelist,
-#                                  int(num_entries / self._cull_frequency))
-#         for fname in filelist:
-#             self._delete(fname)
-
-#     def _createdir(self):
-#         if not os.path.exists(self._dir):
-#             try:
-#                 os.makedirs(self._dir, 0o700)
-#             except OSError as e:
-#                 if e.errno != errno.EEXIST:
-#                     raise EnvironmentError(
-#                         "Cache directory '%s' does not exist "
-#                         "and could not be created'" % self._dir)
-
-#     def _key_to_file(self, key, version=None):
-#         """
-#         Convert a key into a cache file path. Basically this is the
-#         root cache path joined with the md5sum of the key and a suffix.
-#         """
-#         key = self.make_key(key, version=version)
-#         self.validate_key(key)
-#         return os.path.join(self._dir, ''.join(
-#             [hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix]))
-
-#     def clear(self):
-#         """
-#         Remove all the cache files.
-#         """
-#         if not os.path.exists(self._dir):
-#             return
-#         for fname in self._list_cache_files():
-#             self._delete(fname)
-
-#     def _is_expired(self, f):
-#         """
-#         Takes an open cache file and determines if it has expired,
-#         deletes the file if it is has passed its expiry time.
-#         """
-#         exp = pickle.load(f)
-#         if exp is not None and exp < time.time():
-#             f.close()  # On Windows a file has to be closed before deleting
-#             self._delete(f.name)
-#             return True
-#         return False
-
-#     def _list_cache_files(self):
-#         """
-#         Get a list of paths to all the cache files. These are all the files
-#         in the root cache dir that end on the cache_suffix.
-#         """
-#         if not os.path.exists(self._dir):
-#             return []
-#         filelist = [os.path.join(self._dir, fname) for fname
-#                     in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
-#         return filelist
+    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
+        "Return seconds to expiration."
+        if timeout == DEFAULT_TIMEOUT:
+            timeout = self.default_timeout
+        elif timeout == 0:
+            # ticket 21147 - avoid time.time() related precision issues
+            timeout = -1
+        return None if timeout is None else timeout
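
A usage sketch, not part of this commit: once the class above is importable, it plugs into Django's CACHES setting like any other backend, with LOCATION handed in as the `directory` argument and the remaining options delivered as `params`. The dotted BACKEND path and the LOCATION directory below are illustrative assumptions.

    # settings.py (sketch) -- the BACKEND dotted path and LOCATION directory
    # are assumptions for illustration, not taken from this commit.
    CACHES = {
        'default': {
            'BACKEND': 'diskcache.djangocache.DjangoCache',
            'LOCATION': '/tmp/django-diskcache',
            'TIMEOUT': 300,  # read by BaseCache.__init__ as default_timeout
        },
    }

With that configuration, `django.core.cache.cache.set('k', 'v', 60)` builds and validates the versioned key and hands it to `Cache.set` with `expire=60`, `add` writes only when `has_key` reports the key absent, and `clear` and `close` delegate straight to the underlying `Cache`.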
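
The `get_backend_timeout` override returns a plain number of seconds, which is the value `set` forwards to `Cache.set` as `expire`. A minimal sketch of the resulting conversions, assuming the module is importable as `diskcache.djangocache` and that `BaseCache` falls back to a 300-second default timeout when `params` specifies none:

    import tempfile

    from diskcache.djangocache import DjangoCache  # assumed import path

    # Hypothetical instance backed by a throwaway directory.
    cache = DjangoCache(tempfile.mkdtemp(), params={})
    assert cache.get_backend_timeout() == 300       # DEFAULT_TIMEOUT -> default_timeout
    assert cache.get_backend_timeout(60) == 60      # plain seconds pass straight through
    assert cache.get_backend_timeout(0) == -1       # per the ticket 21147 note, zero maps to -1
    assert cache.get_backend_timeout(None) is None  # None passes through unchanged
    cache.close()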