Skip to content

Commit

Permalink
Implement Django-compatible cache and add testing
Browse files Browse the repository at this point in the history
  • Loading branch information
grantjenks committed Feb 23, 2016
1 parent b08a090 commit a59bd8b
Show file tree
Hide file tree
Showing 12 changed files with 1,135 additions and 190 deletions.
Binary file added db.sqlite3
Binary file not shown.
8 changes: 7 additions & 1 deletion diskcache/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,13 @@

from .core import Cache, Disk, EmptyDirWarning
from .core import LIMITS, DEFAULT_SETTINGS, EVICTION_POLICY
# from .djangocache import DjangoCache

try:
from .djangocache import DjangoCache
except ImportError:
# Django not installed so ignore.
pass


__title__ = 'diskcache'
__version__ = '0.6.0'
Expand Down
24 changes: 23 additions & 1 deletion diskcache/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ def set(self, key, value, read=False, expire=None, tag=None):

row = sql(
'SELECT version, filename FROM Cache WHERE key = ? AND raw = ?',
(db_key, raw)
(db_key, raw),
).fetchone()

if row:
Expand Down Expand Up @@ -644,6 +644,28 @@ def __getitem__(self, key):
return value


def __contains__(self, key):
    """Return True if *key* maps to a stored, unexpired cache entry.

    Supports the ``key in cache`` membership test.
    """
    db_key, raw = self._disk.put(key)

    cursor = self._sql(
        'SELECT store_time, expire_time FROM Cache'
        ' WHERE key = ? AND raw = ?',
        (db_key, raw),
    )
    row = cursor.fetchone()

    if row is None:
        # No row for this key at all.
        return False

    store_time, expire_time = row

    if store_time is None:
        # Row exists but a value was never (fully) stored for it.
        return False

    if expire_time is None:
        # Entry never expires.
        return True

    return time.time() < expire_time


def __delitem__(self, key):
sql = self._sql

Expand Down
196 changes: 29 additions & 167 deletions diskcache/djangocache.py
Original file line number Diff line number Diff line change
@@ -1,192 +1,54 @@
"Django-compatible disk and file-based cache."

from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
# from django.core.files.move import file_move_safe
# from django.utils.encoding import force_bytes

# try:
# from django.utils.six.moves import cPickle as pickle
# except ImportError:
# import pickle

from .core import Cache


class DjangoCache(BaseCache):
    "Disk and file-based cache compatible with Django."

    def __init__(self, directory, params):
        """Initialize the cache in *directory* with Django cache *params*."""
        super(DjangoCache, self).__init__(params)
        self._cache = Cache(directory)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store *value* at *key* only when *key* is absent or expired.

        Return True when the value was stored, False otherwise.
        """
        # NOTE(review): check-then-set is not atomic; a concurrent writer
        # may store the key between has_key() and set().
        if self.has_key(key, version):
            return False
        self.set(key, value, timeout, version)
        return True

    def get(self, key, default=None, version=None):
        """Return the value at *key*, or *default* when missing/expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._cache.get(key, default=default)

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store *value* at *key*, expiring after *timeout* seconds."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        timeout = self.get_backend_timeout(timeout=timeout)
        self._cache.set(key, value, expire=timeout)

    def delete(self, key, version=None):
        """Remove *key* from the cache; a no-op when absent."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._cache.delete(key)

    def has_key(self, key, version=None):
        """Return True when an unexpired value is stored at *key*."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return key in self._cache

    def clear(self):
        """Remove all entries from the cache."""
        self._cache.clear()

    def close(self, **kwargs):
        """Close the underlying cache connection.

        Accepts **kwargs to stay signature-compatible with
        ``BaseCache.close(self, **kwargs)`` so callers passing keyword
        arguments do not break.
        """
        self._cache.close()

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        """Return seconds to expiration, or None to cache forever."""
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        elif timeout == 0:
            # ticket 21147 - avoid time.time() related precision issues
            timeout = -1
        return None if timeout is None else timeout
31 changes: 20 additions & 11 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -62,21 +62,30 @@ TODO
TODO
----

0. If you use the cache in a thread, you need to close the cache in that thread
1. Stress test eviction policies.
2. Create and test Django interface.
3. Create and test CLI interface.
1. Create and test CLI interface.

- get, set, store, delete, expire, clear, evict, path, check, stats, show

4. Document SQLite database restore trick using dump command and
2. Test and document stampede_barrier.
3. Document SQLite database restore trick using dump command and
cache.check(fix=True).
5. Test and document stampede_barrier.
6. Benchmark ``set`` with delete, then insert.
7. Add DjangoCache to djangopackages/caching.
8. Document: core.Cache objects cannot be pickled.
9. Document: core.Cache objects do not survive os.fork.
10. Document: core.Cache objects are thread-safe.
4. Add DjangoCache to djangopackages/caching.
5. Document: core.Cache objects cannot be pickled.
6. Document: core.Cache objects do not survive os.fork.
7. Document: core.Cache objects are thread-safe, but should be closed.
8. Feature Request: Use multiple caches and multiplexing to work around
SQLite one-writer limitation. Writes are distributed randomly or based on
key count and reads are distributed to all.
9. Feature Request: Atomic increment and decrement.
10. Cached things:
numbers (rankings),
processed text (8-128k),
list of labels (1-10 labels, 6-10 characters each)
cache html and javascript pages (60K, 300K)
list of settings (label, value pairs)
sets of numbers (dozens of integers)
QuerySets


Reference and Indices
---------------------
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
mock==1.3.0
nose==1.3.7
statistics==1.0.3.5
django==1.9.2
21 changes: 21 additions & 0 deletions tests/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import os

# Point Django at the test settings module BEFORE any Django machinery
# that reads settings is imported below.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')

import django

# Populate Django's app registry so the model classes in this module can
# be declared at import time without AppRegistryNotReady errors.
django.setup()

from django.db import models
from django.utils import timezone

def expensive_calculation():
    """Return the current time, counting invocations on the function itself.

    The ``num_runs`` attribute must be initialized by the caller before the
    first call; each call increments it by one.
    """
    expensive_calculation.num_runs = expensive_calculation.num_runs + 1
    return timezone.now()


class Poll(models.Model):
    """Minimal model used by the cache tests."""
    question = models.CharField(max_length=200)
    answer = models.CharField(max_length=200)
    # Default is the callable itself, so every unsaved-default access
    # invokes expensive_calculation (letting tests count evaluations).
    pub_date = models.DateTimeField('date published', default=expensive_calculation)
Loading

0 comments on commit a59bd8b

Please sign in to comment.