123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267 |
- "Database cache backend."
- import base64
- import pickle
- from datetime import datetime
- from django.conf import settings
- from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
- from django.db import DatabaseError, connections, models, router, transaction
- from django.utils import timezone
class Options:
    """Duck-typed stand-in for a Django model's ``_meta`` options.

    Database routers inspect these attributes when deciding which
    database a cache operation should use, so this object only needs to
    *look* like a model ``_meta``, not be one.
    """

    def __init__(self, table):
        # The table name is the only per-instance piece of state; every
        # other attribute is the same for all cache "models".
        self.db_table = table
        vars(self).update(
            app_label='django_cache',
            model_name='cacheentry',
            verbose_name='cache entry',
            verbose_name_plural='cache entries',
            object_name='CacheEntry',
            abstract=False,
            managed=True,
            proxy=False,
            swapped=False,
        )
class BaseDatabaseCache(BaseCache):
    """Shared setup for database-backed caches.

    Records the backing table name and manufactures a throwaway
    "model" class whose only job is to expose ``_meta`` to database
    routers.
    """

    def __init__(self, table, params):
        super().__init__(params)
        self._table = table
        # Routers only ever look at ``model._meta``, so a dynamically
        # built empty class carrying an Options instance is sufficient.
        self.cache_model_class = type('CacheEntry', (), {'_meta': Options(table)})
class DatabaseCache(BaseDatabaseCache):
    """Cache backend storing one row per entry in a database table.

    Each row holds ``cache_key``, a base64-encoded pickle in ``value``,
    and an ``expires`` datetime that is checked on every read.
    """

    # This class uses cursors provided by the database connection. This means
    # it reads expiration values as aware or naive datetimes, depending on the
    # value of USE_TZ and whether the database supports time zones. The ORM's
    # conversion and adaptation infrastructure is then used to avoid comparing
    # aware and naive datetimes accidentally.

    pickle_protocol = pickle.HIGHEST_PROTOCOL

    def get(self, key, default=None, version=None):
        """Fetch a single key, returning ``default`` on a miss."""
        return self.get_many([key], version).get(key, default)

    def get_many(self, keys, version=None):
        """Fetch several keys with one SELECT.

        Return a dict mapping each caller-supplied key to its unpickled
        value; missing and expired keys are omitted. Expired rows found
        along the way are deleted.
        """
        if not keys:
            return {}
        # Map validated (versioned) cache key -> caller-supplied key so
        # the result dict is keyed the way the caller expects.
        key_map = {self.make_and_validate_key(key, version=version): key for key in keys}
        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        quote_name = connection.ops.quote_name
        table = quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute(
                'SELECT %s, %s, %s FROM %s WHERE %s IN (%s)' % (
                    quote_name('cache_key'),
                    quote_name('value'),
                    quote_name('expires'),
                    table,
                    quote_name('cache_key'),
                    ', '.join(['%s'] * len(key_map)),
                ),
                list(key_map),
            )
            rows = cursor.fetchall()

        result = {}
        expired_keys = []
        # Run the raw ``expires`` column through the ORM's datetime
        # converters so naive/aware values are normalized before being
        # compared against timezone.now().
        expression = models.Expression(output_field=models.DateTimeField())
        converters = (connection.ops.get_db_converters(expression) + expression.get_db_converters(connection))
        for key, value, expires in rows:
            for converter in converters:
                expires = converter(expires, expression, connection)
            if expires < timezone.now():
                expired_keys.append(key)
            else:
                value = connection.ops.process_clob(value)
                # NOTE: pickle.loads is acceptable only because the data
                # was written by this backend; never feed it rows from an
                # untrusted source.
                value = pickle.loads(base64.b64decode(value.encode()))
                result[key_map.get(key)] = value
        self._base_delete_many(expired_keys)
        return result

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value`` under ``key``."""
        key = self.make_and_validate_key(key, version=version)
        self._base_set('set', key, value, timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store ``value`` only if ``key`` is absent or expired.

        Return True if the value was stored, False otherwise.
        """
        key = self.make_and_validate_key(key, version=version)
        return self._base_set('add', key, value, timeout)

    def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
        """Update the expiry of an existing ``key`` without changing its
        value. Return True on success, False if the key doesn't exist."""
        key = self.make_and_validate_key(key, version=version)
        return self._base_set('touch', key, None, timeout)

    def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        """Shared implementation for set/add/touch.

        ``mode`` selects the conflict behavior:
        - 'set':   insert or overwrite unconditionally.
        - 'add':   insert, or overwrite only an expired existing row.
        - 'touch': update expiry of an existing row; never insert.
        Return False on a silent failure (DB error or touch miss),
        True otherwise.
        """
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        quote_name = connection.ops.quote_name
        table = quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            # Truncate to whole seconds to match the precision stored in
            # the ``expires`` column.
            now = now.replace(microsecond=0)
            if timeout is None:
                # No timeout: use the maximum representable datetime.
                exp = datetime.max
            else:
                tz = timezone.utc if settings.USE_TZ else None
                exp = datetime.fromtimestamp(timeout, tz=tz)
            exp = exp.replace(microsecond=0)
            # Cull before inserting so the table stays near _max_entries.
            if num > self._max_entries:
                self._cull(db, cursor, now, num)
            pickled = pickle.dumps(value, self.pickle_protocol)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            b64encoded = base64.b64encode(pickled).decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute(
                        'SELECT %s, %s FROM %s WHERE %s = %%s' % (
                            quote_name('cache_key'),
                            quote_name('expires'),
                            table,
                            quote_name('cache_key'),
                        ),
                        [key]
                    )
                    result = cursor.fetchone()

                    if result:
                        # Normalize the stored expiry through the ORM's
                        # converters so it can be compared against ``now``.
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection)

                    # Adapt the new expiry to whatever the backend expects
                    # as a bind parameter for a datetime column.
                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and mode == 'touch':
                        cursor.execute(
                            'UPDATE %s SET %s = %%s WHERE %s = %%s' % (
                                table,
                                quote_name('expires'),
                                quote_name('cache_key')
                            ),
                            [exp, key]
                        )
                    elif result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        # Overwrite: always for 'set', and for 'add' only
                        # when the existing row has already expired.
                        cursor.execute(
                            'UPDATE %s SET %s = %%s, %s = %%s WHERE %s = %%s' % (
                                table,
                                quote_name('value'),
                                quote_name('expires'),
                                quote_name('cache_key'),
                            ),
                            [b64encoded, exp, key]
                        )
                    elif mode != 'touch':
                        cursor.execute(
                            'INSERT INTO %s (%s, %s, %s) VALUES (%%s, %%s, %%s)' % (
                                table,
                                quote_name('cache_key'),
                                quote_name('value'),
                                quote_name('expires'),
                            ),
                            [key, b64encoded, exp]
                        )
                    else:
                        return False  # touch failed.
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True

    def delete(self, key, version=None):
        """Delete ``key``. Return True if a row was removed."""
        key = self.make_and_validate_key(key, version=version)
        return self._base_delete_many([key])

    def delete_many(self, keys, version=None):
        """Delete several keys with a single DELETE statement."""
        keys = [self.make_and_validate_key(key, version=version) for key in keys]
        self._base_delete_many(keys)

    def _base_delete_many(self, keys):
        """Delete already-validated cache keys; return True if any row
        was removed."""
        if not keys:
            return False

        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        quote_name = connection.ops.quote_name
        table = quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute(
                'DELETE FROM %s WHERE %s IN (%s)' % (
                    table,
                    quote_name('cache_key'),
                    ', '.join(['%s'] * len(keys)),
                ),
                keys,
            )
        return bool(cursor.rowcount)

    def has_key(self, key, version=None):
        """Return True if ``key`` exists and has not expired."""
        key = self.make_and_validate_key(key, version=version)

        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        quote_name = connection.ops.quote_name

        # Compare with a naive, second-precision timestamp; the adapted
        # value is bound as a query parameter below.
        now = timezone.now().replace(microsecond=0, tzinfo=None)

        with connection.cursor() as cursor:
            cursor.execute(
                'SELECT %s FROM %s WHERE %s = %%s and expires > %%s' % (
                    quote_name('cache_key'),
                    quote_name(self._table),
                    quote_name('cache_key'),
                ),
                [key, connection.ops.adapt_datetimefield_value(now)]
            )
            return cursor.fetchone() is not None

    def _cull(self, db, cursor, now, num):
        """Shrink the table when it exceeds ``_max_entries``.

        First drop expired rows; if still over the limit, delete the
        oldest 1/_cull_frequency fraction of keys (by key order). A
        ``_cull_frequency`` of 0 means "wipe everything".
        """
        if self._cull_frequency == 0:
            self.clear()
        else:
            connection = connections[db]
            table = connection.ops.quote_name(self._table)
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connection.ops.adapt_datetimefield_value(now)])
            deleted_count = cursor.rowcount
            remaining_num = num - deleted_count
            if remaining_num > self._max_entries:
                cull_num = remaining_num // self._cull_frequency
                # Backend-specific SQL that returns the cache_key at
                # offset ``cull_num``; everything before it is deleted.
                cursor.execute(
                    connection.ops.cache_key_culling_sql() % table,
                    [cull_num])
                last_cache_key = cursor.fetchone()
                if last_cache_key:
                    cursor.execute(
                        'DELETE FROM %s WHERE cache_key < %%s' % table,
                        [last_cache_key[0]],
                    )

    def clear(self):
        """Remove every row from the cache table."""
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)
        with connection.cursor() as cursor:
            cursor.execute('DELETE FROM %s' % table)
|