- import datetime
- import time
- from django.db.utils import DatabaseError
- try:
- from django.utils.six.moves import _thread as thread
- except ImportError:
- from django.utils.six.moves import _dummy_thread as thread
- from collections import namedtuple
- from contextlib import contextmanager
- from django.conf import settings
- from django.db import DEFAULT_DB_ALIAS
- from django.db.backends.signals import connection_created
- from django.db.backends import util
- from django.db.transaction import TransactionManagementError
- from django.db.utils import DatabaseErrorWrapper
- from django.utils.functional import cached_property
- from django.utils.importlib import import_module
- from django.utils import six
- from django.utils import timezone
- class BaseDatabaseWrapper(object):
- """
- Represents a database connection.
- """
- ops = None
- vendor = 'unknown'
- def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
- allow_thread_sharing=False):
- # `settings_dict` should be a dictionary containing keys such as
- # NAME, USER, etc. It's called `settings_dict` instead of `settings`
- # to disambiguate it from Django settings modules.
- self.connection = None
- self.queries = []
- self.settings_dict = settings_dict
- self.alias = alias
- self.use_debug_cursor = None
- # Savepoint management related attributes
- self.savepoint_state = 0
- # Transaction management related attributes
- self.autocommit = False
- self.transaction_state = []
- # Tracks if the connection is believed to be in transaction. This is
- # set somewhat aggressively, as the DBAPI doesn't make it easy to
- # deduce if the connection is in transaction or not.
- self._dirty = False
- # Tracks if the connection is in a transaction managed by 'atomic'.
- self.in_atomic_block = False
- # List of savepoints created by 'atomic'
- self.savepoint_ids = []
- # Tracks if the outermost 'atomic' block should commit on exit,
- # ie. if autocommit was active on entry.
- self.commit_on_exit = True
- # Tracks if the transaction should be rolled back to the next
- # available savepoint because of an exception in an inner block.
- self.needs_rollback = False
- # Connection termination related attributes
- self.close_at = None
- self.errors_occurred = False
- # Thread-safety related attributes
- self.allow_thread_sharing = allow_thread_sharing
- self._thread_ident = thread.get_ident()
- def __eq__(self, other):
- if isinstance(other, BaseDatabaseWrapper):
- return self.alias == other.alias
- return NotImplemented
- def __ne__(self, other):
- return not self == other
- def __hash__(self):
- return hash(self.alias)
- ##### Backend-specific methods for creating connections and cursors #####
- def get_connection_params(self):
- """Returns a dict of parameters suitable for get_new_connection."""
- raise NotImplementedError
- def get_new_connection(self, conn_params):
- """Opens a connection to the database."""
- raise NotImplementedError
- def init_connection_state(self):
- """Initializes the database connection settings."""
- raise NotImplementedError
- def create_cursor(self):
- """Creates a cursor. Assumes that a connection is established."""
- raise NotImplementedError
- ##### Backend-specific methods for creating connections #####
- def connect(self):
- """Connects to the database. Assumes that the connection is closed."""
- # In case the previous connection was closed while in an atomic block
- self.in_atomic_block = False
- self.savepoint_ids = []
- # Reset parameters defining when to close the connection
- max_age = self.settings_dict['CONN_MAX_AGE']
- self.close_at = None if max_age is None else time.time() + max_age
- self.errors_occurred = False
- # Establish the connection
- conn_params = self.get_connection_params()
- self.connection = self.get_new_connection(conn_params)
- self.init_connection_state()
- if self.settings_dict['AUTOCOMMIT']:
- self.set_autocommit(True)
- connection_created.send(sender=self.__class__, connection=self)
- def ensure_connection(self):
- """
- Guarantees that a connection to the database is established.
- """
- if self.connection is None:
- with self.wrap_database_errors():
- self.connect()
- ##### Backend-specific wrappers for PEP-249 connection methods #####
- def _cursor(self):
- self.ensure_connection()
- with self.wrap_database_errors():
- return self.create_cursor()
- def _commit(self):
- if self.connection is not None:
- with self.wrap_database_errors():
- return self.connection.commit()
- def _rollback(self):
- if self.connection is not None:
- with self.wrap_database_errors():
- return self.connection.rollback()
- def _close(self):
- if self.connection is not None:
- with self.wrap_database_errors():
- return self.connection.close()
- ##### Generic wrappers for PEP-249 connection methods #####
- def cursor(self):
- """
- Creates a cursor, opening a connection if necessary.
- """
- self.validate_thread_sharing()
- if (self.use_debug_cursor or
- (self.use_debug_cursor is None and settings.DEBUG)):
- cursor = self.make_debug_cursor(self._cursor())
- else:
- cursor = util.CursorWrapper(self._cursor(), self)
- return cursor
- def commit(self):
- """
- Commits a transaction and resets the dirty flag.
- """
- self.validate_thread_sharing()
- self.validate_no_atomic_block()
- self._commit()
- self.set_clean()
- def rollback(self):
- """
- Rolls back a transaction and resets the dirty flag.
- """
- self.validate_thread_sharing()
- self.validate_no_atomic_block()
- self._rollback()
- self.set_clean()
- def close(self):
- """
- Closes the connection to the database.
- """
- self.validate_thread_sharing()
- # Don't call validate_no_atomic_block() to avoid making it difficult
- # to get rid of a connection in an invalid state. The next connect()
- # will reset the transaction state anyway.
- try:
- self._close()
- finally:
- self.connection = None
- self.set_clean()
- ##### Backend-specific savepoint management methods #####
- def _savepoint(self, sid):
- self.cursor().execute(self.ops.savepoint_create_sql(sid))
- def _savepoint_rollback(self, sid):
- self.cursor().execute(self.ops.savepoint_rollback_sql(sid))
- def _savepoint_commit(self, sid):
- self.cursor().execute(self.ops.savepoint_commit_sql(sid))
- def _savepoint_allowed(self):
- # Savepoints cannot be created outside a transaction
- return self.features.uses_savepoints and not self.get_autocommit()
- ##### Generic savepoint management methods #####
- def savepoint(self):
- """
- Creates a savepoint inside the current transaction. Returns an
- identifier for the savepoint that will be used for the subsequent
- rollback or commit. Does nothing if savepoints are not supported.
- """
- if not self._savepoint_allowed():
- return
- thread_ident = thread.get_ident()
- tid = str(thread_ident).replace('-', '')
- self.savepoint_state += 1
- sid = "s%s_x%d" % (tid, self.savepoint_state)
- self.validate_thread_sharing()
- self._savepoint(sid)
- return sid
- def savepoint_rollback(self, sid):
- """
- Rolls back to a savepoint. Does nothing if savepoints are not supported.
- """
- if not self._savepoint_allowed():
- return
- self.validate_thread_sharing()
- self._savepoint_rollback(sid)
- def savepoint_commit(self, sid):
- """
- Releases a savepoint. Does nothing if savepoints are not supported.
- """
- if not self._savepoint_allowed():
- return
- self.validate_thread_sharing()
- self._savepoint_commit(sid)
- def clean_savepoints(self):
- """
- Resets the counter used to generate unique savepoint ids in this thread.
- """
- self.savepoint_state = 0
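- # Illustrative usage sketch (not part of the original module): inside a
- # manually managed transaction, the generic savepoint API above can guard a
- # risky statement, assuming `connection` is an instance of this wrapper and
- # the UPDATE targets a hypothetical table:
- #
- #     sid = connection.savepoint()
- #     try:
- #         connection.cursor().execute("UPDATE app_item SET x = 1")
- #     except DatabaseError:
- #         connection.savepoint_rollback(sid)
- #     else:
- #         connection.savepoint_commit(sid)
- #
- # savepoint() returns None when savepoints are unavailable, in which case the
- # rollback/commit calls above are no-ops as well.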
- ##### Backend-specific transaction management methods #####
- def _set_autocommit(self, autocommit):
- """
- Backend-specific implementation to enable or disable autocommit.
- """
- raise NotImplementedError
- ##### Generic transaction management methods #####
- def enter_transaction_management(self, managed=True, forced=False):
- """
- Enters transaction management for a running thread. It must be balanced with
- the appropriate leave_transaction_management call, since the actual state is
- managed as a stack.
- The state and dirty flag are carried over from the surrounding block or
- from the settings, if there is no surrounding block (dirty is always false
- when no current block is running).
- If you switch off transaction management and there is a pending
- commit/rollback, the data will be committed, unless "forced" is True.
- """
- self.validate_no_atomic_block()
- self.transaction_state.append(managed)
- if not managed and self.is_dirty() and not forced:
- self.commit()
- self.set_clean()
- if managed == self.get_autocommit():
- self.set_autocommit(not managed)
- def leave_transaction_management(self):
- """
- Leaves transaction management for a running thread. A dirty flag is carried
- over to the surrounding block, as a commit will commit all changes, even
- those from outside. (Commits are on connection level.)
- """
- self.validate_no_atomic_block()
- if self.transaction_state:
- del self.transaction_state[-1]
- else:
- raise TransactionManagementError(
- "This code isn't under transaction management")
- if self.transaction_state:
- managed = self.transaction_state[-1]
- else:
- managed = not self.settings_dict['AUTOCOMMIT']
- if self._dirty:
- self.rollback()
- if managed == self.get_autocommit():
- self.set_autocommit(not managed)
- raise TransactionManagementError(
- "Transaction managed block ended with pending COMMIT/ROLLBACK")
- if managed == self.get_autocommit():
- self.set_autocommit(not managed)
- def get_autocommit(self):
- """
- Check the autocommit state.
- """
- self.ensure_connection()
- return self.autocommit
- def set_autocommit(self, autocommit):
- """
- Enable or disable autocommit.
- """
- self.validate_no_atomic_block()
- self.ensure_connection()
- self._set_autocommit(autocommit)
- self.autocommit = autocommit
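- # Sketch of the intended calling pattern (illustrative only): callers that
- # need manual transaction control switch autocommit off, do their work, then
- # restore it:
- #
- #     connection.set_autocommit(False)
- #     try:
- #         ...  # issue queries, then connection.commit() or rollback()
- #     finally:
- #         connection.set_autocommit(True)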
- def get_rollback(self):
- """
- Get the "needs rollback" flag -- for *advanced use* only.
- """
- if not self.in_atomic_block:
- raise TransactionManagementError(
- "The rollback flag doesn't work outside of an 'atomic' block.")
- return self.needs_rollback
- def set_rollback(self, rollback):
- """
- Set or unset the "needs rollback" flag -- for *advanced use* only.
- """
- if not self.in_atomic_block:
- raise TransactionManagementError(
- "The rollback flag doesn't work outside of an 'atomic' block.")
- self.needs_rollback = rollback
- def validate_no_atomic_block(self):
- """
- Raise an error if an atomic block is active.
- """
- if self.in_atomic_block:
- raise TransactionManagementError(
- "This is forbidden when an 'atomic' block is active.")
- def abort(self):
- """
- Roll back any ongoing transaction and clean the transaction state
- stack.
- """
- if self._dirty:
- self.rollback()
- while self.transaction_state:
- self.leave_transaction_management()
- def is_dirty(self):
- """
- Returns True if the current transaction requires a commit for changes to
- happen.
- """
- return self._dirty
- def set_dirty(self):
- """
- Sets a dirty flag for the current thread and code streak. This can be used
- inside a managed block of code to decide whether there are open changes
- waiting for commit.
- """
- if not self.get_autocommit():
- self._dirty = True
- def set_clean(self):
- """
- Resets the dirty flag for the current thread and code streak. This can be
- used inside a managed block of code to decide whether a commit or rollback
- should happen.
- """
- self._dirty = False
- self.clean_savepoints()
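- # Rough sketch of how the legacy stack-based API above fits together
- # (illustrative, not part of this module):
- #
- #     connection.enter_transaction_management()
- #     try:
- #         ...              # queries; call connection.set_dirty() after writes
- #         connection.commit()
- #     except Exception:
- #         connection.rollback()
- #         raise
- #     finally:
- #         connection.leave_transaction_management()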
- ##### Foreign key constraints checks handling #####
- @contextmanager
- def constraint_checks_disabled(self):
- """
- Context manager that disables foreign key constraint checking.
- """
- disabled = self.disable_constraint_checking()
- try:
- yield
- finally:
- if disabled:
- self.enable_constraint_checking()
- def disable_constraint_checking(self):
- """
- Backends can implement as needed to temporarily disable foreign key
- constraint checking. Should return True if the constraints were
- disabled and will need to be reenabled.
- """
- return False
- def enable_constraint_checking(self):
- """
- Backends can implement as needed to re-enable foreign key constraint
- checking.
- """
- pass
- def check_constraints(self, table_names=None):
- """
- Backends can override this method if they can apply constraint
- checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE"). Should raise an
- IntegrityError if any invalid foreign key references are encountered.
- """
- pass
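- # Example use of the context manager above (illustrative): fixture loading
- # with forward references typically wraps the inserts and verifies the data
- # afterwards:
- #
- #     with connection.constraint_checks_disabled():
- #         ...  # inserts that may temporarily violate FK constraints
- #     connection.check_constraints()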
- ##### Connection termination handling #####
- def is_usable(self):
- """
- Tests if the database connection is usable.
- This function may assume that self.connection is not None.
- """
- raise NotImplementedError
- def close_if_unusable_or_obsolete(self):
- """
- Closes the current connection if unrecoverable errors have occurred,
- or if it outlived its maximum age.
- """
- if self.connection is not None:
- # If the application didn't restore the original autocommit setting,
- # don't take chances, drop the connection.
- if self.get_autocommit() != self.settings_dict['AUTOCOMMIT']:
- self.close()
- return
- if self.errors_occurred:
- if self.is_usable():
- self.errors_occurred = False
- else:
- self.close()
- return
- if self.close_at is not None and time.time() >= self.close_at:
- self.close()
- return
- ##### Thread safety handling #####
- def validate_thread_sharing(self):
- """
- Validates that the connection isn't accessed by a thread other than the
- one which originally created it, unless the connection was explicitly
- authorized to be shared between threads (via the `allow_thread_sharing`
- property). Raises an exception if the validation fails.
- """
- if not (self.allow_thread_sharing
- or self._thread_ident == thread.get_ident()):
- raise DatabaseError("DatabaseWrapper objects created in a "
- "thread can only be used in that same thread. The object "
- "with alias '%s' was created in thread id %s and this is "
- "thread id %s."
- % (self.alias, self._thread_ident, thread.get_ident()))
- ##### Miscellaneous #####
- def wrap_database_errors(self):
- """
- Context manager and decorator that re-throws backend-specific database
- exceptions using Django's common wrappers.
- """
- return DatabaseErrorWrapper(self)
- def make_debug_cursor(self, cursor):
- """
- Creates a cursor that logs all queries in self.queries.
- """
- return util.CursorDebugWrapper(cursor, self)
- @contextmanager
- def temporary_connection(self):
- """
- Context manager that ensures that a connection is established, and
- if it opened one, closes it to avoid leaving a dangling connection.
- This is useful for operations outside of the request-response cycle.
- Provides a cursor: with self.temporary_connection() as cursor: ...
- """
- must_close = self.connection is None
- cursor = self.cursor()
- try:
- yield cursor
- finally:
- cursor.close()
- if must_close:
- self.close()
- def _start_transaction_under_autocommit(self):
- """
- Only required when autocommits_when_autocommit_is_off = True.
- """
- raise NotImplementedError
- def schema_editor(self):
- "Returns a new instance of this backend's SchemaEditor"
- raise NotImplementedError()
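- # A minimal backend sketch, assuming the standard library sqlite3 driver, to
- # show which of the hooks above a concrete wrapper must fill in (illustrative
- # only; the real backend lives in django.db.backends.sqlite3 and also sets up
- # ops, features, introspection, etc.):
- #
- #     class MyWrapper(BaseDatabaseWrapper):
- #         def get_connection_params(self):
- #             return {'database': self.settings_dict['NAME']}
- #         def get_new_connection(self, conn_params):
- #             import sqlite3
- #             return sqlite3.connect(**conn_params)
- #         def init_connection_state(self):
- #             pass
- #         def create_cursor(self):
- #             return self.connection.cursor()
- #         def _set_autocommit(self, autocommit):
- #             self.connection.isolation_level = None if autocommit else ''
- #         def is_usable(self):
- #             return self.connection is not None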
- class BaseDatabaseFeatures(object):
- allows_group_by_pk = False
- # True if django.db.backends.util.typecast_timestamp is used on values
- # returned from dates() calls.
- needs_datetime_string_cast = True
- empty_fetchmany_value = []
- update_can_self_select = True
- # Does the backend distinguish between '' and None?
- interprets_empty_strings_as_nulls = False
- # Does the backend allow inserting duplicate rows when a unique_together
- # constraint exists, but one of the unique_together columns is NULL?
- ignores_nulls_in_unique_constraints = True
- can_use_chunked_reads = True
- can_return_id_from_insert = False
- has_bulk_insert = False
- uses_savepoints = False
- can_combine_inserts_with_and_without_auto_increment_pk = False
- # If True, don't use integer foreign keys referring to, e.g., positive
- # integer primary keys.
- related_fields_match_type = False
- allow_sliced_subqueries = True
- has_select_for_update = False
- has_select_for_update_nowait = False
- supports_select_related = True
- # Does the default test database allow multiple connections?
- # Usually an indication that the test database is in-memory
- test_db_allows_multiple_connections = True
- # Can an object be saved without an explicit primary key?
- supports_unspecified_pk = False
- # Can a fixture contain forward references? i.e., are
- # FK constraints checked at the end of transaction, or
- # at the end of each save operation?
- supports_forward_references = True
- # Does a dirty transaction need to be rolled back
- # before the cursor can be used again?
- requires_rollback_on_dirty_transaction = False
- # Does the backend allow very long model names without error?
- supports_long_model_names = True
- # Is there a REAL datatype in addition to floats/doubles?
- has_real_datatype = False
- supports_subqueries_in_group_by = True
- supports_bitwise_or = True
- # Do time/datetime fields have microsecond precision?
- supports_microsecond_precision = True
- # Does the __regex lookup support backreferencing and grouping?
- supports_regex_backreferencing = True
- # Can date/datetime lookups be performed using a string?
- supports_date_lookup_using_string = True
- # Can datetimes with timezones be used?
- supports_timezones = True
- # Does the database have a copy of the zoneinfo database?
- has_zoneinfo_database = True
- # When performing a GROUP BY, is an ORDER BY NULL required
- # to remove any ordering?
- requires_explicit_null_ordering_when_grouping = False
- # Is there a 1000 item limit on query parameters?
- supports_1000_query_parameters = True
- # Can an object have a primary key of 0? MySQL says No.
- allows_primary_key_0 = True
- # Do we need to NULL a ForeignKey out, or can the constraint check be
- # deferred
- can_defer_constraint_checks = False
- # date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
- supports_mixed_date_datetime_comparisons = True
- # Does the backend support tablespaces? Default to False because it isn't
- # in the SQL standard.
- supports_tablespaces = False
- # Does the backend reset sequences between tests?
- supports_sequence_reset = True
- # Confirm support for introspected foreign keys
- # Every database can do this reliably, except MySQL,
- # which can't do it for MyISAM tables
- can_introspect_foreign_keys = True
- # Support for the DISTINCT ON clause
- can_distinct_on_fields = False
- # Does the backend decide to commit before SAVEPOINT statements
- # when autocommit is disabled? http://bugs.python.org/issue8145#msg109965
- autocommits_when_autocommit_is_off = False
- # Can we roll back DDL in a transaction?
- can_rollback_ddl = False
- # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
- supports_combined_alters = False
- # What's the maximum length for index names?
- max_index_name_length = 63
- # Does it support foreign keys?
- supports_foreign_keys = True
- # Does it support CHECK constraints?
- supports_check_constraints = True
- # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
- # parameter passing? Note this can be provided by the backend even if not
- # supported by the Python driver
- supports_paramstyle_pyformat = True
- def __init__(self, connection):
- self.connection = connection
- @cached_property
- def supports_transactions(self):
- "Confirm support for transactions"
- try:
- # Make sure to run inside a managed transaction block,
- # otherwise autocommit will cause the confirmation to
- # fail.
- self.connection.enter_transaction_management()
- cursor = self.connection.cursor()
- cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
- self.connection.commit()
- cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
- self.connection.rollback()
- cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
- count, = cursor.fetchone()
- cursor.execute('DROP TABLE ROLLBACK_TEST')
- self.connection.commit()
- finally:
- self.connection.leave_transaction_management()
- return count == 0
- @cached_property
- def supports_stddev(self):
- "Confirm support for STDDEV and related stats functions"
- class StdDevPop(object):
- sql_function = 'STDDEV_POP'
- try:
- self.connection.ops.check_aggregate_support(StdDevPop())
- return True
- except NotImplementedError:
- return False
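- # These flags are read through `connection.features`; for example, code that
- # wants to guard optional behaviour typically checks them like this
- # (illustrative sketch):
- #
- #     from django.db import connection
- #     if (connection.features.uses_savepoints and
- #             connection.features.supports_transactions):
- #         ...  # take the savepoint-based path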
- class BaseDatabaseOperations(object):
- """
- This class encapsulates all backend-specific differences, such as the way
- a backend performs ordering or calculates the ID of a recently-inserted
- row.
- """
- compiler_module = "django.db.models.sql.compiler"
- def __init__(self, connection):
- self.connection = connection
- self._cache = None
- def autoinc_sql(self, table, column):
- """
- Returns any SQL needed to support auto-incrementing primary keys, or
- None if no SQL is necessary.
- This SQL is executed when a table is created.
- """
- return None
- def bulk_batch_size(self, fields, objs):
- """
- Returns the maximum allowed batch size for the backend. `fields` is the
- list of fields to be inserted in the batch; `objs` contains all the
- objects to be inserted.
- """
- return len(objs)
- def cache_key_culling_sql(self):
- """
- Returns a SQL query that retrieves the first cache key greater than the
- n smallest.
- This is used by the 'db' cache backend to determine where to start
- culling.
- """
- return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
- def date_extract_sql(self, lookup_type, field_name):
- """
- Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
- extracts a value from the given date field field_name.
- """
- raise NotImplementedError()
- def date_interval_sql(self, sql, connector, timedelta):
- """
- Implements the date interval functionality for expressions
- """
- raise NotImplementedError()
- def date_trunc_sql(self, lookup_type, field_name):
- """
- Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
- truncates the given date field field_name to a date object with only
- the given specificity.
- """
- raise NotImplementedError()
- def datetime_cast_sql(self):
- """
- Returns the SQL necessary to cast a datetime value so that it will be
- retrieved as a Python datetime object instead of a string.
- This SQL should include a '%s' in place of the field's name.
- """
- return "%s"
- def datetime_extract_sql(self, lookup_type, field_name, tzname):
- """
- Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
- 'second', returns the SQL that extracts a value from the given
- datetime field field_name, and a tuple of parameters.
- """
- raise NotImplementedError()
- def datetime_trunc_sql(self, lookup_type, field_name, tzname):
- """
- Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
- 'second', returns the SQL that truncates the given datetime field
- field_name to a datetime object with only the given specificity, and
- a tuple of parameters.
- """
- raise NotImplementedError()
- def deferrable_sql(self):
- """
- Returns the SQL necessary to make a constraint "initially deferred"
- during a CREATE TABLE statement.
- """
- return ''
- def distinct_sql(self, fields):
- """
- Returns an SQL DISTINCT clause which removes duplicate rows from the
- result set. If any fields are given, only the given fields are checked
- for duplicates.
- """
- if fields:
- raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
- else:
- return 'DISTINCT'
- def drop_foreignkey_sql(self):
- """
- Returns the SQL command that drops a foreign key.
- """
- return "DROP CONSTRAINT"
- def drop_sequence_sql(self, table):
- """
- Returns any SQL necessary to drop the sequence for the given table.
- Returns None if no SQL is necessary.
- """
- return None
- def fetch_returned_insert_id(self, cursor):
- """
- Given a cursor object that has just performed an INSERT...RETURNING
- statement into a table that has an auto-incrementing ID, returns the
- newly created ID.
- """
- return cursor.fetchone()[0]
- def field_cast_sql(self, db_type, internal_type):
- """
- Given a column type (e.g. 'BLOB', 'VARCHAR'), and an internal type
- (e.g. 'GenericIPAddressField'), returns the SQL necessary to cast it
- before using it in a WHERE statement. Note that the resulting string
- should contain a '%s' placeholder for the column being searched against.
- """
- return '%s'
- def force_no_ordering(self):
- """
- Returns a list used in the "ORDER BY" clause to force no ordering at
- all. Returning an empty list means that nothing will be included in the
- ordering.
- """
- return []
- def for_update_sql(self, nowait=False):
- """
- Returns the FOR UPDATE SQL clause to lock rows for an update operation.
- """
- if nowait:
- return 'FOR UPDATE NOWAIT'
- else:
- return 'FOR UPDATE'
- def fulltext_search_sql(self, field_name):
- """
- Returns the SQL WHERE clause to use in order to perform a full-text
- search of the given field_name. Note that the resulting string should
- contain a '%s' placeholder for the value being searched against.
- """
- raise NotImplementedError('Full-text search is not implemented for this database backend')
- def last_executed_query(self, cursor, sql, params):
- """
- Returns a string of the query last executed by the given cursor, with
- placeholders replaced with actual values.
- `sql` is the raw query containing placeholders, and `params` is the
- sequence of parameters. These are used by default, but this method
- exists for database backends to provide a better implementation
- according to their own quoting schemes.
- """
- from django.utils.encoding import force_text
- # Convert params to contain Unicode values.
- to_unicode = lambda s: force_text(s, strings_only=True, errors='replace')
- if isinstance(params, (list, tuple)):
- u_params = tuple(to_unicode(val) for val in params)
- elif params is None:
- u_params = ()
- else:
- u_params = dict((to_unicode(k), to_unicode(v)) for k, v in params.items())
- return six.text_type("QUERY = %r - PARAMS = %r") % (sql, u_params)
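- # For example (illustrative), with sql = 'SELECT * FROM t WHERE id = %s' and
- # params = [10], this default implementation returns roughly
- # "QUERY = 'SELECT * FROM t WHERE id = %s' - PARAMS = (10,)".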
- def last_insert_id(self, cursor, table_name, pk_name):
- """
- Given a cursor object that has just performed an INSERT statement into
- a table that has an auto-incrementing ID, returns the newly created ID.
- This method also receives the table name and the name of the primary-key
- column.
- """
- return cursor.lastrowid
- def lookup_cast(self, lookup_type):
- """
- Returns the string to use in a query when performing lookups
- ("contains", "like", etc). The resulting string should contain a '%s'
- placeholder for the column being searched against.
- """
- return "%s"
- def max_in_list_size(self):
- """
- Returns the maximum number of items that can be passed in a single 'IN'
- list condition, or None if the backend does not impose a limit.
- """
- return None
- def max_name_length(self):
- """
- Returns the maximum length of table and column names, or None if there
- is no limit.
- """
- return None
- def no_limit_value(self):
- """
- Returns the value to use for the LIMIT when "LIMIT infinity" is intended.
- Returns None if the limit clause can be omitted in this case.
- """
- raise NotImplementedError
- def pk_default_value(self):
- """
- Returns the value to use during an INSERT statement to specify that
- the field should use its default value.
- """
- return 'DEFAULT'
- def process_clob(self, value):
- """
- Returns the value of a CLOB column, for backends that return a locator
- object that requires additional processing.
- """
- return value
- def return_insert_id(self):
- """
- For backends that support returning the last insert ID as part
- of an insert query, this method returns the SQL and params to
- append to the INSERT query. The returned fragment should
- contain a format string to hold the appropriate column.
- """
- pass
- def compiler(self, compiler_name):
- """
- Returns the SQLCompiler class corresponding to the given name,
- in the namespace corresponding to the `compiler_module` attribute
- on this backend.
- """
- if self._cache is None:
- self._cache = import_module(self.compiler_module)
- return getattr(self._cache, compiler_name)
- def quote_name(self, name):
- """
- Returns a quoted version of the given table, index or column name. Does
- not quote the given name if it's already been quoted.
- """
- raise NotImplementedError()
- def random_function_sql(self):
- """
- Returns a SQL expression that returns a random value.
- """
- return 'RANDOM()'
- def regex_lookup(self, lookup_type):
- """
- Returns the string to use in a query when performing regular expression
- lookups (using "regex" or "iregex"). The resulting string should
- contain a '%s' placeholder for the column being searched against.
- If the feature is not supported (or part of it is not supported), a
- NotImplementedError exception can be raised.
- """
- raise NotImplementedError
- def savepoint_create_sql(self, sid):
- """
- Returns the SQL for starting a new savepoint. Only required if the
- "uses_savepoints" feature is True. The "sid" parameter is a string
- for the savepoint id.
- """
- return "SAVEPOINT %s" % self.quote_name(sid)
- def savepoint_commit_sql(self, sid):
- """
- Returns the SQL for committing the given savepoint.
- """
- return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
- def savepoint_rollback_sql(self, sid):
- """
- Returns the SQL for rolling back the given savepoint.
- """
- return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
- def set_time_zone_sql(self):
- """
- Returns the SQL that will set the connection's time zone.
- Returns '' if the backend doesn't support time zones.
- """
- return ''
- def sql_flush(self, style, tables, sequences, allow_cascade=False):
- """
- Returns a list of SQL statements required to remove all data from
- the given database tables (without actually removing the tables
- themselves).
- The returned value also includes SQL statements required to reset DB
- sequences passed in :param sequences:.
- The `style` argument is a Style object as returned by either
- color_style() or no_style() in django.core.management.color.
- The `allow_cascade` argument determines whether truncation may cascade
- to tables with foreign keys pointing to the tables being truncated.
- PostgreSQL requires a cascade even if these tables are empty.
- """
- raise NotImplementedError()
- def sequence_reset_by_name_sql(self, style, sequences):
- """
- Returns a list of the SQL statements required to reset sequences
- passed in :param sequences:.
- The `style` argument is a Style object as returned by either
- color_style() or no_style() in django.core.management.color.
- """
- return []
- def sequence_reset_sql(self, style, model_list):
- """
- Returns a list of the SQL statements required to reset sequences for
- the given models.
- The `style` argument is a Style object as returned by either
- color_style() or no_style() in django.core.management.color.
- """
- return [] # No sequence reset required by default.
- def start_transaction_sql(self):
- """
- Returns the SQL statement required to start a transaction.
- """
- return "BEGIN;"
- def end_transaction_sql(self, success=True):
- """
- Returns the SQL statement required to end a transaction.
- """
- if not success:
- return "ROLLBACK;"
- return "COMMIT;"
- def tablespace_sql(self, tablespace, inline=False):
- """
- Returns the SQL that will be used in a query to define the tablespace.
- Returns '' if the backend doesn't support tablespaces.
- If inline is True, the SQL is appended to a row; otherwise it's appended
- to the entire CREATE TABLE or CREATE INDEX statement.
- """
- return ''
- def prep_for_like_query(self, x):
- """Prepares a value for use in a LIKE query."""
- from django.utils.encoding import force_text
- return force_text(x).replace("\\", "\\\\").replace("%", "\%").replace("_", "\_")
- # Same as prep_for_like_query(), but called for "iexact" matches, which
- # need not necessarily be implemented using "LIKE" in the backend.
- prep_for_iexact_query = prep_for_like_query
- def validate_autopk_value(self, value):
- """
- Certain backends do not accept some values for "serial" fields
- (for example zero in MySQL). This method will raise a ValueError
- if the value is invalid; otherwise it returns the validated value.
- """
- return value
- def value_to_db_date(self, value):
- """
- Transform a date value to an object compatible with what is expected
- by the backend driver for date columns.
- """
- if value is None:
- return None
- return six.text_type(value)
- def value_to_db_datetime(self, value):
- """
- Transform a datetime value to an object compatible with what is expected
- by the backend driver for datetime columns.
- """
- if value is None:
- return None
- return six.text_type(value)
- def value_to_db_time(self, value):
- """
- Transform a time value to an object compatible with what is expected
- by the backend driver for time columns.
- """
- if value is None:
- return None
- if timezone.is_aware(value):
- raise ValueError("Django does not support timezone-aware times.")
- return six.text_type(value)
- def value_to_db_decimal(self, value, max_digits, decimal_places):
- """
- Transform a decimal.Decimal value to an object compatible with what is
- expected by the backend driver for decimal (numeric) columns.
- """
- if value is None:
- return None
- return util.format_number(value, max_digits, decimal_places)
- def year_lookup_bounds_for_date_field(self, value):
- """
- Returns a two-element list with the lower and upper bounds to be used
- with a BETWEEN operator to query a DateField value using a year
- lookup.
- `value` is an int, containing the looked-up year.
- """
- first = datetime.date(value, 1, 1)
- second = datetime.date(value, 12, 31)
- return [first, second]
- def year_lookup_bounds_for_datetime_field(self, value):
- """
- Returns a two-element list with the lower and upper bounds to be used
- with a BETWEEN operator to query a DateTimeField value using a year
- lookup.
- `value` is an int, containing the looked-up year.
- """
- first = datetime.datetime(value, 1, 1)
- second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
- if settings.USE_TZ:
- tz = timezone.get_current_timezone()
- first = timezone.make_aware(first, tz)
- second = timezone.make_aware(second, tz)
- return [first, second]
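- # Worked example (illustrative): for value=2013 with USE_TZ=False, the
- # datetime variant returns
- #     [datetime.datetime(2013, 1, 1, 0, 0),
- #      datetime.datetime(2013, 12, 31, 23, 59, 59, 999999)]
- # while the date variant returns [date(2013, 1, 1), date(2013, 12, 31)].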
- def convert_values(self, value, field):
- """
- Coerce the value returned by the database backend into a consistent type
- that is compatible with the field type.
- """
- if value is None:
- return value
- internal_type = field.get_internal_type()
- if internal_type == 'FloatField':
- return float(value)
- elif (internal_type and (internal_type.endswith('IntegerField')
- or internal_type == 'AutoField')):
- return int(value)
- return value
- def check_aggregate_support(self, aggregate_func):
- """Check that the backend supports the provided aggregate
- This is used by specific backends to rule out aggregates that are
- known to have faulty implementations. If the named
- aggregate function has a known problem, the backend should
- raise NotImplementedError.
- """
- pass
- def combine_expression(self, connector, sub_expressions):
- """Combine a list of subexpressions into a single expression, using
- the provided connecting operator. This is required because operators
- can vary between backends (e.g., Oracle with %% and &) and between
- subexpression types (e.g., date expressions)
- """
- conn = ' %s ' % connector
- return conn.join(sub_expressions)
- def modify_insert_params(self, placeholders, params):
- """Allow modification of insert parameters. Needed for Oracle Spatial
- backend due to #10888.
- """
- return params
- # Structure returned by the DB-API cursor.description interface (PEP 249)
- FieldInfo = namedtuple('FieldInfo',
- 'name type_code display_size internal_size precision scale null_ok'
- )
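- # Backend introspection code typically builds these from raw
- # cursor.description rows; a minimal sketch, assuming a PEP 249 cursor whose
- # description rows are 7-tuples:
- #
- #     def describe(cursor):
- #         return [FieldInfo(*row[:7]) for row in cursor.description]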
- class BaseDatabaseIntrospection(object):
- """
- This class encapsulates all backend-specific introspection utilities
- """
- data_types_reverse = {}
- def __init__(self, connection):
- self.connection = connection
- def get_field_type(self, data_type, description):
- """Hook for a database backend to use the cursor description to
- match a Django field type to a database column.
- For Oracle, the column data_type on its own is insufficient to
- distinguish between a FloatField and IntegerField, for example."""
- return self.data_types_reverse[data_type]
- def table_name_converter(self, name):
- """Apply a conversion to the name for the purposes of comparison.
- The default table name converter is for case sensitive comparison.
- """
- return name
- def table_names(self, cursor=None):
- """
- Returns a list of names of all tables that exist in the database.
- The returned table list is sorted by Python's default sorting. We
- do NOT use the database's ORDER BY here to avoid subtle differences
- in sorting order between databases.
- """
- if cursor is None:
- cursor = self.connection.cursor()
- return sorted(self.get_table_list(cursor))
- def get_table_list(self, cursor):
- """
- Returns an unsorted list of names of all tables that exist in the
- database.
- """
- raise NotImplementedError
- def django_table_names(self, only_existing=False):
- """
- Returns a list of all table names that have associated Django models and
- are in INSTALLED_APPS.
- If only_existing is True, the resulting list will only include the tables
- that actually exist in the database.
- """
- from django.db import models, router
- tables = set()
- for app in models.get_apps():
- for model in models.get_models(app):
- if not model._meta.managed:
- continue
- if not router.allow_migrate(self.connection.alias, model):
- continue
- tables.add(model._meta.db_table)
- tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
- tables = list(tables)
- if only_existing:
- existing_tables = self.table_names()
- tables = [
- t
- for t in tables
- if self.table_name_converter(t) in existing_tables
- ]
- return tables
- def installed_models(self, tables):
- "Returns a set of all models represented by the provided list of table names."
- from django.db import models, router
- all_models = []
- for app in models.get_apps():
- for model in models.get_models(app):
- if router.allow_migrate(self.connection.alias, model):
- all_models.append(model)
- tables = list(map(self.table_name_converter, tables))
- return set([
- m for m in all_models
- if self.table_name_converter(m._meta.db_table) in tables
- ])
- def sequence_list(self):
- "Returns a list of information about all DB sequences for all models in all apps."
- from django.db import models, router
- apps = models.get_apps()
- sequence_list = []
- for app in apps:
- for model in models.get_models(app):
- if not model._meta.managed:
- continue
- if model._meta.swapped:
- continue
- if not router.allow_migrate(self.connection.alias, model):
- continue
- for f in model._meta.local_fields:
- if isinstance(f, models.AutoField):
- sequence_list.append({'table': model._meta.db_table, 'column': f.column})
- break # Only one AutoField is allowed per model, so don't bother continuing.
- for f in model._meta.local_many_to_many:
- # If this is an m2m using an intermediate table,
- # we don't need to reset the sequence.
- if f.rel.through is None:
- sequence_list.append({'table': f.m2m_db_table(), 'column': None})
- return sequence_list
- def get_key_columns(self, cursor, table_name):
- """
- Backends can override this to return a list of (column_name, referenced_table_name,
- referenced_column_name) for all key columns in the given table.
- """
- raise NotImplementedError
- def get_primary_key_column(self, cursor, table_name):
- """
- Returns the name of the primary key column for the given table.
- """
- for column in six.iteritems(self.get_indexes(cursor, table_name)):
- if column[1]['primary_key']:
- return column[0]
- return None
- def get_indexes(self, cursor, table_name):
- """
- Returns a dictionary of indexed fieldname -> infodict for the given
- table, where each infodict is in the format:
- {'primary_key': boolean representing whether it's the primary key,
- 'unique': boolean representing whether it's a unique index}
- Only single-column indexes are introspected.
- """
- raise NotImplementedError
- def get_constraints(self, cursor, table_name):
- """
- Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}}
-
- Both single- and multi-column constraints are introspected.
- """
- raise NotImplementedError
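- # Typical use of the introspection API (illustrative): management commands
- # such as inspectdb and the test-runner code paths reach it through the
- # connection, e.g.:
- #
- #     from django.db import connection
- #     tables = connection.introspection.table_names()
- #     models = connection.introspection.installed_models(tables)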
- class BaseDatabaseClient(object):
- """
- This class encapsulates all backend-specific methods for opening a
- client shell.
- """
- # This should be a string representing the name of the executable
- # (e.g., "psql"). Subclasses must override this.
- executable_name = None
- def __init__(self, connection):
- # connection is an instance of BaseDatabaseWrapper.
- self.connection = connection
- def runshell(self):
- raise NotImplementedError()
- class BaseDatabaseValidation(object):
- """
- This class encapsulates all backend-specific model validation.
- """
- def __init__(self, connection):
- self.connection = connection
- def validate_field(self, errors, opts, f):
- "By default, there is no backend-specific validation"
- pass
|