- import datetime
- import itertools
- import unittest
- from copy import copy
- from unittest import mock
- from django.core.exceptions import FieldError
- from django.core.management.color import no_style
- from django.db import (
- DatabaseError, DataError, IntegrityError, OperationalError, connection,
- )
- from django.db.models import (
- CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField,
- BooleanField, CharField, CheckConstraint, DateField, DateTimeField,
- DecimalField, F, FloatField, ForeignKey, ForeignObject, Index,
- IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy,
- PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField,
- TextField, TimeField, UniqueConstraint, UUIDField, Value,
- )
- from django.db.models.fields.json import KeyTextTransform
- from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper
- from django.db.models.indexes import IndexExpression
- from django.db.transaction import TransactionManagementError, atomic
- from django.test import (
- TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
- )
- from django.test.utils import (
- CaptureQueriesContext, isolate_apps, register_lookup,
- )
- from django.utils import timezone
- from .fields import (
- CustomManyToManyField, InheritedManyToManyField, MediumBlobField,
- )
- from .models import (
- Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
- AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName,
- AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName,
- AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak,
- BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK,
- Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename,
- Thing, UniqueTest, new_apps,
- )
- class SchemaTests(TransactionTestCase):
- """
- Tests for the schema-alteration code.
- Be aware that these tests are more liable than most to false results,
- as sometimes the code to check if a test has worked is almost as complex
- as the code it is testing.
- """
- available_apps = []
- models = [
- Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
- AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak,
- BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note,
- Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest,
- ]
- # Utility functions
- def setUp(self):
- # local_models should contain test-dependent model classes that will be
- # automatically removed from the app cache on test teardown.
- self.local_models = []
- # isolated_local_models contains models that are in test methods
- # decorated with @isolate_apps.
- self.isolated_local_models = []
- def tearDown(self):
- # Delete any tables made for our models
- self.delete_tables()
- new_apps.clear_cache()
- for model in new_apps.get_models():
- model._meta._expire_cache()
- if 'schema' in new_apps.all_models:
- for model in self.local_models:
- for many_to_many in model._meta.many_to_many:
- through = many_to_many.remote_field.through
- if through and through._meta.auto_created:
- del new_apps.all_models['schema'][through._meta.model_name]
- del new_apps.all_models['schema'][model._meta.model_name]
- if self.isolated_local_models:
- with connection.schema_editor() as editor:
- for model in self.isolated_local_models:
- editor.delete_model(model)
- def delete_tables(self):
- "Deletes all model tables for our models for a clean test environment"
- converter = connection.introspection.identifier_converter
- with connection.schema_editor() as editor:
- connection.disable_constraint_checking()
- table_names = connection.introspection.table_names()
- if connection.features.ignores_table_name_case:
- table_names = [table_name.lower() for table_name in table_names]
- for model in itertools.chain(SchemaTests.models, self.local_models):
- tbl = converter(model._meta.db_table)
- if connection.features.ignores_table_name_case:
- tbl = tbl.lower()
- if tbl in table_names:
- editor.delete_model(model)
- table_names.remove(tbl)
- connection.enable_constraint_checking()
- def column_classes(self, model):
- with connection.cursor() as cursor:
- columns = {
- d[0]: (connection.introspection.get_field_type(d[1], d), d)
- for d in connection.introspection.get_table_description(
- cursor,
- model._meta.db_table,
- )
- }
- # SQLite has a different format for field_type
- for name, (type, desc) in columns.items():
- if isinstance(type, tuple):
- columns[name] = (type[0], desc)
- # SQLite doesn't raise an error for a missing table; the pragma just returns nothing
- if not columns:
- raise DatabaseError("Table does not exist (empty pragma)")
- return columns
- def get_primary_key(self, table):
- with connection.cursor() as cursor:
- return connection.introspection.get_primary_key_column(cursor, table)
- def get_indexes(self, table):
- """
- Get the indexes on the table using a new cursor.
- """
- with connection.cursor() as cursor:
- return [
- c['columns'][0]
- for c in connection.introspection.get_constraints(cursor, table).values()
- if c['index'] and len(c['columns']) == 1
- ]
- def get_uniques(self, table):
- with connection.cursor() as cursor:
- return [
- c['columns'][0]
- for c in connection.introspection.get_constraints(cursor, table).values()
- if c['unique'] and len(c['columns']) == 1
- ]
- def get_constraints(self, table):
- """
- Get the constraints on a table using a new cursor.
- """
- with connection.cursor() as cursor:
- return connection.introspection.get_constraints(cursor, table)
- def get_constraints_for_column(self, model, column_name):
- constraints = self.get_constraints(model._meta.db_table)
- constraints_for_column = []
- for name, details in constraints.items():
- if details['columns'] == [column_name]:
- constraints_for_column.append(name)
- return sorted(constraints_for_column)
- def check_added_field_default(self, schema_editor, model, field, field_name, expected_default,
- cast_function=None):
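- # Add the field, then read back the value the database backfilled for existing rows to verify the applied default.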
- with connection.cursor() as cursor:
- schema_editor.add_field(model, field)
- cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
- database_default = cursor.fetchall()[0][0]
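- # Some backends return the stored default as a different Python type; normalize it before comparing.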
- if cast_function and type(database_default) != type(expected_default):
- database_default = cast_function(database_default)
- self.assertEqual(database_default, expected_default)
- def get_constraints_count(self, table, column, fk_to):
- """
- Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
- number of foreign keys, unique constraints, and indexes on
- `table`.`column`. The `fk_to` argument is a 2-tuple specifying the
- expected foreign key relationship's (table, column).
- """
- with connection.cursor() as cursor:
- constraints = connection.introspection.get_constraints(cursor, table)
- counts = {'fks': 0, 'uniques': 0, 'indexes': 0}
- for c in constraints.values():
- if c['columns'] == [column]:
- if c['foreign_key'] == fk_to:
- counts['fks'] += 1
- if c['unique']:
- counts['uniques'] += 1
- elif c['index']:
- counts['indexes'] += 1
- return counts
- def get_column_collation(self, table, column):
- with connection.cursor() as cursor:
- return next(
- f.collation
- for f in connection.introspection.get_table_description(cursor, table)
- if f.name == column
- )
- def assertIndexOrder(self, table, index, order):
- constraints = self.get_constraints(table)
- self.assertIn(index, constraints)
- index_orders = constraints[index]['orders']
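- # zip() stops at the shorter sequence, so only the first len(order) columns are compared.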
- self.assertTrue(all(val == expected for val, expected in zip(index_orders, order)))
- def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'):
- """
- Fail if the FK constraint on `model.Meta.db_table`.`column` to
- `expected_fk_table`.id doesn't exist.
- """
- constraints = self.get_constraints(model._meta.db_table)
- constraint_fk = None
- for details in constraints.values():
- if details['columns'] == [column] and details['foreign_key']:
- constraint_fk = details['foreign_key']
- break
- self.assertEqual(constraint_fk, (expected_fk_table, field))
- def assertForeignKeyNotExists(self, model, column, expected_fk_table):
- with self.assertRaises(AssertionError):
- self.assertForeignKeyExists(model, column, expected_fk_table)
- # Tests
- def test_creation_deletion(self):
- """
- Tries creating a model's table, and then deleting it.
- """
- with connection.schema_editor() as editor:
- # Create the table
- editor.create_model(Author)
- # The table is there
- list(Author.objects.all())
- # Clean up that table
- editor.delete_model(Author)
- # No deferred SQL should be left over.
- self.assertEqual(editor.deferred_sql, [])
- # The table is gone
- with self.assertRaises(DatabaseError):
- list(Author.objects.all())
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_fk(self):
- "Creating tables out of FK order, then repointing, works"
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Book)
- editor.create_model(Author)
- editor.create_model(Tag)
- # Initial tables are there
- list(Author.objects.all())
- list(Book.objects.all())
- # Make sure the FK constraint is present
- with self.assertRaises(IntegrityError):
- Book.objects.create(
- author_id=1,
- title="Much Ado About Foreign Keys",
- pub_date=datetime.datetime.now(),
- )
- # Repoint the FK constraint
- old_field = Book._meta.get_field("author")
- new_field = ForeignKey(Tag, CASCADE)
- new_field.set_attributes_from_name("author")
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- self.assertForeignKeyExists(Book, 'author_id', 'schema_tag')
- @skipUnlessDBFeature('can_create_inline_fk')
- def test_inline_fk(self):
- # Create some tables.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- editor.create_model(Note)
- self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book')
- # Add a foreign key from one to the other.
- with connection.schema_editor() as editor:
- new_field = ForeignKey(Book, CASCADE)
- new_field.set_attributes_from_name('book')
- editor.add_field(Note, new_field)
- self.assertForeignKeyExists(Note, 'book_id', 'schema_book')
- # Creating a FK field with a constraint uses a single statement without
- # a deferred ALTER TABLE.
- self.assertFalse([
- sql for sql in (str(statement) for statement in editor.deferred_sql)
- if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql
- ])
- @skipUnlessDBFeature('can_create_inline_fk')
- def test_add_inline_fk_update_data(self):
- with connection.schema_editor() as editor:
- editor.create_model(Node)
- # Add an inline foreign key and update data in the same transaction.
- new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True)
- new_field.set_attributes_from_name('new_parent_fk')
- parent = Node.objects.create()
- with connection.schema_editor() as editor:
- editor.add_field(Node, new_field)
- editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk])
- assertIndex = (
- self.assertIn
- if connection.features.indexes_foreign_keys
- else self.assertNotIn
- )
- assertIndex('new_parent_fk_id', self.get_indexes(Node._meta.db_table))
- @skipUnlessDBFeature(
- 'can_create_inline_fk',
- 'allows_multiple_constraints_on_same_fields',
- )
- @isolate_apps('schema')
- def test_add_inline_fk_index_update_data(self):
- class Node(Model):
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Node)
- # Add an inline foreign key, update data, and an index in the same
- # transaction.
- new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True)
- new_field.set_attributes_from_name('new_parent_fk')
- parent = Node.objects.create()
- with connection.schema_editor() as editor:
- editor.add_field(Node, new_field)
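- # Register the field on the model options so add_index() below can resolve 'new_parent_fk'.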
- Node._meta.add_field(new_field)
- editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk])
- editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx'))
- self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table))
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_char_field_with_db_index_to_fk(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(AuthorCharFieldWithIndex)
- # Change CharField to FK
- old_field = AuthorCharFieldWithIndex._meta.get_field('char_field')
- new_field = ForeignKey(Author, CASCADE, blank=True)
- new_field.set_attributes_from_name('char_field')
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True)
- self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author')
- @skipUnlessDBFeature('supports_foreign_keys')
- @skipUnlessDBFeature('supports_index_on_text_field')
- def test_text_field_with_db_index_to_fk(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(AuthorTextFieldWithIndex)
- # Change TextField to FK
- old_field = AuthorTextFieldWithIndex._meta.get_field('text_field')
- new_field = ForeignKey(Author, CASCADE, blank=True)
- new_field.set_attributes_from_name('text_field')
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True)
- self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author')
- @isolate_apps('schema')
- def test_char_field_pk_to_auto_field(self):
- class Foo(Model):
- id = CharField(max_length=255, primary_key=True)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.isolated_local_models = [Foo]
- old_field = Foo._meta.get_field('id')
- new_field = AutoField(primary_key=True)
- new_field.set_attributes_from_name('id')
- new_field.model = Foo
- with connection.schema_editor() as editor:
- editor.alter_field(Foo, old_field, new_field, strict=True)
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_fk_to_proxy(self):
- "Creating a FK to a proxy model creates database constraints."
- class AuthorProxy(Author):
- class Meta:
- app_label = 'schema'
- apps = new_apps
- proxy = True
- class AuthorRef(Model):
- author = ForeignKey(AuthorProxy, on_delete=CASCADE)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [AuthorProxy, AuthorRef]
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(AuthorRef)
- self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_fk_db_constraint(self):
- "The db_constraint parameter is respected"
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- editor.create_model(Author)
- editor.create_model(BookWeak)
- # Initial tables are there
- list(Author.objects.all())
- list(Tag.objects.all())
- list(BookWeak.objects.all())
- self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
- # Make a db_constraint=False FK
- new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
- new_field.set_attributes_from_name("tag")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
- # Alter to one with a constraint
- new_field2 = ForeignKey(Tag, CASCADE)
- new_field2.set_attributes_from_name("tag")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, new_field2, strict=True)
- self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag')
- # Alter to one without a constraint again
- new_field2 = ForeignKey(Tag, CASCADE)
- new_field2.set_attributes_from_name("tag")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field2, new_field, strict=True)
- self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
- @isolate_apps('schema')
- def test_no_db_constraint_added_during_primary_key_change(self):
- """
- When a primary key that's pointed to by a ForeignKey with
- db_constraint=False is altered, a foreign key constraint isn't added.
- """
- class Author(Model):
- class Meta:
- app_label = 'schema'
- class BookWeak(Model):
- author = ForeignKey(Author, CASCADE, db_constraint=False)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWeak)
- self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
- old_field = Author._meta.get_field('id')
- new_field = BigAutoField(primary_key=True)
- new_field.model = Author
- new_field.set_attributes_from_name('id')
- # @isolate_apps() and inner models are needed to have the model
- # relations populated; otherwise this doesn't act as a regression test.
- self.assertEqual(len(new_field.model._meta.related_objects), 1)
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
- def _test_m2m_db_constraint(self, M2MFieldClass):
- class LocalAuthorWithM2M(Model):
- name = CharField(max_length=255)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalAuthorWithM2M]
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- editor.create_model(LocalAuthorWithM2M)
- # Initial tables are there
- list(LocalAuthorWithM2M.objects.all())
- list(Tag.objects.all())
- # Make a db_constraint=False FK
- new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
- new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
- # Add the field
- with connection.schema_editor() as editor:
- editor.add_field(LocalAuthorWithM2M, new_field)
- self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_m2m_db_constraint(self):
- self._test_m2m_db_constraint(ManyToManyField)
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_m2m_db_constraint_custom(self):
- self._test_m2m_db_constraint(CustomManyToManyField)
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_m2m_db_constraint_inherited(self):
- self._test_m2m_db_constraint(InheritedManyToManyField)
- def test_add_field(self):
- """
- Tests adding fields to models
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure there's no age field
- columns = self.column_classes(Author)
- self.assertNotIn("age", columns)
- # Add the new field
- new_field = IntegerField(null=True)
- new_field.set_attributes_from_name("age")
- with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
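- # A nullable field needs no temporary default, so no DROP DEFAULT statement should be issued.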
- drop_default_sql = editor.sql_alter_column_no_default % {
- 'column': editor.quote_name(new_field.name),
- }
- self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries))
- columns = self.column_classes(Author)
- self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField'])
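- # Index 6 of the introspection description tuple is the null_ok flag.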
- self.assertTrue(columns['age'][1][6])
- def test_add_field_remove_field(self):
- """
- Adding a field and removing it removes all deferred SQL referring to it.
- """
- with connection.schema_editor() as editor:
- # Create a table with a unique constraint on the slug field.
- editor.create_model(Tag)
- # Remove the slug column.
- editor.remove_field(Tag, Tag._meta.get_field('slug'))
- self.assertEqual(editor.deferred_sql, [])
- def test_add_field_temp_default(self):
- """
- Tests adding fields to models with a temporary default
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure there's no age field
- columns = self.column_classes(Author)
- self.assertNotIn("age", columns)
- # Add some rows of data
- Author.objects.create(name="Andrew", height=30)
- Author.objects.create(name="Andrea")
- # Add a not-null field
- new_field = CharField(max_length=30, default="Godwin")
- new_field.set_attributes_from_name("surname")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- columns = self.column_classes(Author)
- self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField'])
- self.assertEqual(columns['surname'][1][6],
- connection.features.interprets_empty_strings_as_nulls)
- def test_add_field_temp_default_boolean(self):
- """
- Tests adding fields to models with a temporary default where
- the default is False. (#21783)
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure there's no age field
- columns = self.column_classes(Author)
- self.assertNotIn("age", columns)
- # Add some rows of data
- Author.objects.create(name="Andrew", height=30)
- Author.objects.create(name="Andrea")
- # Add a not-null field
- new_field = BooleanField(default=False)
- new_field.set_attributes_from_name("awesome")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- columns = self.column_classes(Author)
- # BooleanFields are stored as TINYINT(1) on MySQL.
- field_type = columns['awesome'][0]
- self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField'])
- def test_add_field_default_transform(self):
- """
- Tests adding fields to models with a default that is not directly
- valid in the database (#22581)
- """
- class TestTransformField(IntegerField):
- # Weird field that saves the count of items in its value
- def get_default(self):
- return self.default
- def get_prep_value(self, value):
- if value is None:
- return 0
- return len(value)
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add some rows of data
- Author.objects.create(name="Andrew", height=30)
- Author.objects.create(name="Andrea")
- # Add the field with a default it needs to transform (to the item count, 1, in this case)
- new_field = TestTransformField(default={1: 2})
- new_field.set_attributes_from_name("thing")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- # Ensure the field is there
- columns = self.column_classes(Author)
- field_type, field_info = columns['thing']
- self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField'])
- # Make sure the values were transformed correctly
- self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
- def test_add_field_binary(self):
- """
- Tests binary fields get a sane default (#22851)
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add the new field
- new_field = BinaryField(blank=True)
- new_field.set_attributes_from_name("bits")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- columns = self.column_classes(Author)
- # MySQL annoyingly uses the same backend, so it'll come back as one of
- # these two types.
- self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))
- @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific")
- def test_add_binaryfield_mediumblob(self):
- """
- Test adding a custom-sized binary field on MySQL (#24846).
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add the new field with default
- new_field = MediumBlobField(blank=True, default=b'123')
- new_field.set_attributes_from_name('bits')
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- columns = self.column_classes(Author)
- # Introspection treats BLOBs as TextFields
- self.assertEqual(columns['bits'][0], "TextField")
- def test_alter(self):
- """
- Tests simple altering of fields
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the field is right to begin with
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
- self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
- # Alter the name field to a TextField
- old_field = Author._meta.get_field("name")
- new_field = TextField(null=True)
- new_field.set_attributes_from_name("name")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], "TextField")
- self.assertTrue(columns['name'][1][6])
- # Change nullability again
- new_field2 = TextField(null=False)
- new_field2.set_attributes_from_name("name")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, new_field2, strict=True)
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], "TextField")
- self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
- def test_alter_auto_field_to_integer_field(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Change AutoField to IntegerField
- old_field = Author._meta.get_field('id')
- new_field = IntegerField(primary_key=True)
- new_field.set_attributes_from_name('id')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- # Now that ID is an IntegerField, the database raises an error if it
- # isn't provided.
- if not connection.features.supports_unspecified_pk:
- with self.assertRaises(DatabaseError):
- Author.objects.create()
- def test_alter_auto_field_to_char_field(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Change AutoField to CharField
- old_field = Author._meta.get_field('id')
- new_field = CharField(primary_key=True, max_length=50)
- new_field.set_attributes_from_name('id')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- @isolate_apps('schema')
- def test_alter_auto_field_quoted_db_column(self):
- class Foo(Model):
- id = AutoField(primary_key=True, db_column='"quoted_id"')
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.isolated_local_models = [Foo]
- old_field = Foo._meta.get_field('id')
- new_field = BigAutoField(primary_key=True)
- new_field.model = Foo
- new_field.db_column = '"quoted_id"'
- new_field.set_attributes_from_name('id')
- with connection.schema_editor() as editor:
- editor.alter_field(Foo, old_field, new_field, strict=True)
- Foo.objects.create()
- def test_alter_not_unique_field_to_primary_key(self):
- # Create the table.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Change UUIDField to primary key.
- old_field = Author._meta.get_field('uuid')
- new_field = UUIDField(primary_key=True)
- new_field.set_attributes_from_name('uuid')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.remove_field(Author, Author._meta.get_field('id'))
- editor.alter_field(Author, old_field, new_field, strict=True)
- # Redundant unique constraint is not added.
- count = self.get_constraints_count(
- Author._meta.db_table,
- Author._meta.get_field('uuid').column,
- None,
- )
- self.assertLessEqual(count['uniques'], 1)
- @isolate_apps('schema')
- def test_alter_primary_key_quoted_db_table(self):
- class Foo(Model):
- class Meta:
- app_label = 'schema'
- db_table = '"foo"'
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.isolated_local_models = [Foo]
- old_field = Foo._meta.get_field('id')
- new_field = BigAutoField(primary_key=True)
- new_field.model = Foo
- new_field.set_attributes_from_name('id')
- with connection.schema_editor() as editor:
- editor.alter_field(Foo, old_field, new_field, strict=True)
- Foo.objects.create()
- def test_alter_text_field(self):
- # Regression for "BLOB/TEXT column 'info' can't have a default value"
- # on MySQL.
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- old_field = Note._meta.get_field("info")
- new_field = TextField(blank=True)
- new_field.set_attributes_from_name("info")
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- def test_alter_text_field_to_not_null_with_default_value(self):
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- old_field = Note._meta.get_field('address')
- new_field = TextField(blank=True, default='', null=False)
- new_field.set_attributes_from_name('address')
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl')
- def test_alter_fk_checks_deferred_constraints(self):
- """
- #25492 - Altering a foreign key's structure and data in the same
- transaction.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Node)
- old_field = Node._meta.get_field('parent')
- new_field = ForeignKey(Node, CASCADE)
- new_field.set_attributes_from_name('parent')
- parent = Node.objects.create()
- with connection.schema_editor() as editor:
- # Update the parent FK to create a deferred constraint check.
- Node.objects.update(parent=parent)
- editor.alter_field(Node, old_field, new_field, strict=True)
- def test_alter_text_field_to_date_field(self):
- """
- #25002 - Test conversion of text field to date field.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- Note.objects.create(info='1988-05-05')
- old_field = Note._meta.get_field('info')
- new_field = DateField(blank=True)
- new_field.set_attributes_from_name('info')
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- # Make sure the field isn't nullable
- columns = self.column_classes(Note)
- self.assertFalse(columns['info'][1][6])
- def test_alter_text_field_to_datetime_field(self):
- """
- #25002 - Test conversion of text field to datetime field.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- Note.objects.create(info='1988-05-05 3:16:17.4567')
- old_field = Note._meta.get_field('info')
- new_field = DateTimeField(blank=True)
- new_field.set_attributes_from_name('info')
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- # Make sure the field isn't nullable
- columns = self.column_classes(Note)
- self.assertFalse(columns['info'][1][6])
- def test_alter_text_field_to_time_field(self):
- """
- #25002 - Test conversion of text field to time field.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- Note.objects.create(info='3:16:17.4567')
- old_field = Note._meta.get_field('info')
- new_field = TimeField(blank=True)
- new_field.set_attributes_from_name('info')
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- # Make sure the field isn't nullable
- columns = self.column_classes(Note)
- self.assertFalse(columns['info'][1][6])
- @skipIfDBFeature('interprets_empty_strings_as_nulls')
- def test_alter_textual_field_keep_null_status(self):
- """
- Changing a field type shouldn't affect the not null status.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- with self.assertRaises(IntegrityError):
- Note.objects.create(info=None)
- old_field = Note._meta.get_field("info")
- new_field = CharField(max_length=50)
- new_field.set_attributes_from_name("info")
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- with self.assertRaises(IntegrityError):
- Note.objects.create(info=None)
- @skipUnlessDBFeature('interprets_empty_strings_as_nulls')
- def test_alter_textual_field_not_null_to_null(self):
- """
- Nullability for textual fields is preserved on databases that
- interpret empty strings as NULLs.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- columns = self.column_classes(Author)
- # Field is nullable.
- self.assertTrue(columns['uuid'][1][6])
- # Change to NOT NULL.
- old_field = Author._meta.get_field('uuid')
- new_field = SlugField(null=False, blank=True)
- new_field.set_attributes_from_name('uuid')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- columns = self.column_classes(Author)
- # Nullability is preserved.
- self.assertTrue(columns['uuid'][1][6])
- def test_alter_numeric_field_keep_null_status(self):
- """
- Changing a field type shouldn't affect the not null status.
- """
- with connection.schema_editor() as editor:
- editor.create_model(UniqueTest)
- with self.assertRaises(IntegrityError):
- UniqueTest.objects.create(year=None, slug='aaa')
- old_field = UniqueTest._meta.get_field("year")
- new_field = BigIntegerField()
- new_field.set_attributes_from_name("year")
- with connection.schema_editor() as editor:
- editor.alter_field(UniqueTest, old_field, new_field, strict=True)
- with self.assertRaises(IntegrityError):
- UniqueTest.objects.create(year=None, slug='bbb')
- def test_alter_null_to_not_null(self):
- """
- #23609 - Tests handling of default values when altering from NULL to NOT NULL.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the field is right to begin with
- columns = self.column_classes(Author)
- self.assertTrue(columns['height'][1][6])
- # Create some test data
- Author.objects.create(name='Not null author', height=12)
- Author.objects.create(name='Null author')
- # Verify null value
- self.assertEqual(Author.objects.get(name='Not null author').height, 12)
- self.assertIsNone(Author.objects.get(name='Null author').height)
- # Alter the height field to NOT NULL with default
- old_field = Author._meta.get_field("height")
- new_field = PositiveIntegerField(default=42)
- new_field.set_attributes_from_name("height")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- columns = self.column_classes(Author)
- self.assertFalse(columns['height'][1][6])
- # Verify default value
- self.assertEqual(Author.objects.get(name='Not null author').height, 12)
- self.assertEqual(Author.objects.get(name='Null author').height, 42)
- def test_alter_charfield_to_null(self):
- """
- #24307 - Should skip an alter statement on databases with
- interprets_empty_strings_as_nulls when changing a CharField to null.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Change the CharField to null
- old_field = Author._meta.get_field('name')
- new_field = copy(old_field)
- new_field.null = True
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
- def test_alter_char_field_decrease_length(self):
- # Create the table.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- Author.objects.create(name='x' * 255)
- # Change max_length of CharField.
- old_field = Author._meta.get_field('name')
- new_field = CharField(max_length=254)
- new_field.set_attributes_from_name('name')
- with connection.schema_editor() as editor:
- msg = 'value too long for type character varying(254)'
- with self.assertRaisesMessage(DataError, msg):
- editor.alter_field(Author, old_field, new_field, strict=True)
- @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
- def test_alter_field_with_custom_db_type(self):
- from django.contrib.postgres.fields import ArrayField
- class Foo(Model):
- field = ArrayField(CharField(max_length=255))
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.isolated_local_models = [Foo]
- old_field = Foo._meta.get_field('field')
- new_field = ArrayField(CharField(max_length=16))
- new_field.set_attributes_from_name('field')
- new_field.model = Foo
- with connection.schema_editor() as editor:
- editor.alter_field(Foo, old_field, new_field, strict=True)
- @isolate_apps('schema')
- @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
- def test_alter_array_field_decrease_base_field_length(self):
- from django.contrib.postgres.fields import ArrayField
- class ArrayModel(Model):
- field = ArrayField(CharField(max_length=16))
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(ArrayModel)
- self.isolated_local_models = [ArrayModel]
- ArrayModel.objects.create(field=['x' * 16])
- old_field = ArrayModel._meta.get_field('field')
- new_field = ArrayField(CharField(max_length=15))
- new_field.set_attributes_from_name('field')
- new_field.model = ArrayModel
- with connection.schema_editor() as editor:
- msg = 'value too long for type character varying(15)'
- with self.assertRaisesMessage(DataError, msg):
- editor.alter_field(ArrayModel, old_field, new_field, strict=True)
- @isolate_apps('schema')
- @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
- def test_alter_array_field_decrease_nested_base_field_length(self):
- from django.contrib.postgres.fields import ArrayField
- class ArrayModel(Model):
- field = ArrayField(ArrayField(CharField(max_length=16)))
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(ArrayModel)
- self.isolated_local_models = [ArrayModel]
- ArrayModel.objects.create(field=[['x' * 16]])
- old_field = ArrayModel._meta.get_field('field')
- new_field = ArrayField(ArrayField(CharField(max_length=15)))
- new_field.set_attributes_from_name('field')
- new_field.model = ArrayModel
- with connection.schema_editor() as editor:
- msg = 'value too long for type character varying(15)'
- with self.assertRaisesMessage(DataError, msg):
- editor.alter_field(ArrayModel, old_field, new_field, strict=True)
- def test_alter_textfield_to_null(self):
- """
- #24307 - Should skip an alter statement on databases with
- interprets_empty_strings_as_nulls when changing a TextField to null.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- # Change the TextField to null
- old_field = Note._meta.get_field('info')
- new_field = copy(old_field)
- new_field.null = True
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- @skipUnlessDBFeature('supports_combined_alters')
- def test_alter_null_to_not_null_keeping_default(self):
- """
- #23738 - Can change a nullable field with default to non-nullable
- with the same default.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithDefaultHeight)
- # Ensure the field is right to begin with
- columns = self.column_classes(AuthorWithDefaultHeight)
- self.assertTrue(columns['height'][1][6])
- # Alter the height field to NOT NULL keeping the previous default
- old_field = AuthorWithDefaultHeight._meta.get_field("height")
- new_field = PositiveIntegerField(default=42)
- new_field.set_attributes_from_name("height")
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
- columns = self.column_classes(AuthorWithDefaultHeight)
- self.assertFalse(columns['height'][1][6])
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_alter_fk(self):
- """
- Tests altering of FKs
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure the field is right to begin with
- columns = self.column_classes(Book)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
- # Alter the FK
- old_field = Book._meta.get_field("author")
- new_field = ForeignKey(Author, CASCADE, editable=False)
- new_field.set_attributes_from_name("author")
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- columns = self.column_classes(Book)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_alter_to_fk(self):
- """
- #24447 - Tests adding a FK constraint for an existing column
- """
- class LocalBook(Model):
- author = IntegerField()
- title = CharField(max_length=100, db_index=True)
- pub_date = DateTimeField()
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalBook]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(LocalBook)
- # Ensure no FK constraint exists
- constraints = self.get_constraints(LocalBook._meta.db_table)
- for details in constraints.values():
- if details['foreign_key']:
- self.fail('Found an unexpected FK constraint to %s' % details['columns'])
- old_field = LocalBook._meta.get_field("author")
- new_field = ForeignKey(Author, CASCADE)
- new_field.set_attributes_from_name("author")
- with connection.schema_editor() as editor:
- editor.alter_field(LocalBook, old_field, new_field, strict=True)
- self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_alter_o2o_to_fk(self):
- """
- #24163 - Tests altering of OneToOneField to ForeignKey
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithO2O)
- # Ensure the field is right to begin with
- columns = self.column_classes(BookWithO2O)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- # Ensure the field is unique
- author = Author.objects.create(name="Joe")
- BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
- with self.assertRaises(IntegrityError):
- BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
- BookWithO2O.objects.all().delete()
- self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
- # Alter the OneToOneField to ForeignKey
- old_field = BookWithO2O._meta.get_field("author")
- new_field = ForeignKey(Author, CASCADE)
- new_field.set_attributes_from_name("author")
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
- columns = self.column_classes(Book)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- # Ensure the field is not unique anymore
- Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
- Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
- self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_alter_fk_to_o2o(self):
- """
- #24163 - Tests altering of ForeignKey to OneToOneField
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure the field is right to begin with
- columns = self.column_classes(Book)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- # Ensure the field is not unique
- author = Author.objects.create(name="Joe")
- Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
- Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
- Book.objects.all().delete()
- self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
- # Alter the ForeignKey to OneToOneField
- old_field = Book._meta.get_field("author")
- new_field = OneToOneField(Author, CASCADE)
- new_field.set_attributes_from_name("author")
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- columns = self.column_classes(BookWithO2O)
- self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
- # Ensure the field is unique now
- BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
- with self.assertRaises(IntegrityError):
- BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
- self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
- def test_alter_field_fk_to_o2o(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- expected_fks = 1 if connection.features.supports_foreign_keys else 0
- expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
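- # Whether the FK column starts out with an index depends on the backend,
- # so the expected counts are derived from the connection's feature flags.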
- # Check the index is right to begin with.
- counts = self.get_constraints_count(
- Book._meta.db_table,
- Book._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- self.assertEqual(
- counts,
- {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
- )
- old_field = Book._meta.get_field('author')
- new_field = OneToOneField(Author, CASCADE)
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- counts = self.get_constraints_count(
- Book._meta.db_table,
- Book._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- # The index on ForeignKey is replaced with a unique constraint for OneToOneField.
- self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
- def test_alter_field_fk_keeps_index(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- expected_fks = 1 if connection.features.supports_foreign_keys else 0
- expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
- # Check the index is right to begin with.
- counts = self.get_constraints_count(
- Book._meta.db_table,
- Book._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- self.assertEqual(
- counts,
- {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
- )
- old_field = Book._meta.get_field('author')
- # on_delete changed from CASCADE.
- new_field = ForeignKey(Author, PROTECT)
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- counts = self.get_constraints_count(
- Book._meta.db_table,
- Book._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- # The index remains.
- self.assertEqual(
- counts,
- {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
- )
- def test_alter_field_o2o_to_fk(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithO2O)
- expected_fks = 1 if connection.features.supports_foreign_keys else 0
- # Check the unique constraint is right to begin with.
- counts = self.get_constraints_count(
- BookWithO2O._meta.db_table,
- BookWithO2O._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
- old_field = BookWithO2O._meta.get_field('author')
- new_field = ForeignKey(Author, CASCADE)
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
- counts = self.get_constraints_count(
- BookWithO2O._meta.db_table,
- BookWithO2O._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- # The unique constraint on OneToOneField is replaced with an index for ForeignKey.
- self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
- def test_alter_field_o2o_keeps_unique(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithO2O)
- expected_fks = 1 if connection.features.supports_foreign_keys else 0
- # Check the unique constraint is right to begin with.
- counts = self.get_constraints_count(
- BookWithO2O._meta.db_table,
- BookWithO2O._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
- old_field = BookWithO2O._meta.get_field('author')
- # on_delete changed from CASCADE.
- new_field = OneToOneField(Author, PROTECT)
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
- counts = self.get_constraints_count(
- BookWithO2O._meta.db_table,
- BookWithO2O._meta.get_field('author').column,
- (Author._meta.db_table, Author._meta.pk.column),
- )
- # The unique constraint remains.
- self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
- @skipUnlessDBFeature('ignores_table_name_case')
- def test_alter_db_table_case(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Alter the case of the table
- old_table_name = Author._meta.db_table
- with connection.schema_editor() as editor:
- editor.alter_db_table(Author, old_table_name, old_table_name.upper())
- def test_alter_implicit_id_to_explicit(self):
- """
- Should be able to convert an implicit "id" field to an explicit "id"
- primary key field.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- old_field = Author._meta.get_field("id")
- new_field = AutoField(primary_key=True)
- new_field.set_attributes_from_name("id")
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- # This will fail if DROP DEFAULT is inadvertently executed on this
- # field, which would drop the id sequence (at least on PostgreSQL).
- Author.objects.create(name='Foo')
- Author.objects.create(name='Bar')
- def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self):
- """
- Converting an implicit PK to BigAutoField(primary_key=True) should keep
- a sequence owner on PostgreSQL.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- old_field = Author._meta.get_field('id')
- new_field = BigAutoField(primary_key=True)
- new_field.set_attributes_from_name('id')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- Author.objects.create(name='Foo', pk=1)
- with connection.cursor() as cursor:
- sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author])
- if sequence_reset_sqls:
- cursor.execute(sequence_reset_sqls[0])
- # This fails on PostgreSQL if the sequence is missing an owner.
- self.assertIsNotNone(Author.objects.create(name='Bar'))
- def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self):
- """
- Converting an implicit PK to SmallAutoField(primary_key=True) should
- keep a sequence owner on PostgreSQL.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- old_field = Author._meta.get_field('id')
- new_field = SmallAutoField(primary_key=True)
- new_field.set_attributes_from_name('id')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- Author.objects.create(name='Foo', pk=1)
- with connection.cursor() as cursor:
- sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author])
- if sequence_reset_sqls:
- cursor.execute(sequence_reset_sqls[0])
- # This fails on PostgreSQL if the sequence is missing an owner.
- self.assertIsNotNone(Author.objects.create(name='Bar'))
- def test_alter_int_pk_to_autofield_pk(self):
- """
- Should be able to alter an IntegerField(primary_key=True) to
- AutoField(primary_key=True).
- """
- with connection.schema_editor() as editor:
- editor.create_model(IntegerPK)
- old_field = IntegerPK._meta.get_field('i')
- new_field = AutoField(primary_key=True)
- new_field.model = IntegerPK
- new_field.set_attributes_from_name('i')
- with connection.schema_editor() as editor:
- editor.alter_field(IntegerPK, old_field, new_field, strict=True)
- # A model representing the updated model.
- class IntegerPKToAutoField(Model):
- i = AutoField(primary_key=True)
- j = IntegerField(unique=True)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = IntegerPK._meta.db_table
- # An id (i) is generated by the database.
- obj = IntegerPKToAutoField.objects.create(j=1)
- self.assertIsNotNone(obj.i)
- def test_alter_int_pk_to_bigautofield_pk(self):
- """
- Should be able to alter an IntegerField(primary_key=True) to
- BigAutoField(primary_key=True).
- """
- with connection.schema_editor() as editor:
- editor.create_model(IntegerPK)
- old_field = IntegerPK._meta.get_field('i')
- new_field = BigAutoField(primary_key=True)
- new_field.model = IntegerPK
- new_field.set_attributes_from_name('i')
- with connection.schema_editor() as editor:
- editor.alter_field(IntegerPK, old_field, new_field, strict=True)
- # A model representing the updated model.
- class IntegerPKToBigAutoField(Model):
- i = BigAutoField(primary_key=True)
- j = IntegerField(unique=True)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = IntegerPK._meta.db_table
- # An id (i) is generated by the database.
- obj = IntegerPKToBigAutoField.objects.create(j=1)
- self.assertIsNotNone(obj.i)
- @isolate_apps('schema')
- def test_alter_smallint_pk_to_smallautofield_pk(self):
- """
- Should be able to alter a SmallIntegerField(primary_key=True) to
- SmallAutoField(primary_key=True).
- """
- class SmallIntegerPK(Model):
- i = SmallIntegerField(primary_key=True)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(SmallIntegerPK)
- self.isolated_local_models = [SmallIntegerPK]
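- # isolated_local_models is the @isolate_apps() counterpart of local_models;
- # the test teardown uses it to drop the table created for this model.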
- old_field = SmallIntegerPK._meta.get_field('i')
- new_field = SmallAutoField(primary_key=True)
- new_field.model = SmallIntegerPK
- new_field.set_attributes_from_name('i')
- with connection.schema_editor() as editor:
- editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)
- def test_alter_int_pk_to_int_unique(self):
- """
- Should be able to alter an IntegerField(primary_key=True) to
- IntegerField(unique=True).
- """
- with connection.schema_editor() as editor:
- editor.create_model(IntegerPK)
- # Delete the old PK
- old_field = IntegerPK._meta.get_field('i')
- new_field = IntegerField(unique=True)
- new_field.model = IntegerPK
- new_field.set_attributes_from_name('i')
- with connection.schema_editor() as editor:
- editor.alter_field(IntegerPK, old_field, new_field, strict=True)
- # The primary key constraint is gone. Result depends on database:
- # 'id' for SQLite, None for others (must not be 'i').
- self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None))
- # Set up a model class as it currently stands. The original IntegerPK
- # class is now out of date and some backends make use of the whole
- # model class when modifying a field (such as sqlite3 when remaking a
- # table), so an outdated model class leads to incorrect results.
- class Transitional(Model):
- i = IntegerField(unique=True)
- j = IntegerField(unique=True)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = 'INTEGERPK'
- # model requires a new PK
- old_field = Transitional._meta.get_field('j')
- new_field = IntegerField(primary_key=True)
- new_field.model = Transitional
- new_field.set_attributes_from_name('j')
- with connection.schema_editor() as editor:
- editor.alter_field(Transitional, old_field, new_field, strict=True)
- # Create a model class representing the updated model.
- class IntegerUnique(Model):
- i = IntegerField(unique=True)
- j = IntegerField(primary_key=True)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = 'INTEGERPK'
- # Ensure unique constraint works.
- IntegerUnique.objects.create(i=1, j=1)
- with self.assertRaises(IntegrityError):
- IntegerUnique.objects.create(i=1, j=2)
- def test_rename(self):
- """
- Tests simple renaming of fields
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the field is right to begin with
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
- self.assertNotIn("display_name", columns)
- # Alter the name field's name
- old_field = Author._meta.get_field("name")
- new_field = CharField(max_length=254)
- new_field.set_attributes_from_name("display_name")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- columns = self.column_classes(Author)
- self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField'])
- self.assertNotIn("name", columns)
- @isolate_apps('schema')
- def test_rename_referenced_field(self):
- class Author(Model):
- name = CharField(max_length=255, unique=True)
- class Meta:
- app_label = 'schema'
- class Book(Model):
- author = ForeignKey(Author, CASCADE, to_field='name')
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- new_field = CharField(max_length=255, unique=True)
- new_field.set_attributes_from_name('renamed')
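- # Some backends (notably older SQLite) can't rename a referenced column
- # inside an atomic block, hence the conditional atomic flag.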
- with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
- editor.alter_field(Author, Author._meta.get_field('name'), new_field)
- # Ensure the foreign key reference was updated.
- self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')
- @skipIfDBFeature('interprets_empty_strings_as_nulls')
- def test_rename_keep_null_status(self):
- """
- Renaming a field shouldn't affect the not null status.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- with self.assertRaises(IntegrityError):
- Note.objects.create(info=None)
- old_field = Note._meta.get_field("info")
- new_field = TextField()
- new_field.set_attributes_from_name("detail_info")
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- columns = self.column_classes(Note)
- self.assertEqual(columns['detail_info'][0], "TextField")
- self.assertNotIn("info", columns)
- with self.assertRaises(IntegrityError):
- NoteRename.objects.create(detail_info=None)
- def _test_m2m_create(self, M2MFieldClass):
- """
- Tests M2M fields on models during creation
- """
- class LocalBookWithM2M(Model):
- author = ForeignKey(Author, CASCADE)
- title = CharField(max_length=100, db_index=True)
- pub_date = DateTimeField()
- tags = M2MFieldClass("TagM2MTest", related_name="books")
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalBookWithM2M]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(TagM2MTest)
- editor.create_model(LocalBookWithM2M)
- # Ensure there is now an m2m table there
- columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
- self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField'])
- def test_m2m_create(self):
- self._test_m2m_create(ManyToManyField)
- def test_m2m_create_custom(self):
- self._test_m2m_create(CustomManyToManyField)
- def test_m2m_create_inherited(self):
- self._test_m2m_create(InheritedManyToManyField)
- def _test_m2m_create_through(self, M2MFieldClass):
- """
- Tests M2M fields on models during creation with through models
- """
- class LocalTagThrough(Model):
- book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
- tag = ForeignKey("schema.TagM2MTest", CASCADE)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- class LocalBookWithM2MThrough(Model):
- tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(LocalTagThrough)
- editor.create_model(TagM2MTest)
- editor.create_model(LocalBookWithM2MThrough)
- # Ensure there is now an m2m table there
- columns = self.column_classes(LocalTagThrough)
- self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField'])
- self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField'])
- def test_m2m_create_through(self):
- self._test_m2m_create_through(ManyToManyField)
- def test_m2m_create_through_custom(self):
- self._test_m2m_create_through(CustomManyToManyField)
- def test_m2m_create_through_inherited(self):
- self._test_m2m_create_through(InheritedManyToManyField)
- def _test_m2m(self, M2MFieldClass):
- """
- Tests adding/removing M2M fields on models
- """
- class LocalAuthorWithM2M(Model):
- name = CharField(max_length=255)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalAuthorWithM2M]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(LocalAuthorWithM2M)
- editor.create_model(TagM2MTest)
- # Create an M2M field
- new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
- new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
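- # Unlike set_attributes_from_name(), contribute_to_class() registers the
- # field on the model and creates the auto-generated through model.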
- # Ensure there's no m2m table there
- with self.assertRaises(DatabaseError):
- self.column_classes(new_field.remote_field.through)
- # Add the field
- with connection.schema_editor() as editor:
- editor.add_field(LocalAuthorWithM2M, new_field)
- # Ensure there is now an m2m table there
- columns = self.column_classes(new_field.remote_field.through)
- self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField'])
- # "Alter" the field. This should not rename the DB table to itself.
- with connection.schema_editor() as editor:
- editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
- # Remove the M2M table again
- with connection.schema_editor() as editor:
- editor.remove_field(LocalAuthorWithM2M, new_field)
- # Ensure there's no m2m table there
- with self.assertRaises(DatabaseError):
- self.column_classes(new_field.remote_field.through)
- # Make sure the model state is coherent with the table state now that
- # we've removed the tags field.
- opts = LocalAuthorWithM2M._meta
- opts.local_many_to_many.remove(new_field)
- del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name]
- opts._expire_cache()
- def test_m2m(self):
- self._test_m2m(ManyToManyField)
- def test_m2m_custom(self):
- self._test_m2m(CustomManyToManyField)
- def test_m2m_inherited(self):
- self._test_m2m(InheritedManyToManyField)
- def _test_m2m_through_alter(self, M2MFieldClass):
- """
- Tests altering M2Ms with explicit through models (should no-op)
- """
- class LocalAuthorTag(Model):
- author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
- tag = ForeignKey("schema.TagM2MTest", CASCADE)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- class LocalAuthorWithM2MThrough(Model):
- name = CharField(max_length=255)
- tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(LocalAuthorTag)
- editor.create_model(LocalAuthorWithM2MThrough)
- editor.create_model(TagM2MTest)
- # Ensure the m2m table is there
- self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
- # "Alter" the field's blankness. This should not actually do anything.
- old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
- new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
- new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
- with connection.schema_editor() as editor:
- editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True)
- # Ensure the m2m table is still there
- self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
- def test_m2m_through_alter(self):
- self._test_m2m_through_alter(ManyToManyField)
- def test_m2m_through_alter_custom(self):
- self._test_m2m_through_alter(CustomManyToManyField)
- def test_m2m_through_alter_inherited(self):
- self._test_m2m_through_alter(InheritedManyToManyField)
- def _test_m2m_repoint(self, M2MFieldClass):
- """
- Tests repointing M2M fields
- """
- class LocalBookWithM2M(Model):
- author = ForeignKey(Author, CASCADE)
- title = CharField(max_length=100, db_index=True)
- pub_date = DateTimeField()
- tags = M2MFieldClass("TagM2MTest", related_name="books")
- class Meta:
- app_label = 'schema'
- apps = new_apps
- self.local_models = [LocalBookWithM2M]
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(LocalBookWithM2M)
- editor.create_model(TagM2MTest)
- editor.create_model(UniqueTest)
- # Ensure the M2M exists and points to TagM2MTest
- if connection.features.supports_foreign_keys:
- self.assertForeignKeyExists(
- LocalBookWithM2M._meta.get_field("tags").remote_field.through,
- 'tagm2mtest_id',
- 'schema_tagm2mtest',
- )
- # Repoint the M2M
- old_field = LocalBookWithM2M._meta.get_field("tags")
- new_field = M2MFieldClass(UniqueTest)
- new_field.contribute_to_class(LocalBookWithM2M, "uniques")
- with connection.schema_editor() as editor:
- editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
- # Ensure old M2M is gone
- with self.assertRaises(DatabaseError):
- self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
- # This model looks like the new model and is used for teardown.
- opts = LocalBookWithM2M._meta
- opts.local_many_to_many.remove(old_field)
- # Ensure the new M2M exists and points to UniqueTest
- if connection.features.supports_foreign_keys:
- self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest')
- def test_m2m_repoint(self):
- self._test_m2m_repoint(ManyToManyField)
- def test_m2m_repoint_custom(self):
- self._test_m2m_repoint(CustomManyToManyField)
- def test_m2m_repoint_inherited(self):
- self._test_m2m_repoint(InheritedManyToManyField)
- @isolate_apps('schema')
- def test_m2m_rename_field_in_target_model(self):
- class LocalTagM2MTest(Model):
- title = CharField(max_length=255)
- class Meta:
- app_label = 'schema'
- class LocalM2M(Model):
- tags = ManyToManyField(LocalTagM2MTest)
- class Meta:
- app_label = 'schema'
- # Create the tables.
- with connection.schema_editor() as editor:
- editor.create_model(LocalM2M)
- editor.create_model(LocalTagM2MTest)
- self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
- # Ensure the m2m table is there.
- self.assertEqual(len(self.column_classes(LocalM2M)), 1)
- # Alter a field in LocalTagM2MTest.
- old_field = LocalTagM2MTest._meta.get_field('title')
- new_field = CharField(max_length=254)
- new_field.contribute_to_class(LocalTagM2MTest, 'title1')
- # @isolate_apps() and inner models are needed to have the model
- # relations populated; otherwise this doesn't act as a regression test.
- self.assertEqual(len(new_field.model._meta.related_objects), 1)
- with connection.schema_editor() as editor:
- editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
- # Ensure the m2m table is still there.
- self.assertEqual(len(self.column_classes(LocalM2M)), 1)
- @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
- def test_check_constraints(self):
- """
- Tests creating/deleting CHECK constraints
- """
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the constraint exists
- constraints = self.get_constraints(Author._meta.db_table)
- if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
- self.fail("No check constraint for height found")
- # Alter the column to remove it
- old_field = Author._meta.get_field("height")
- new_field = IntegerField(null=True, blank=True)
- new_field.set_attributes_from_name("height")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- constraints = self.get_constraints(Author._meta.db_table)
- for details in constraints.values():
- if details['columns'] == ["height"] and details['check']:
- self.fail("Check constraint for height found")
- # Alter the column to re-add it
- new_field2 = Author._meta.get_field("height")
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, new_field2, strict=True)
- constraints = self.get_constraints(Author._meta.db_table)
- if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
- self.fail("No check constraint for height found")
- @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
- def test_remove_field_check_does_not_remove_meta_constraints(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add the custom check constraint
- constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check')
- custom_constraint_name = constraint.name
- Author._meta.constraints = [constraint]
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- # Ensure the constraints exist
- constraints = self.get_constraints(Author._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Alter the column to remove field check
- old_field = Author._meta.get_field('height')
- new_field = IntegerField(null=True, blank=True)
- new_field.set_attributes_from_name('height')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- constraints = self.get_constraints(Author._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 0)
- # Alter the column to re-add field check
- new_field2 = Author._meta.get_field('height')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, new_field2, strict=True)
- constraints = self.get_constraints(Author._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Drop the check constraint
- with connection.schema_editor() as editor:
- Author._meta.constraints = []
- editor.remove_constraint(Author, constraint)
- def test_unique(self):
- """
- Tests removing and adding unique constraints to a single column.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- # Ensure the field is unique to begin with
- Tag.objects.create(title="foo", slug="foo")
- with self.assertRaises(IntegrityError):
- Tag.objects.create(title="bar", slug="foo")
- Tag.objects.all().delete()
- # Alter the slug field to be non-unique
- old_field = Tag._meta.get_field("slug")
- new_field = SlugField(unique=False)
- new_field.set_attributes_from_name("slug")
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, old_field, new_field, strict=True)
- # Ensure the field is no longer unique
- Tag.objects.create(title="foo", slug="foo")
- Tag.objects.create(title="bar", slug="foo")
- Tag.objects.all().delete()
- # Alter the slug field to be unique
- new_field2 = SlugField(unique=True)
- new_field2.set_attributes_from_name("slug")
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, new_field, new_field2, strict=True)
- # Ensure the field is unique again
- Tag.objects.create(title="foo", slug="foo")
- with self.assertRaises(IntegrityError):
- Tag.objects.create(title="bar", slug="foo")
- Tag.objects.all().delete()
- # Rename the field
- new_field3 = SlugField(unique=True)
- new_field3.set_attributes_from_name("slug2")
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, new_field2, new_field3, strict=True)
- # Ensure the field is still unique
- TagUniqueRename.objects.create(title="foo", slug2="foo")
- with self.assertRaises(IntegrityError):
- TagUniqueRename.objects.create(title="bar", slug2="foo")
- Tag.objects.all().delete()
- def test_unique_name_quoting(self):
- old_table_name = TagUniqueRename._meta.db_table
- try:
- with connection.schema_editor() as editor:
- editor.create_model(TagUniqueRename)
- editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table')
- TagUniqueRename._meta.db_table = 'unique-table'
- # This fails if the unique index name isn't quoted.
- editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),))
- finally:
- TagUniqueRename._meta.db_table = old_table_name
- @isolate_apps('schema')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_unique_no_unnecessary_fk_drops(self):
- """
- If AlterField isn't selective about dropping foreign key constraints
- when modifying a field with a unique constraint, it incorrectly drops
- and recreates the Book.author foreign key even though that key doesn't
- restrict the field being changed (#29193).
- """
- class Author(Model):
- name = CharField(max_length=254, unique=True)
- class Meta:
- app_label = 'schema'
- class Book(Model):
- author = ForeignKey(Author, CASCADE)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- new_field = CharField(max_length=255, unique=True)
- new_field.model = Author
- new_field.set_attributes_from_name('name')
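- # The schema editor logs every executed statement at DEBUG level on
- # 'django.db.backends.schema', so counting log records counts SQL statements.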
- with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
- with connection.schema_editor() as editor:
- editor.alter_field(Author, Author._meta.get_field('name'), new_field)
- # One SQL statement is executed to alter the field.
- self.assertEqual(len(cm.records), 1)
- @isolate_apps('schema')
- def test_unique_and_reverse_m2m(self):
- """
- AlterField can modify a unique field when there's a reverse M2M
- relation on the model.
- """
- class Tag(Model):
- title = CharField(max_length=255)
- slug = SlugField(unique=True)
- class Meta:
- app_label = 'schema'
- class Book(Model):
- tags = ManyToManyField(Tag, related_name='books')
- class Meta:
- app_label = 'schema'
- self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through]
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- editor.create_model(Book)
- new_field = SlugField(max_length=75, unique=True)
- new_field.model = Tag
- new_field.set_attributes_from_name('slug')
- with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field)
- # One SQL statement is executed to alter the field.
- self.assertEqual(len(cm.records), 1)
- # Ensure that the field is still unique.
- Tag.objects.create(title='foo', slug='foo')
- with self.assertRaises(IntegrityError):
- Tag.objects.create(title='bar', slug='foo')
- @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
- def test_remove_field_unique_does_not_remove_meta_constraints(self):
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithUniqueName)
- # Add the custom unique constraint
- constraint = UniqueConstraint(fields=['name'], name='author_name_uniq')
- custom_constraint_name = constraint.name
- AuthorWithUniqueName._meta.constraints = [constraint]
- with connection.schema_editor() as editor:
- editor.add_constraint(AuthorWithUniqueName, constraint)
- # Ensure the constraints exist
- constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Alter the column to remove field uniqueness
- old_field = AuthorWithUniqueName._meta.get_field('name')
- new_field = CharField(max_length=255)
- new_field.set_attributes_from_name('name')
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
- constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 0)
- # Alter the column to re-add field uniqueness
- new_field2 = AuthorWithUniqueName._meta.get_field('name')
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
- constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Drop the unique constraint
- with connection.schema_editor() as editor:
- AuthorWithUniqueName._meta.constraints = []
- editor.remove_constraint(AuthorWithUniqueName, constraint)
- def test_unique_together(self):
- """
- Tests removing and adding unique_together constraints on a model.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(UniqueTest)
- # Ensure the fields are unique to begin with
- UniqueTest.objects.create(year=2012, slug="foo")
- UniqueTest.objects.create(year=2011, slug="foo")
- UniqueTest.objects.create(year=2011, slug="bar")
- with self.assertRaises(IntegrityError):
- UniqueTest.objects.create(year=2012, slug="foo")
- UniqueTest.objects.all().delete()
- # Alter the model to its non-unique-together companion
- with connection.schema_editor() as editor:
- editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
- # Ensure the fields are no longer unique
- UniqueTest.objects.create(year=2012, slug="foo")
- UniqueTest.objects.create(year=2012, slug="foo")
- UniqueTest.objects.all().delete()
- # Alter it back
- new_field2 = SlugField(unique=True)
- new_field2.set_attributes_from_name("slug")
- with connection.schema_editor() as editor:
- editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
- # Ensure the fields are unique again
- UniqueTest.objects.create(year=2012, slug="foo")
- with self.assertRaises(IntegrityError):
- UniqueTest.objects.create(year=2012, slug="foo")
- UniqueTest.objects.all().delete()
- def test_unique_together_with_fk(self):
- """
- Tests removing and adding unique_together constraints that include
- a foreign key.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure there's no unique_together to begin with
- self.assertEqual(Book._meta.unique_together, ())
- # Add the unique_together constraint
- with connection.schema_editor() as editor:
- editor.alter_unique_together(Book, [], [['author', 'title']])
- # Alter it back
- with connection.schema_editor() as editor:
- editor.alter_unique_together(Book, [['author', 'title']], [])
- def test_unique_together_with_fk_with_existing_index(self):
- """
- Tests removing and adding unique_together constraints that include
- a foreign key, where the foreign key is added after the model is
- created.
- """
- # Create the tables
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithoutAuthor)
- new_field = ForeignKey(Author, CASCADE)
- new_field.set_attributes_from_name('author')
- editor.add_field(BookWithoutAuthor, new_field)
- # Ensure the fields aren't unique to begin with
- self.assertEqual(Book._meta.unique_together, ())
- # Add the unique_together constraint
- with connection.schema_editor() as editor:
- editor.alter_unique_together(Book, [], [['author', 'title']])
- # Alter it back
- with connection.schema_editor() as editor:
- editor.alter_unique_together(Book, [['author', 'title']], [])
- @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
- def test_remove_unique_together_does_not_remove_meta_constraints(self):
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithUniqueNameAndBirthday)
- # Add the custom unique constraint
- constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq')
- custom_constraint_name = constraint.name
- AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint]
- with connection.schema_editor() as editor:
- editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint)
- # Ensure the constraints exist
- constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Remove unique together
- unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together
- with connection.schema_editor() as editor:
- editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, [])
- constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 0)
- # Re-add unique together
- with connection.schema_editor() as editor:
- editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together)
- constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
- self.assertIn(custom_constraint_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Drop the unique constraint
- with connection.schema_editor() as editor:
- AuthorWithUniqueNameAndBirthday._meta.constraints = []
- editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint)
- def test_unique_constraint(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(fields=['name'], name='name_uq')
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
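- # create_sql() returns a Statement object whose references_table() and
- # references_column() helpers report what the generated DDL touches.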
- table = Author._meta.db_table
- self.assertIs(sql.references_table(table), True)
- self.assertIs(sql.references_column(table, 'name'), True)
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_unique_constraint(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(Upper('name').desc(), name='func_upper_uq')
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
- table = Author._meta.db_table
- constraints = self.get_constraints(table)
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, constraint.name, ['DESC'])
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
- # SQL contains a database function.
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_composite_func_unique_constraint(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithSlug)
- constraint = UniqueConstraint(
- Upper('title'),
- Lower('slug'),
- name='func_upper_lower_unq',
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(BookWithSlug, constraint)
- sql = constraint.create_sql(BookWithSlug, editor)
- table = BookWithSlug._meta.db_table
- constraints = self.get_constraints(table)
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
- # SQL contains database functions.
- self.assertIs(sql.references_column(table, 'title'), True)
- self.assertIs(sql.references_column(table, 'slug'), True)
- sql = str(sql)
- self.assertIn('UPPER(%s)' % editor.quote_name('title'), sql)
- self.assertIn('LOWER(%s)' % editor.quote_name('slug'), sql)
- self.assertLess(sql.index('UPPER'), sql.index('LOWER'))
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(BookWithSlug, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_unique_constraint_field_and_expression(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(
- F('height').desc(),
- 'uuid',
- Lower('name').asc(),
- name='func_f_lower_field_unq',
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
- table = Author._meta.db_table
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC', 'ASC'])
- constraints = self.get_constraints(table)
- self.assertIs(constraints[constraint.name]['unique'], True)
- self.assertEqual(len(constraints[constraint.name]['columns']), 3)
- self.assertEqual(constraints[constraint.name]['columns'][1], 'uuid')
- # SQL contains database functions and columns.
- self.assertIs(sql.references_column(table, 'height'), True)
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIs(sql.references_column(table, 'uuid'), True)
- self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql))
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes', 'supports_partial_indexes')
- def test_func_unique_constraint_partial(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(
- Upper('name'),
- name='func_upper_cond_weight_uq',
- condition=Q(weight__isnull=False),
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
- table = Author._meta.db_table
- constraints = self.get_constraints(table)
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
- self.assertIn(
- 'WHERE %s IS NOT NULL' % editor.quote_name('weight'),
- str(sql),
- )
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes', 'supports_covering_indexes')
- def test_func_unique_constraint_covering(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(
- Upper('name'),
- name='func_upper_covering_uq',
- include=['weight', 'height'],
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
- table = Author._meta.db_table
- constraints = self.get_constraints(table)
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
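- # The Upper('name') expression is introspected as a None column entry,
- # while the INCLUDE columns appear by name.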
- self.assertEqual(
- constraints[constraint.name]['columns'],
- [None, 'weight', 'height'],
- )
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIs(sql.references_column(table, 'weight'), True)
- self.assertIs(sql.references_column(table, 'height'), True)
- self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
- self.assertIn(
- 'INCLUDE (%s, %s)' % (
- editor.quote_name('weight'),
- editor.quote_name('height'),
- ),
- str(sql),
- )
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_unique_constraint_lookups(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
- constraint = UniqueConstraint(
- F('name__lower'),
- F('weight__abs'),
- name='func_lower_abs_lookup_uq',
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- sql = constraint.create_sql(Author, editor)
- table = Author._meta.db_table
- constraints = self.get_constraints(table)
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
- # SQL contains columns.
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIs(sql.references_column(table, 'weight'), True)
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(Author, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_unique_constraint_collate(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest(
- 'This backend does not support case-insensitive collations.'
- )
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithSlug)
- constraint = UniqueConstraint(
- Collate(F('title'), collation=collation).desc(),
- Collate('slug', collation=collation),
- name='func_collate_uq',
- )
- # Add constraint.
- with connection.schema_editor() as editor:
- editor.add_constraint(BookWithSlug, constraint)
- sql = constraint.create_sql(BookWithSlug, editor)
- table = BookWithSlug._meta.db_table
- constraints = self.get_constraints(table)
- self.assertIn(constraint.name, constraints)
- self.assertIs(constraints[constraint.name]['unique'], True)
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC'])
- # SQL contains columns and a collation.
- self.assertIs(sql.references_column(table, 'title'), True)
- self.assertIs(sql.references_column(table, 'slug'), True)
- self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
- # Remove constraint.
- with connection.schema_editor() as editor:
- editor.remove_constraint(BookWithSlug, constraint)
- self.assertNotIn(constraint.name, self.get_constraints(table))
- @skipIfDBFeature('supports_expression_indexes')
- def test_func_unique_constraint_unsupported(self):
- # UniqueConstraint is ignored on databases that don't support indexes on
- # expressions.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(F('name'), name='func_name_uq')
- with connection.schema_editor() as editor, self.assertNumQueries(0):
- self.assertIsNone(editor.add_constraint(Author, constraint))
- self.assertIsNone(editor.remove_constraint(Author, constraint))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_unique_constraint_nonexistent_field(self):
- constraint = UniqueConstraint(Lower('nonexistent'), name='func_nonexistent_uq')
- msg = (
- "Cannot resolve keyword 'nonexistent' into field. Choices are: "
- "height, id, name, uuid, weight"
- )
- with self.assertRaisesMessage(FieldError, msg):
- with connection.schema_editor() as editor:
- editor.add_constraint(Author, constraint)
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_unique_constraint_nondeterministic(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- constraint = UniqueConstraint(Random(), name='func_random_uq')
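- # Index expressions must be deterministic; the database itself rejects
- # random() here (PostgreSQL, for example, requires IMMUTABLE functions).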
- with connection.schema_editor() as editor:
- with self.assertRaises(DatabaseError):
- editor.add_constraint(Author, constraint)
- def test_index_together(self):
- """
- Tests removing and adding index_together constraints on a model.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- # Ensure there's no index on the slug/title columns first
- self.assertIs(
- any(
- c["index"]
- for c in self.get_constraints("schema_tag").values()
- if c['columns'] == ["slug", "title"]
- ),
- False,
- )
- # Alter the model to add an index
- with connection.schema_editor() as editor:
- editor.alter_index_together(Tag, [], [("slug", "title")])
- # Ensure there is now an index
- self.assertIs(
- any(
- c["index"]
- for c in self.get_constraints("schema_tag").values()
- if c['columns'] == ["slug", "title"]
- ),
- True,
- )
- # Alter it back
- new_field2 = SlugField(unique=True)
- new_field2.set_attributes_from_name("slug")
- with connection.schema_editor() as editor:
- editor.alter_index_together(Tag, [("slug", "title")], [])
- # Ensure there's no index
- self.assertIs(
- any(
- c["index"]
- for c in self.get_constraints("schema_tag").values()
- if c['columns'] == ["slug", "title"]
- ),
- False,
- )
- def test_index_together_with_fk(self):
- """
- Tests removing and adding index_together constraints that include
- a foreign key.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure the fields aren't indexed together to begin with
- self.assertEqual(Book._meta.index_together, ())
- # Add the index_together constraint
- with connection.schema_editor() as editor:
- editor.alter_index_together(Book, [], [['author', 'title']])
- # Alter it back
- with connection.schema_editor() as editor:
- editor.alter_index_together(Book, [['author', 'title']], [])
- def test_create_index_together(self):
- """
- Tests creating models with index_together already defined
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(TagIndexed)
- # Ensure there is an index
- self.assertIs(
- any(
- c["index"]
- for c in self.get_constraints("schema_tagindexed").values()
- if c['columns'] == ["slug", "title"]
- ),
- True,
- )
- @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
- def test_remove_index_together_does_not_remove_meta_indexes(self):
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithIndexedNameAndBirthday)
- # Add the custom index
- index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx')
- custom_index_name = index.name
- AuthorWithIndexedNameAndBirthday._meta.indexes = [index]
- with connection.schema_editor() as editor:
- editor.add_index(AuthorWithIndexedNameAndBirthday, index)
- # Ensure the indexes exist
- constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
- self.assertIn(custom_index_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Remove index together
- index_together = AuthorWithIndexedNameAndBirthday._meta.index_together
- with connection.schema_editor() as editor:
- editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, [])
- constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
- self.assertIn(custom_index_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
- ]
- self.assertEqual(len(other_constraints), 0)
- # Re-add index together
- with connection.schema_editor() as editor:
- editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together)
- constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
- self.assertIn(custom_index_name, constraints)
- other_constraints = [
- name for name, details in constraints.items()
- if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
- ]
- self.assertEqual(len(other_constraints), 1)
- # Drop the index
- with connection.schema_editor() as editor:
- AuthorWithIndexedNameAndBirthday._meta.indexes = []
- editor.remove_index(AuthorWithIndexedNameAndBirthday, index)
- @isolate_apps('schema')
- def test_db_table(self):
- """
- Tests renaming of the table
- """
- class Author(Model):
- name = CharField(max_length=255)
- class Meta:
- app_label = 'schema'
- class Book(Model):
- author = ForeignKey(Author, CASCADE)
- class Meta:
- app_label = 'schema'
- # Create the table and one referring it.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure the table is there to begin with
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
- # Alter the table
- with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
- editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
- Author._meta.db_table = "schema_otherauthor"
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
- # Ensure the foreign key reference was updated
- self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
- # Alter the table again
- with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
- editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
- # Ensure the table is still there
- Author._meta.db_table = "schema_author"
- columns = self.column_classes(Author)
- self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
- def test_add_remove_index(self):
- """
- Tests index addition and removal
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the table is there and has no index
- self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
- # Add the index
- index = Index(fields=['name'], name='author_title_idx')
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- self.assertIn('name', self.get_indexes(Author._meta.db_table))
- # Drop the index
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
- def test_remove_db_index_doesnt_remove_custom_indexes(self):
- """
- Changing db_index to False doesn't remove indexes from Meta.indexes.
- """
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithIndexedName)
- # Ensure the table has its index
- self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table))
- # Add the custom index
- index = Index(fields=['-name'], name='author_name_idx')
- author_index_name = index.name
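- # Compute the name Django auto-generates for the db_index=True index, so its removal can be asserted after alter_field() below.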
- with connection.schema_editor() as editor:
- db_index_name = editor._create_index_name(
- table_name=AuthorWithIndexedName._meta.db_table,
- column_names=('name',),
- )
- try:
- AuthorWithIndexedName._meta.indexes = [index]
- with connection.schema_editor() as editor:
- editor.add_index(AuthorWithIndexedName, index)
- old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
- self.assertIn(author_index_name, old_constraints)
- self.assertIn(db_index_name, old_constraints)
- # Change name field to db_index=False
- old_field = AuthorWithIndexedName._meta.get_field('name')
- new_field = CharField(max_length=255)
- new_field.set_attributes_from_name('name')
- with connection.schema_editor() as editor:
- editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True)
- new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
- self.assertNotIn(db_index_name, new_constraints)
- # The index from Meta.indexes is still in the database.
- self.assertIn(author_index_name, new_constraints)
- # Drop the index
- with connection.schema_editor() as editor:
- editor.remove_index(AuthorWithIndexedName, index)
- finally:
- AuthorWithIndexedName._meta.indexes = []
- def test_order_index(self):
- """
- Indexes defined with ordering (ASC/DESC) on columns.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # The table doesn't have an index
- self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
- index_name = 'author_name_idx'
- # Add the index
- index = Index(fields=['name', '-weight'], name=index_name)
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
- # Drop the index
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- def test_indexes(self):
- """
- Tests creation/altering of indexes
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- # Ensure the table is there and has the right index
- self.assertIn(
- "title",
- self.get_indexes(Book._meta.db_table),
- )
- # Alter to remove the index
- old_field = Book._meta.get_field("title")
- new_field = CharField(max_length=100, db_index=False)
- new_field.set_attributes_from_name("title")
- with connection.schema_editor() as editor:
- editor.alter_field(Book, old_field, new_field, strict=True)
- # Ensure the table is there and has no index
- self.assertNotIn(
- "title",
- self.get_indexes(Book._meta.db_table),
- )
- # Alter to re-add the index
- new_field2 = Book._meta.get_field("title")
- with connection.schema_editor() as editor:
- editor.alter_field(Book, new_field, new_field2, strict=True)
- # Ensure the table is there and has the index again
- self.assertIn(
- "title",
- self.get_indexes(Book._meta.db_table),
- )
- # Add a unique column and verify that it creates an implicit index
- new_field3 = BookWithSlug._meta.get_field("slug")
- with connection.schema_editor() as editor:
- editor.add_field(Book, new_field3)
- self.assertIn(
- "slug",
- self.get_uniques(Book._meta.db_table),
- )
- # Remove the unique, check the index goes with it
- new_field4 = CharField(max_length=20, unique=False)
- new_field4.set_attributes_from_name("slug")
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
- self.assertNotIn(
- "slug",
- self.get_uniques(Book._meta.db_table),
- )
- def test_text_field_with_db_index(self):
- with connection.schema_editor() as editor:
- editor.create_model(AuthorTextFieldWithIndex)
- # The text_field index is present if the database supports it.
- assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn
- assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table))
- def _index_expressions_wrappers(self):
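- """Return a comma-separated list of the IndexExpression wrapper class names (used in the error messages asserted below)."""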
- index_expression = IndexExpression()
- index_expression.set_wrapper_classes(connection)
- return ', '.join([
- wrapper_cls.__qualname__ for wrapper_cls in index_expression.wrapper_classes
- ])
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_multiple_wrapper_references(self):
- index = Index(OrderBy(F('name').desc(), descending=True), name='name')
- msg = (
- "Multiple references to %s can't be used in an indexed expression."
- % self._index_expressions_wrappers()
- )
- with connection.schema_editor() as editor:
- with self.assertRaisesMessage(ValueError, msg):
- editor.add_index(Author, index)
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_invalid_topmost_expressions(self):
- index = Index(Upper(F('name').desc()), name='name')
- msg = (
- '%s must be topmost expressions in an indexed expression.'
- % self._index_expressions_wrappers()
- )
- with connection.schema_editor() as editor:
- with self.assertRaisesMessage(ValueError, msg):
- editor.add_index(Author, index)
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(Lower('name').desc(), name='func_lower_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, index.name, ['DESC'])
- # SQL contains a database function.
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_f(self):
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- index = Index('slug', F('title').desc(), name='func_f_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Tag, index)
- sql = index.create_sql(Tag, editor)
- table = Tag._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(Tag._meta.db_table, index.name, ['ASC', 'DESC'])
- # SQL contains columns.
- self.assertIs(sql.references_column(table, 'slug'), True)
- self.assertIs(sql.references_column(table, 'title'), True)
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Tag, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_lookups(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
- index = Index(
- F('name__lower'),
- F('weight__abs'),
- name='func_lower_abs_lookup_idx',
- )
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- # SQL contains columns.
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIs(sql.references_column(table, 'weight'), True)
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_composite_func_index(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(Lower('name'), Upper('name'), name='func_lower_upper_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- # SQL contains database functions.
- self.assertIs(sql.references_column(table, 'name'), True)
- sql = str(sql)
- self.assertIn('LOWER(%s)' % editor.quote_name('name'), sql)
- self.assertIn('UPPER(%s)' % editor.quote_name('name'), sql)
- self.assertLess(sql.index('LOWER'), sql.index('UPPER'))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_composite_func_index_field_and_expression(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- index = Index(
- F('author').desc(),
- Lower('title').asc(),
- 'pub_date',
- name='func_f_lower_field_idx',
- )
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Book, index)
- sql = index.create_sql(Book, editor)
- table = Book._meta.db_table
- constraints = self.get_constraints(table)
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, index.name, ['DESC', 'ASC', 'ASC'])
- self.assertEqual(len(constraints[index.name]['columns']), 3)
- self.assertEqual(constraints[index.name]['columns'][2], 'pub_date')
- # SQL contains database functions and columns.
- self.assertIs(sql.references_column(table, 'author_id'), True)
- self.assertIs(sql.references_column(table, 'title'), True)
- self.assertIs(sql.references_column(table, 'pub_date'), True)
- self.assertIn('LOWER(%s)' % editor.quote_name('title'), str(sql))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Book, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- @isolate_apps('schema')
- def test_func_index_f_decimalfield(self):
- class Node(Model):
- value = DecimalField(max_digits=5, decimal_places=2)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Node)
- index = Index(F('value'), name='func_f_decimalfield_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Node, index)
- sql = index.create_sql(Node, editor)
- table = Node._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- self.assertIs(sql.references_column(table, 'value'), True)
- # SQL doesn't contain casting.
- self.assertNotIn('CAST', str(sql))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Node, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_cast(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(Cast('weight', FloatField()), name='func_cast_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- self.assertIs(sql.references_column(table, 'weight'), True)
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_collate(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest(
- 'This backend does not support case-insensitive collations.'
- )
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(BookWithSlug)
- index = Index(
- Collate(F('title'), collation=collation).desc(),
- Collate('slug', collation=collation),
- name='func_collate_idx',
- )
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(BookWithSlug, index)
- sql = index.create_sql(BookWithSlug, editor)
- table = Book._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, index.name, ['DESC', 'ASC'])
- # SQL contains columns and a collation.
- self.assertIs(sql.references_column(table, 'title'), True)
- self.assertIs(sql.references_column(table, 'slug'), True)
- self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Book, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- @skipIfDBFeature('collate_as_index_expression')
- def test_func_index_collate_f_ordered(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest(
- 'This backend does not support case-insensitive collations.'
- )
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(
- Collate(F('name').desc(), collation=collation),
- name='func_collate_f_desc_idx',
- )
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- if connection.features.supports_index_column_ordering:
- self.assertIndexOrder(table, index.name, ['DESC'])
- # SQL contains columns and a collation.
- self.assertIs(sql.references_column(table, 'name'), True)
- self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_calc(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(F('height') / (F('weight') + Value(5)), name='func_calc_idx')
- # Add index.
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- sql = index.create_sql(Author, editor)
- table = Author._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- # SQL contains columns and expressions.
- self.assertIs(sql.references_column(table, 'height'), True)
- self.assertIs(sql.references_column(table, 'weight'), True)
- sql = str(sql)
- self.assertIs(
- sql.index(editor.quote_name('height')) <
- sql.index('/') <
- sql.index(editor.quote_name('weight')) <
- sql.index('+') <
- sql.index('5'),
- True,
- )
- # Remove index.
- with connection.schema_editor() as editor:
- editor.remove_index(Author, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field')
- @isolate_apps('schema')
- def test_func_index_json_key_transform(self):
- class JSONModel(Model):
- field = JSONField()
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(JSONModel)
- self.isolated_local_models = [JSONModel]
- index = Index('field__some_key', name='func_json_key_idx')
- with connection.schema_editor() as editor:
- editor.add_index(JSONModel, index)
- sql = index.create_sql(JSONModel, editor)
- table = JSONModel._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- self.assertIs(sql.references_column(table, 'field'), True)
- with connection.schema_editor() as editor:
- editor.remove_index(JSONModel, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field')
- @isolate_apps('schema')
- def test_func_index_json_key_transform_cast(self):
- class JSONModel(Model):
- field = JSONField()
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(JSONModel)
- self.isolated_local_models = [JSONModel]
- index = Index(
- Cast(KeyTextTransform('some_key', 'field'), IntegerField()),
- name='func_json_key_cast_idx',
- )
- with connection.schema_editor() as editor:
- editor.add_index(JSONModel, index)
- sql = index.create_sql(JSONModel, editor)
- table = JSONModel._meta.db_table
- self.assertIn(index.name, self.get_constraints(table))
- self.assertIs(sql.references_column(table, 'field'), True)
- with connection.schema_editor() as editor:
- editor.remove_index(JSONModel, index)
- self.assertNotIn(index.name, self.get_constraints(table))
- @skipIfDBFeature('supports_expression_indexes')
- def test_func_index_unsupported(self):
- # Index is ignored on databases that don't support indexes on
- # expressions.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(F('name'), name='random_idx')
- with connection.schema_editor() as editor, self.assertNumQueries(0):
- self.assertIsNone(editor.add_index(Author, index))
- self.assertIsNone(editor.remove_index(Author, index))
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_nonexistent_field(self):
- index = Index(Lower('nonexistent'), name='func_nonexistent_idx')
- msg = (
- "Cannot resolve keyword 'nonexistent' into field. Choices are: "
- "height, id, name, uuid, weight"
- )
- with self.assertRaisesMessage(FieldError, msg):
- with connection.schema_editor() as editor:
- editor.add_index(Author, index)
- @skipUnlessDBFeature('supports_expression_indexes')
- def test_func_index_nondeterministic(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- index = Index(Random(), name='func_random_idx')
- with connection.schema_editor() as editor:
- with self.assertRaises(DatabaseError):
- editor.add_index(Author, index)
- def test_primary_key(self):
- """
- Tests altering of the primary key
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- # Ensure the table is there and has the right PK
- self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id')
- # Alter to change the PK
- id_field = Tag._meta.get_field("id")
- old_field = Tag._meta.get_field("slug")
- new_field = SlugField(primary_key=True)
- new_field.set_attributes_from_name("slug")
- new_field.model = Tag
- with connection.schema_editor() as editor:
- editor.remove_field(Tag, id_field)
- editor.alter_field(Tag, old_field, new_field)
- # Ensure the PK changed
- self.assertNotIn(
- 'id',
- self.get_indexes(Tag._meta.db_table),
- )
- self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug')
- def test_context_manager_exit(self):
- """
- Ensures the transaction is correctly closed when an error occurs
- inside a SchemaEditor context.
- """
- class SomeError(Exception):
- pass
- try:
- with connection.schema_editor():
- raise SomeError
- except SomeError:
- self.assertFalse(connection.in_atomic_block)
- @skipIfDBFeature('can_rollback_ddl')
- def test_unsupported_transactional_ddl_disallowed(self):
- message = (
- "Executing DDL statements while in a transaction on databases "
- "that can't perform a rollback is prohibited."
- )
- with atomic(), connection.schema_editor() as editor:
- with self.assertRaisesMessage(TransactionManagementError, message):
- editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''})
- @skipUnlessDBFeature('supports_foreign_keys', 'indexes_foreign_keys')
- def test_foreign_key_index_long_names_regression(self):
- """
- Regression test for #21497.
- Only affects databases that support foreign keys.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithEvenLongerName)
- editor.create_model(BookWithLongName)
- # Find the properly shortened column name
- column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
- column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
- # Ensure the table is there and has an index on the column
- self.assertIn(
- column_name,
- self.get_indexes(BookWithLongName._meta.db_table),
- )
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_add_foreign_key_long_names(self):
- """
- Regression test for #23009.
- Only affects databases that support foreign keys.
- """
- # Create the initial tables
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithEvenLongerName)
- editor.create_model(BookWithLongName)
- # Add a second FK; before the fix, this would fail due to the long reference name.
- new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something")
- new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk")
- with connection.schema_editor() as editor:
- editor.add_field(BookWithLongName, new_field)
- @isolate_apps('schema')
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_add_foreign_key_quoted_db_table(self):
- class Author(Model):
- class Meta:
- db_table = '"table_author_double_quoted"'
- app_label = 'schema'
- class Book(Model):
- author = ForeignKey(Author, CASCADE)
- class Meta:
- app_label = 'schema'
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
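- # MySQL keeps the double quotes as part of the table name; other backends treat them as identifier quoting and strip them.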
- if connection.vendor == 'mysql':
- self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"')
- else:
- self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted')
- def test_add_foreign_object(self):
- with connection.schema_editor() as editor:
- editor.create_model(BookForeignObj)
- new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id'])
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.add_field(BookForeignObj, new_field)
- def test_creation_deletion_reserved_names(self):
- """
- Tries creating a model's table, and then deleting it, when the table is
- named after an SQL reserved word.
- """
- # Create the table
- with connection.schema_editor() as editor:
- try:
- editor.create_model(Thing)
- except OperationalError as e:
- self.fail("Errors when applying initial migration for a model "
- "with a table named after an SQL reserved word: %s" % e)
- # The table is there
- list(Thing.objects.all())
- # Clean up that table
- with connection.schema_editor() as editor:
- editor.delete_model(Thing)
- # The table is gone
- with self.assertRaises(DatabaseError):
- list(Thing.objects.all())
- def test_remove_constraints_capital_letters(self):
- """
- #23065 - Constraint names must be quoted if they contain capital letters.
- """
- def get_field(*args, field_class=IntegerField, **kwargs):
- kwargs['db_column'] = "CamelCase"
- field = field_class(*args, **kwargs)
- field.set_attributes_from_name("CamelCase")
- return field
- model = Author
- field = get_field()
- table = model._meta.db_table
- column = field.column
- identifier_converter = connection.introspection.identifier_converter
- with connection.schema_editor() as editor:
- editor.create_model(model)
- editor.add_field(model, field)
- constraint_name = 'CamelCaseIndex'
- expected_constraint_name = identifier_converter(constraint_name)
- editor.execute(
- editor.sql_create_index % {
- "table": editor.quote_name(table),
- "name": editor.quote_name(constraint_name),
- "using": "",
- "columns": editor.quote_name(column),
- "extra": "",
- "condition": "",
- "include": "",
- }
- )
- self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- editor.alter_field(model, get_field(db_index=True), field, strict=True)
- self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- constraint_name = 'CamelCaseUniqConstraint'
- expected_constraint_name = identifier_converter(constraint_name)
- editor.execute(editor._create_unique_sql(model, [field], constraint_name))
- self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- editor.alter_field(model, get_field(unique=True), field, strict=True)
- self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- if editor.sql_create_fk:
- constraint_name = 'CamelCaseFKConstraint'
- expected_constraint_name = identifier_converter(constraint_name)
- editor.execute(
- editor.sql_create_fk % {
- "table": editor.quote_name(table),
- "name": editor.quote_name(constraint_name),
- "column": editor.quote_name(column),
- "to_table": editor.quote_name(table),
- "to_column": editor.quote_name(model._meta.auto_field.column),
- "deferrable": connection.ops.deferrable_sql(),
- }
- )
- self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
- self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
- def test_add_field_use_effective_default(self):
- """
- #23987 - effective_default() should be used as the field default when
- adding a new field.
- """
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure there's no surname field
- columns = self.column_classes(Author)
- self.assertNotIn("surname", columns)
- # Create a row
- Author.objects.create(name='Anonymous1')
- # Add a new CharField to ensure the default from effective_default() is used
- new_field = CharField(max_length=15, blank=True)
- new_field.set_attributes_from_name("surname")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- # Ensure field was added with the right default
- with connection.cursor() as cursor:
- cursor.execute("SELECT surname FROM schema_author;")
- item = cursor.fetchall()[0]
- self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '')
- def test_add_field_default_dropped(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure there's no surname field
- columns = self.column_classes(Author)
- self.assertNotIn("surname", columns)
- # Create a row
- Author.objects.create(name='Anonymous1')
- # Add new CharField with a default
- new_field = CharField(max_length=15, blank=True, default='surname default')
- new_field.set_attributes_from_name("surname")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- # Ensure field was added with the right default
- with connection.cursor() as cursor:
- cursor.execute("SELECT surname FROM schema_author;")
- item = cursor.fetchall()[0]
- self.assertEqual(item[0], 'surname default')
- # And that the default is no longer set in the database.
- field = next(
- f for f in connection.introspection.get_table_description(cursor, "schema_author")
- if f.name == "surname"
- )
- if connection.features.can_introspect_default:
- self.assertIsNone(field.default)
- def test_add_field_default_nullable(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add new nullable CharField with a default.
- new_field = CharField(max_length=15, blank=True, null=True, default='surname')
- new_field.set_attributes_from_name('surname')
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- Author.objects.create(name='Anonymous1')
- with connection.cursor() as cursor:
- cursor.execute('SELECT surname FROM schema_author;')
- item = cursor.fetchall()[0]
- self.assertIsNone(item[0])
- field = next(
- f
- for f in connection.introspection.get_table_description(
- cursor,
- 'schema_author',
- )
- if f.name == 'surname'
- )
- # Field is still nullable.
- self.assertTrue(field.null_ok)
- # The database default is no longer set.
- if connection.features.can_introspect_default:
- self.assertIn(field.default, ['NULL', None])
- def test_add_textfield_default_nullable(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Add new nullable TextField with a default.
- new_field = TextField(blank=True, null=True, default='text')
- new_field.set_attributes_from_name('description')
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- Author.objects.create(name='Anonymous1')
- with connection.cursor() as cursor:
- cursor.execute('SELECT description FROM schema_author;')
- item = cursor.fetchall()[0]
- self.assertIsNone(item[0])
- field = next(
- f
- for f in connection.introspection.get_table_description(
- cursor,
- 'schema_author',
- )
- if f.name == 'description'
- )
- # Field is still nullable.
- self.assertTrue(field.null_ok)
- # The database default is no longer set.
- if connection.features.can_introspect_default:
- self.assertIn(field.default, ['NULL', None])
- def test_alter_field_default_dropped(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Create a row
- Author.objects.create(name='Anonymous1')
- self.assertIsNone(Author.objects.get().height)
- old_field = Author._meta.get_field('height')
- # The default from the new field is used in updating existing rows.
- new_field = IntegerField(blank=True, default=42)
- new_field.set_attributes_from_name('height')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertEqual(Author.objects.get().height, 42)
- # The database default should be removed.
- with connection.cursor() as cursor:
- field = next(
- f for f in connection.introspection.get_table_description(cursor, "schema_author")
- if f.name == "height"
- )
- if connection.features.can_introspect_default:
- self.assertIsNone(field.default)
- def test_alter_field_default_doesnt_perform_queries(self):
- """
- No queries are performed if a field's default changes and the field is
- not changing from null to non-null.
- """
- with connection.schema_editor() as editor:
- editor.create_model(AuthorWithDefaultHeight)
- old_field = AuthorWithDefaultHeight._meta.get_field('height')
- new_default = old_field.default * 2
- new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
- new_field.set_attributes_from_name('height')
- with connection.schema_editor() as editor, self.assertNumQueries(0):
- editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
- @skipUnlessDBFeature('supports_foreign_keys')
- def test_alter_field_fk_attributes_noop(self):
- """
- No queries are performed when changing field attributes that don't
- affect the schema.
- """
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- old_field = Book._meta.get_field('author')
- new_field = ForeignKey(
- Author,
- blank=True,
- editable=False,
- error_messages={'invalid': 'error message'},
- help_text='help text',
- limit_choices_to={'limit': 'choice'},
- on_delete=PROTECT,
- related_name='related_name',
- related_query_name='related_query_name',
- validators=[lambda x: x],
- verbose_name='verbose name',
- )
- new_field.set_attributes_from_name('author')
- with connection.schema_editor() as editor, self.assertNumQueries(0):
- editor.alter_field(Book, old_field, new_field, strict=True)
- with connection.schema_editor() as editor, self.assertNumQueries(0):
- editor.alter_field(Book, new_field, old_field, strict=True)
- def test_add_textfield_unhashable_default(self):
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Create a row
- Author.objects.create(name='Anonymous1')
- # Create a field that has an unhashable default
- new_field = TextField(default={})
- new_field.set_attributes_from_name("info")
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_add_indexed_charfield(self):
- field = CharField(max_length=255, db_index=True)
- field.set_attributes_from_name('nom_de_plume')
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.add_field(Author, field)
- # Should create two indexes; one for the LIKE operator.
- self.assertEqual(
- self.get_constraints_for_column(Author, 'nom_de_plume'),
- ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'],
- )
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_add_unique_charfield(self):
- field = CharField(max_length=255, unique=True)
- field.set_attributes_from_name('nom_de_plume')
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.add_field(Author, field)
- # Should create two indexes; one for the LIKE operator.
- self.assertEqual(
- self.get_constraints_for_column(Author, 'nom_de_plume'),
- ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key']
- )
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_add_index_to_charfield(self):
- # Create the table and verify no initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
- # Alter to add db_index=True and create 2 indexes.
- old_field = Author._meta.get_field('name')
- new_field = CharField(max_length=255, db_index=True)
- new_field.set_attributes_from_name('name')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(Author, 'name'),
- ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like']
- )
- # Remove db_index=True to drop both indexes.
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, old_field, strict=True)
- self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_add_unique_to_charfield(self):
- # Create the table and verify no initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
- # Alter to add unique=True and create 2 indexes.
- old_field = Author._meta.get_field('name')
- new_field = CharField(max_length=255, unique=True)
- new_field.set_attributes_from_name('name')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(Author, 'name'),
- ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq']
- )
- # Remove unique=True to drop both indexes.
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, old_field, strict=True)
- self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_add_index_to_textfield(self):
- # Create the table and verify no initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
- # Alter to add db_index=True and create 2 indexes.
- old_field = Note._meta.get_field('info')
- new_field = TextField(db_index=True)
- new_field.set_attributes_from_name('info')
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(Note, 'info'),
- ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like']
- )
- # Remove db_index=True to drop both indexes.
- with connection.schema_editor() as editor:
- editor.alter_field(Note, new_field, old_field, strict=True)
- self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_add_unique_to_charfield_with_db_index(self):
- # Create the table and verify initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(BookWithoutAuthor)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
- )
- # Alter to add unique=True (should replace the index)
- old_field = BookWithoutAuthor._meta.get_field('title')
- new_field = CharField(max_length=100, db_index=True, unique=True)
- new_field.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
- )
- # Alter to remove unique=True (should drop unique index)
- new_field2 = CharField(max_length=100, db_index=True)
- new_field2.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
- )
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_remove_unique_and_db_index_from_charfield(self):
- # Create the table and verify initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(BookWithoutAuthor)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
- )
- # Alter to add unique=True (should replace the index)
- old_field = BookWithoutAuthor._meta.get_field('title')
- new_field = CharField(max_length=100, db_index=True, unique=True)
- new_field.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
- )
- # Alter to remove both unique=True and db_index=True (should drop all indexes)
- new_field2 = CharField(max_length=100)
- new_field2.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
- self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), [])
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_swap_unique_and_db_index_with_charfield(self):
- # Create the table and verify initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(BookWithoutAuthor)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
- )
- # Alter to set unique=True and remove db_index=True (should replace the index)
- old_field = BookWithoutAuthor._meta.get_field('title')
- new_field = CharField(max_length=100, unique=True)
- new_field.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
- )
- # Alter to set db_index=True and remove unique=True (should restore index)
- new_field2 = CharField(max_length=100, db_index=True)
- new_field2.set_attributes_from_name('title')
- with connection.schema_editor() as editor:
- editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(BookWithoutAuthor, 'title'),
- ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
- )
- @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
- def test_alter_field_add_db_index_to_charfield_with_unique(self):
- # Create the table and verify initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(Tag)
- self.assertEqual(
- self.get_constraints_for_column(Tag, 'slug'),
- ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
- )
- # Alter to add db_index=True
- old_field = Tag._meta.get_field('slug')
- new_field = SlugField(db_index=True, unique=True)
- new_field.set_attributes_from_name('slug')
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(Tag, 'slug'),
- ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
- )
- # Alter to remove db_index=True
- new_field2 = SlugField(unique=True)
- new_field2.set_attributes_from_name('slug')
- with connection.schema_editor() as editor:
- editor.alter_field(Tag, new_field, new_field2, strict=True)
- self.assertEqual(
- self.get_constraints_for_column(Tag, 'slug'),
- ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
- )
- def test_alter_field_add_index_to_integerfield(self):
- # Create the table and verify no initial indexes.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
- # Alter to add db_index=True and create index.
- old_field = Author._meta.get_field('weight')
- new_field = IntegerField(null=True, db_index=True)
- new_field.set_attributes_from_name('weight')
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
- # Remove db_index=True to drop index.
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, old_field, strict=True)
- self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
- def test_alter_pk_with_self_referential_field(self):
- """
- Changing the primary key field name of a model with a self-referential
- foreign key (#26384).
- """
- with connection.schema_editor() as editor:
- editor.create_model(Node)
- old_field = Node._meta.get_field('node_id')
- new_field = AutoField(primary_key=True)
- new_field.set_attributes_from_name('id')
- with connection.schema_editor() as editor:
- editor.alter_field(Node, old_field, new_field, strict=True)
- self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table)
- @mock.patch('django.db.backends.base.schema.datetime')
- @mock.patch('django.db.backends.base.schema.timezone')
- def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz):
- """
- effective_default() should be used for DateField, DateTimeField, and
- TimeField if auto_now or auto_now_add is set (#25005).
- """
- now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
- now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc)
- mocked_datetime.now = mock.MagicMock(return_value=now)
- mocked_tz.now = mock.MagicMock(return_value=now_tz)
- # Create the table
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Ensure the auto_now/auto_now_add columns aren't present yet
- columns = self.column_classes(Author)
- self.assertNotIn("dob_auto_now", columns)
- self.assertNotIn("dob_auto_now_add", columns)
- self.assertNotIn("dtob_auto_now", columns)
- self.assertNotIn("dtob_auto_now_add", columns)
- self.assertNotIn("tob_auto_now", columns)
- self.assertNotIn("tob_auto_now_add", columns)
- # Create a row
- Author.objects.create(name='Anonymous1')
- # Ensure fields were added with the correct defaults
- dob_auto_now = DateField(auto_now=True)
- dob_auto_now.set_attributes_from_name('dob_auto_now')
- self.check_added_field_default(
- editor, Author, dob_auto_now, 'dob_auto_now', now.date(),
- cast_function=lambda x: x.date(),
- )
- dob_auto_now_add = DateField(auto_now_add=True)
- dob_auto_now_add.set_attributes_from_name('dob_auto_now_add')
- self.check_added_field_default(
- editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(),
- cast_function=lambda x: x.date(),
- )
- dtob_auto_now = DateTimeField(auto_now=True)
- dtob_auto_now.set_attributes_from_name('dtob_auto_now')
- self.check_added_field_default(
- editor, Author, dtob_auto_now, 'dtob_auto_now', now,
- )
- dtob_auto_now_add = DateTimeField(auto_now_add=True)
- dtob_auto_now_add.set_attributes_from_name('dtob_auto_now_add')
- self.check_added_field_default(
- editor, Author, dtob_auto_now_add, 'dtob_auto_now_add', now,
- )
- tob_auto_now = TimeField(auto_now=True)
- tob_auto_now.set_attributes_from_name('tob_auto_now')
- self.check_added_field_default(
- editor, Author, tob_auto_now, 'tob_auto_now', now.time(),
- cast_function=lambda x: x.time(),
- )
- tob_auto_now_add = TimeField(auto_now_add=True)
- tob_auto_now_add.set_attributes_from_name('tob_auto_now_add')
- self.check_added_field_default(
- editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(),
- cast_function=lambda x: x.time(),
- )
- def test_namespaced_db_table_create_index_name(self):
- """
- Table names are stripped of their namespace/schema before being used to
- generate index names.
- """
- with connection.schema_editor() as editor:
- max_name_length = connection.ops.max_name_length() or 200
- namespace = 'n' * max_name_length
- table_name = 't' * max_name_length
- namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
- self.assertEqual(
- editor._create_index_name(table_name, []),
- editor._create_index_name(namespaced_table_name, []),
- )
- @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax')
- def test_creation_with_db_table_double_quotes(self):
- oracle_user = connection.creation._test_database_user()
- class Student(Model):
- name = CharField(max_length=30)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
- class Document(Model):
- name = CharField(max_length=30)
- students = ManyToManyField(Student)
- class Meta:
- app_label = 'schema'
- apps = new_apps
- db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
- self.local_models = [Student, Document]
- with connection.schema_editor() as editor:
- editor.create_model(Student)
- editor.create_model(Document)
- doc = Document.objects.create(name='Test Name')
- student = Student.objects.create(name='Some man')
- doc.students.add(student)
- @isolate_apps('schema')
- @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific db_table syntax.')
- def test_namespaced_db_table_foreign_key_reference(self):
- with connection.cursor() as cursor:
- cursor.execute('CREATE SCHEMA django_schema_tests')
- def delete_schema():
- with connection.cursor() as cursor:
- cursor.execute('DROP SCHEMA django_schema_tests CASCADE')
- self.addCleanup(delete_schema)
- class Author(Model):
- class Meta:
- app_label = 'schema'
- class Book(Model):
- class Meta:
- app_label = 'schema'
- db_table = '"django_schema_tests"."schema_book"'
- author = ForeignKey(Author, CASCADE)
- author.set_attributes_from_name('author')
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- editor.add_field(Book, author)
- def test_rename_table_renames_deferred_sql_references(self):
- atomic_rename = connection.features.supports_atomic_references_rename
- with connection.schema_editor(atomic=atomic_rename) as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author')
- editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book')
- try:
- self.assertGreater(len(editor.deferred_sql), 0)
- for statement in editor.deferred_sql:
- self.assertIs(statement.references_table('schema_author'), False)
- self.assertIs(statement.references_table('schema_book'), False)
- finally:
- editor.alter_db_table(Author, 'schema_renamed_author', 'schema_author')
- editor.alter_db_table(Author, 'schema_renamed_book', 'schema_book')
- def test_rename_column_renames_deferred_sql_references(self):
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- editor.create_model(Book)
- old_title = Book._meta.get_field('title')
- new_title = CharField(max_length=100, db_index=True)
- new_title.set_attributes_from_name('renamed_title')
- editor.alter_field(Book, old_title, new_title)
- old_author = Book._meta.get_field('author')
- new_author = ForeignKey(Author, CASCADE)
- new_author.set_attributes_from_name('renamed_author')
- editor.alter_field(Book, old_author, new_author)
- self.assertGreater(len(editor.deferred_sql), 0)
- for statement in editor.deferred_sql:
- self.assertIs(statement.references_column('book', 'title'), False)
- self.assertIs(statement.references_column('book', 'author_id'), False)
- @isolate_apps('schema')
- def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
- """
- Foreign keys without a database-level constraint don't prevent the field
- they reference from being renamed in an atomic block.
- """
- class Foo(Model):
- field = CharField(max_length=255, unique=True)
- class Meta:
- app_label = 'schema'
- class Bar(Model):
- foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
- class Meta:
- app_label = 'schema'
- self.isolated_local_models = [Foo, Bar]
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- editor.create_model(Bar)
- new_field = CharField(max_length=255, unique=True)
- new_field.set_attributes_from_name('renamed')
- with connection.schema_editor(atomic=True) as editor:
- editor.alter_field(Foo, Foo._meta.get_field('field'), new_field)
- @isolate_apps('schema')
- def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
- """
- Foreign keys without a database-level constraint don't prevent the table
- they reference from being renamed in an atomic block.
- """
- class Foo(Model):
- field = CharField(max_length=255, unique=True)
- class Meta:
- app_label = 'schema'
- class Bar(Model):
- foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
- class Meta:
- app_label = 'schema'
- self.isolated_local_models = [Foo, Bar]
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- editor.create_model(Bar)
- new_field = CharField(max_length=255, unique=True)
- new_field.set_attributes_from_name('renamed')
- with connection.schema_editor(atomic=True) as editor:
- editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table')
- Foo._meta.db_table = 'renamed_table'
- @isolate_apps('schema')
- @skipUnlessDBFeature('supports_collation_on_charfield')
- def test_db_collation_charfield(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest('Language collations are not supported.')
- class Foo(Model):
- field = CharField(max_length=255, db_collation=collation)
- class Meta:
- app_label = 'schema'
- self.isolated_local_models = [Foo]
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.assertEqual(
- self.get_column_collation(Foo._meta.db_table, 'field'),
- collation,
- )
- @isolate_apps('schema')
- @skipUnlessDBFeature('supports_collation_on_textfield')
- def test_db_collation_textfield(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest('Language collations are not supported.')
- class Foo(Model):
- field = TextField(db_collation=collation)
- class Meta:
- app_label = 'schema'
- self.isolated_local_models = [Foo]
- with connection.schema_editor() as editor:
- editor.create_model(Foo)
- self.assertEqual(
- self.get_column_collation(Foo._meta.db_table, 'field'),
- collation,
- )
- @skipUnlessDBFeature('supports_collation_on_charfield')
- def test_add_field_db_collation(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest('Language collations are not supported.')
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- new_field = CharField(max_length=255, db_collation=collation)
- new_field.set_attributes_from_name('alias')
- with connection.schema_editor() as editor:
- editor.add_field(Author, new_field)
- columns = self.column_classes(Author)
- self.assertEqual(
- columns['alias'][0],
- connection.features.introspected_field_types['CharField'],
- )
- self.assertEqual(columns['alias'][1][8], collation)
- @skipUnlessDBFeature('supports_collation_on_charfield')
- def test_alter_field_db_collation(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest('Language collations are not supported.')
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- old_field = Author._meta.get_field('name')
- new_field = CharField(max_length=255, db_collation=collation)
- new_field.set_attributes_from_name('name')
- new_field.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field, strict=True)
- self.assertEqual(
- self.get_column_collation(Author._meta.db_table, 'name'),
- collation,
- )
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field, old_field, strict=True)
- self.assertIsNone(self.get_column_collation(Author._meta.db_table, 'name'))
- @skipUnlessDBFeature('supports_collation_on_charfield')
- def test_alter_field_type_and_db_collation(self):
- collation = connection.features.test_collations.get('non_default')
- if not collation:
- self.skipTest('Language collations are not supported.')
- with connection.schema_editor() as editor:
- editor.create_model(Note)
- old_field = Note._meta.get_field('info')
- new_field = CharField(max_length=255, db_collation=collation)
- new_field.set_attributes_from_name('info')
- new_field.model = Note
- with connection.schema_editor() as editor:
- editor.alter_field(Note, old_field, new_field, strict=True)
- columns = self.column_classes(Note)
- self.assertEqual(
- columns['info'][0],
- connection.features.introspected_field_types['CharField'],
- )
- self.assertEqual(columns['info'][1][8], collation)
- with connection.schema_editor() as editor:
- editor.alter_field(Note, new_field, old_field, strict=True)
- columns = self.column_classes(Note)
- self.assertEqual(columns['info'][0], 'TextField')
- self.assertIsNone(columns['info'][1][8])
- @skipUnlessDBFeature(
- 'supports_collation_on_charfield',
- 'supports_non_deterministic_collations',
- )
- def test_ci_cs_db_collation(self):
- cs_collation = connection.features.test_collations.get('cs')
- ci_collation = connection.features.test_collations.get('ci')
- try:
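- # On MySQL and PostgreSQL, fall back to known case-sensitive collation names; on PostgreSQL, also create a non-deterministic case-insensitive ICU collation.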
- if connection.vendor == 'mysql':
- cs_collation = 'latin1_general_cs'
- elif connection.vendor == 'postgresql':
- cs_collation = 'en-x-icu'
- with connection.cursor() as cursor:
- cursor.execute(
- "CREATE COLLATION IF NOT EXISTS case_insensitive "
- "(provider = icu, locale = 'und-u-ks-level2', "
- "deterministic = false)"
- )
- ci_collation = 'case_insensitive'
- # Create the table.
- with connection.schema_editor() as editor:
- editor.create_model(Author)
- # Case-insensitive collation.
- old_field = Author._meta.get_field('name')
- new_field_ci = CharField(max_length=255, db_collation=ci_collation)
- new_field_ci.set_attributes_from_name('name')
- new_field_ci.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, old_field, new_field_ci, strict=True)
- Author.objects.create(name='ANDREW')
- self.assertIs(Author.objects.filter(name='Andrew').exists(), True)
- # Case-sensitive collation.
- new_field_cs = CharField(max_length=255, db_collation=cs_collation)
- new_field_cs.set_attributes_from_name('name')
- new_field_cs.model = Author
- with connection.schema_editor() as editor:
- editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
- self.assertIs(Author.objects.filter(name='Andrew').exists(), False)
- finally:
- if connection.vendor == 'postgresql':
- with connection.cursor() as cursor:
- cursor.execute('DROP COLLATION IF EXISTS case_insensitive')