import hashlib
import operator

from django.db.backends.creation import BaseDatabaseCreation
from django.db.backends.utils import truncate_name
from django.db.models.fields.related import ManyToManyField
from django.db.transaction import atomic
from django.utils.encoding import force_bytes
from django.utils.log import getLogger
from django.utils.six.moves import reduce
from django.utils.six import callable

logger = getLogger('django.db.backends.schema')

class BaseDatabaseSchemaEditor(object):
    """
    This class (and its subclasses) is responsible for emitting schema-changing
    statements to the databases - model creation/removal/alteration, field
    renaming, index fiddling, and so on.

    It is intended to eventually completely replace DatabaseCreation.

    This class should be used by creating an instance for each set of schema
    changes (e.g. a syncdb run, a migration file), and by using it as a context
    manager. This is necessary to allow things like circular foreign key
    references - FKs and other deferred SQL will only be created once the
    context manager exits successfully.
    """

    # Overrideable SQL templates
    sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
    sql_create_table_unique = "UNIQUE (%(columns)s)"
    sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
    sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
    sql_delete_table = "DROP TABLE %(table)s CASCADE"

    sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
    sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
    sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
    sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
    sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
    sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
    sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
    sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
    sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"

    sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
    sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"

    sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
    sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"

    sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
    sql_create_inline_fk = None
    sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"

    sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
    sql_delete_index = "DROP INDEX %(name)s"

    sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
    sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"

    def __init__(self, connection, collect_sql=False):
        self.connection = connection
        self.collect_sql = collect_sql
        if self.collect_sql:
            self.collected_sql = []

    # State-managing methods

    def __enter__(self):
        self.deferred_sql = []
        if self.connection.features.can_rollback_ddl:
            self.atomic = atomic(self.connection.alias)
            self.atomic.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            for sql in self.deferred_sql:
                self.execute(sql)
        if self.connection.features.can_rollback_ddl:
            self.atomic.__exit__(exc_type, exc_value, traceback)

    # Core utility functions

    def execute(self, sql, params=[]):
        """
        Executes the given SQL statement, with optional parameters.
        """
        # Log the command we're running, then run it
        logger.debug("%s; (params %r)" % (sql, params))
        if self.collect_sql:
            self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ";")
        else:
            with self.connection.cursor() as cursor:
                cursor.execute(sql, params)

    def quote_name(self, name):
        return self.connection.ops.quote_name(name)

    # Field <-> database mapping functions

    def column_sql(self, model, field, include_default=False):
        """
        Takes a field and returns its column definition.
        The field must already have had set_attributes_from_name called.
        """
        # Get the column's type and use that as the basis of the SQL
        db_params = field.db_parameters(connection=self.connection)
        sql = db_params['type']
        params = []
        # Check for fields that aren't actually columns (e.g. M2M)
        if sql is None:
            return None, None
        # Work out nullability
        null = field.null
        # If we were told to include a default value, do so
        default_value = self.effective_default(field)
        if include_default and default_value is not None:
            if self.connection.features.requires_literal_defaults:
                # Some databases can't take defaults as a parameter (oracle)
                # If this is the case, the individual schema backend should
                # implement prepare_default
                sql += " DEFAULT %s" % self.prepare_default(default_value)
            else:
                sql += " DEFAULT %s"
                params += [default_value]
        # Oracle treats the empty string ('') as null, so coerce the null
        # option whenever '' is a possible value.
        if (field.empty_strings_allowed and not field.primary_key and
                self.connection.features.interprets_empty_strings_as_nulls):
            null = True
        if null:
            sql += " NULL"
        else:
            sql += " NOT NULL"
        # Primary key/unique outputs
        if field.primary_key:
            sql += " PRIMARY KEY"
        elif field.unique:
            sql += " UNIQUE"
        # Optionally add the tablespace if it's an implicitly indexed column
        tablespace = field.db_tablespace or model._meta.db_tablespace
        if tablespace and self.connection.features.supports_tablespaces and field.unique:
            sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
        # Return the sql
        return sql, params

    def prepare_default(self, value):
        """
        Only used for backends which have requires_literal_defaults feature
        """
        raise NotImplementedError(
            'subclasses of BaseDatabaseSchemaEditor for backends which have '
            'requires_literal_defaults must provide a prepare_default() method'
        )

    def effective_default(self, field):
        """
        Returns a field's effective database default value
        """
        if field.has_default():
            default = field.get_default()
        elif not field.null and field.blank and field.empty_strings_allowed:
            default = ""
        else:
            default = None
        # If it's a callable, call it
        if callable(default):
            default = default()
        return default

    def quote_value(self, value):
        """
        Returns a quoted version of the value so it's safe to use in an SQL
        string. This is not safe against injection from user code; it is
        intended only for use in making SQL scripts or preparing default values
        for particularly tricky backends (defaults are not user-defined, though,
        so this is safe).
        """
        raise NotImplementedError()

    # Actions

    def create_model(self, model):
        """
        Takes a model and creates a table for it in the database.
        Will also create any accompanying indexes or unique constraints.
        """
        # Create column SQL, add FK deferreds if needed
        column_sqls = []
        params = []
        for field in model._meta.local_fields:
            # SQL
            definition, extra_params = self.column_sql(model, field)
            if definition is None:
                continue
            # Check constraints can go on the column SQL here
            db_params = field.db_parameters(connection=self.connection)
            if db_params['check']:
                definition += " CHECK (%s)" % db_params['check']
            # Autoincrement SQL (for backends with inline variant)
            col_type_suffix = field.db_type_suffix(connection=self.connection)
            if col_type_suffix:
                definition += " %s" % col_type_suffix
            params.extend(extra_params)
            # Indexes
            if field.db_index and not field.unique:
                self.deferred_sql.append(
                    self.sql_create_index % {
                        "name": self._create_index_name(model, [field.column], suffix=""),
                        "table": self.quote_name(model._meta.db_table),
                        "columns": self.quote_name(field.column),
                        "extra": "",
                    }
                )
            # FK
            if field.rel:
                to_table = field.rel.to._meta.db_table
                to_column = field.rel.to._meta.get_field(field.rel.field_name).column
                if self.connection.features.supports_foreign_keys:
                    self.deferred_sql.append(
                        self.sql_create_fk % {
                            "name": self._create_index_name(model, [field.column], suffix="_fk_%s_%s" % (to_table, to_column)),
                            "table": self.quote_name(model._meta.db_table),
                            "column": self.quote_name(field.column),
                            "to_table": self.quote_name(to_table),
                            "to_column": self.quote_name(to_column),
                        }
                    )
                elif self.sql_create_inline_fk:
                    definition += " " + self.sql_create_inline_fk % {
                        "to_table": self.quote_name(to_table),
                        "to_column": self.quote_name(to_column),
                    }
            # Add the SQL to our big list
            column_sqls.append("%s %s" % (
                self.quote_name(field.column),
                definition,
            ))
            # Autoincrement SQL (for backends with post table definition variant)
            if field.get_internal_type() == "AutoField":
                autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
                if autoinc_sql:
                    self.deferred_sql.extend(autoinc_sql)
        # Add any unique_togethers
        for fields in model._meta.unique_together:
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            column_sqls.append(self.sql_create_table_unique % {
                "columns": ", ".join(self.quote_name(column) for column in columns),
            })
        # Make the table
        sql = self.sql_create_table % {
            "table": self.quote_name(model._meta.db_table),
            "definition": ", ".join(column_sqls)
        }
        self.execute(sql, params)
        # Add any index_togethers
        for fields in model._meta.index_together:
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            self.execute(self.sql_create_index % {
                "table": self.quote_name(model._meta.db_table),
                "name": self._create_index_name(model, columns, suffix="_idx"),
                "columns": ", ".join(self.quote_name(column) for column in columns),
                "extra": "",
            })
        # Make M2M tables
        for field in model._meta.local_many_to_many:
            if field.rel.through._meta.auto_created:
                self.create_model(field.rel.through)

    def delete_model(self, model):
        """
        Deletes a model from the database.
        """
        # Handle auto-created intermediary models
        for field in model._meta.local_many_to_many:
            if field.rel.through._meta.auto_created:
                self.delete_model(field.rel.through)
        # Delete the table
        self.execute(self.sql_delete_table % {
            "table": self.quote_name(model._meta.db_table),
        })

    def alter_unique_together(self, model, old_unique_together, new_unique_together):
        """
        Deals with a model changing its unique_together.
        Note: The input unique_togethers must be doubly-nested, not the single-
        nested ["foo", "bar"] format.
        """
        olds = set(tuple(fields) for fields in old_unique_together)
        news = set(tuple(fields) for fields in new_unique_together)
        # Deleted uniques
        for fields in olds.difference(news):
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            constraint_names = self._constraint_names(model, columns, unique=True)
            if len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % (
                    len(constraint_names),
                    model._meta.db_table,
                    ", ".join(columns),
                ))
            self.execute(
                self.sql_delete_unique % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": constraint_names[0],
                },
            )
        # Created uniques
        for fields in news.difference(olds):
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            self.execute(self.sql_create_unique % {
                "table": self.quote_name(model._meta.db_table),
                "name": self._create_index_name(model, columns, suffix="_uniq"),
                "columns": ", ".join(self.quote_name(column) for column in columns),
            })

    def alter_index_together(self, model, old_index_together, new_index_together):
        """
        Deals with a model changing its index_together.
        Note: The input index_togethers must be doubly-nested, not the single-
        nested ["foo", "bar"] format.
        """
        olds = set(tuple(fields) for fields in old_index_together)
        news = set(tuple(fields) for fields in new_index_together)
        # Deleted indexes
        for fields in olds.difference(news):
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            constraint_names = self._constraint_names(model, list(columns), index=True)
            if len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % (
                    len(constraint_names),
                    model._meta.db_table,
                    ", ".join(columns),
                ))
            self.execute(
                self.sql_delete_index % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": constraint_names[0],
                },
            )
        # Created indexes
        for fields in news.difference(olds):
            columns = [model._meta.get_field_by_name(field)[0].column for field in fields]
            self.execute(self.sql_create_index % {
                "table": self.quote_name(model._meta.db_table),
                "name": self._create_index_name(model, columns, suffix="_idx"),
                "columns": ", ".join(self.quote_name(column) for column in columns),
                "extra": "",
            })

    def alter_db_table(self, model, old_db_table, new_db_table):
        """
        Renames the table a model points to.
        """
        self.execute(self.sql_rename_table % {
            "old_table": self.quote_name(old_db_table),
            "new_table": self.quote_name(new_db_table),
        })

    def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
        """
        Moves a model's table between tablespaces
        """
        self.execute(self.sql_retablespace_table % {
            "table": self.quote_name(model._meta.db_table),
            "old_tablespace": self.quote_name(old_db_tablespace),
            "new_tablespace": self.quote_name(new_db_tablespace),
        })

    def add_field(self, model, field):
        """
        Creates a field on a model.
        Usually involves adding a column, but may involve adding a
        table instead (for M2M fields)
        """
        # Special-case implicit M2M tables
        if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created:
            return self.create_model(field.rel.through)
        # Get the column's definition
        definition, params = self.column_sql(model, field, include_default=True)
        # It might not actually have a column behind it
        if definition is None:
            return
        # Check constraints can go on the column SQL here
        db_params = field.db_parameters(connection=self.connection)
        if db_params['check']:
            definition += " CHECK (%s)" % db_params['check']
        # Build the SQL and run it
        sql = self.sql_create_column % {
            "table": self.quote_name(model._meta.db_table),
            "column": self.quote_name(field.column),
            "definition": definition,
        }
        self.execute(sql, params)
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if field.default is not None:
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": self.sql_alter_column_no_default % {
                    "column": self.quote_name(field.column),
                }
            }
            self.execute(sql)
        # Add an index, if required
        if field.db_index and not field.unique:
            self.deferred_sql.append(
                self.sql_create_index % {
                    "name": self._create_index_name(model, [field.column], suffix=""),
                    "table": self.quote_name(model._meta.db_table),
                    "columns": self.quote_name(field.column),
                    "extra": "",
                }
            )
        # Add any FK constraints later
        if field.rel and self.connection.features.supports_foreign_keys:
            to_table = field.rel.to._meta.db_table
            to_column = field.rel.to._meta.get_field(field.rel.field_name).column
            self.deferred_sql.append(
                self.sql_create_fk % {
                    "name": self.quote_name('%s_refs_%s_%x' % (
                        field.column,
                        to_column,
                        abs(hash((model._meta.db_table, to_table)))
                    )),
                    "table": self.quote_name(model._meta.db_table),
                    "column": self.quote_name(field.column),
                    "to_table": self.quote_name(to_table),
                    "to_column": self.quote_name(to_column),
                }
            )
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()

    def remove_field(self, model, field):
        """
        Removes a field from a model. Usually involves deleting a column,
        but for M2Ms may involve deleting a table.
        """
        # Special-case implicit M2M tables
        if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created:
            return self.delete_model(field.rel.through)
        # It might not actually have a column behind it
        if field.db_parameters(connection=self.connection)['type'] is None:
            return
        # Drop any FK constraints, MySQL requires explicit deletion
        if field.rel:
            fk_names = self._constraint_names(model, [field.column], foreign_key=True)
            for fk_name in fk_names:
                self.execute(
                    self.sql_delete_fk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": fk_name,
                    }
                )
        # Delete the column
        sql = self.sql_delete_column % {
            "table": self.quote_name(model._meta.db_table),
            "column": self.quote_name(field.column),
        }
        self.execute(sql)
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()

    def alter_field(self, model, old_field, new_field, strict=False):
        """
        Allows a field's type, uniqueness, nullability, default, column,
        constraints etc. to be modified.
        Requires a copy of the old field as well so we can only perform
        changes that are required.
        If strict is true, raises errors if the old column does not match old_field precisely.
        """
        # Ensure this field is even column-based
        old_db_params = old_field.db_parameters(connection=self.connection)
        old_type = old_db_params['type']
        new_db_params = new_field.db_parameters(connection=self.connection)
        new_type = new_db_params['type']
        if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created):
            return self._alter_many_to_many(model, old_field, new_field, strict)
        elif old_type is None or new_type is None:
            raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % (
                old_field,
                new_field,
            ))
        # Has unique been removed?
        if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model, [old_field.column], unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_unique % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    },
                )
        # Removed an index?
        if old_field.db_index and not new_field.db_index and not old_field.unique and not (not new_field.unique and old_field.unique):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column], index=True)
            if strict and len(index_names) != 1:
                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
                    len(index_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for index_name in index_names:
                self.execute(
                    self.sql_delete_index % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": index_name,
                    }
                )
        # Drop any FK constraints, we'll remake them later
        if old_field.rel:
            fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                    len(fk_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for fk_name in fk_names:
                self.execute(
                    self.sql_delete_fk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": fk_name,
                    }
                )
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
                for fk_name in rel_fk_names:
                    self.execute(
                        self.sql_delete_fk % {
                            "table": self.quote_name(rel.model._meta.db_table),
                            "name": fk_name,
                        }
                    )
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_check % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    }
                )
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.execute(self.sql_rename_column % {
                "table": self.quote_name(model._meta.db_table),
                "old_column": self.quote_name(old_field.column),
                "new_column": self.quote_name(new_field.column),
                "type": new_type,
            })
        # Next, start accumulating actions to do
        actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(model._meta.db_table, new_field.column, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        if old_default != new_default:
            if new_default is None:
                actions.append((
                    self.sql_alter_column_no_default % {
                        "column": self.quote_name(new_field.column),
                    },
                    [],
                ))
            else:
                if self.connection.features.requires_literal_defaults:
                    # Some databases can't take defaults as a parameter (oracle)
                    # If this is the case, the individual schema backend should
                    # implement prepare_default
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": self.prepare_default(new_default),
                        },
                        [],
                    ))
                else:
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": "%s",
                        },
                        [new_default],
                    ))
        # Nullability change?
        if old_field.null != new_field.null:
            if new_field.null:
                actions.append((
                    self.sql_alter_column_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
            else:
                actions.append((
                    self.sql_alter_column_not_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
        if actions:
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), reduce(operator.add, params))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # Added a unique?
        if not old_field.unique and new_field.unique:
            self.execute(
                self.sql_create_unique % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                    "columns": self.quote_name(new_field.column),
                }
            )
        # Added an index?
        if not old_field.db_index and new_field.db_index and not new_field.unique and not (not old_field.unique and new_field.unique):
            self.execute(
                self.sql_create_index % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                    "columns": self.quote_name(new_field.column),
                    "extra": "",
                }
            )
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                    len(constraint_names),
                    model._meta.db_table,
                ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_pk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    },
                )
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_pk"),
                    "columns": self.quote_name(new_field.column),
                }
            )
            # Update all referencing columns
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Handle our type alters on the other end of rels from the PK stuff above
        for rel in rels_to_update:
            rel_db_params = rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(rel.model._meta.db_table),
                    "changes": self.sql_alter_column_type % {
                        "column": self.quote_name(rel.field.column),
                        "type": rel_type,
                    }
                }
            )
        # Does it have a foreign key?
        if new_field.rel:
            self.execute(
                self.sql_create_fk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_fk"),
                    "column": self.quote_name(new_field.column),
                    "to_table": self.quote_name(new_field.rel.to._meta.db_table),
                    "to_column": self.quote_name(new_field.rel.get_related_field().column),
                }
            )
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                self.execute(
                    self.sql_create_fk % {
                        "table": self.quote_name(rel.model._meta.db_table),
                        "name": self._create_index_name(rel.model, [rel.field.column], suffix="_fk"),
                        "column": self.quote_name(rel.field.column),
                        "to_table": self.quote_name(model._meta.db_table),
                        "to_column": self.quote_name(new_field.column),
                    }
                )
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_check"),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                }
            )
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()

    def _alter_column_type_sql(self, table, column, type):
        """
        Hook to specialize column type alteration for different backends,
        for cases when a creation type is different to an alteration type
        (e.g. SERIAL in PostgreSQL, PostGIS fields).
        Should return two things; an SQL fragment of (sql, params) to insert
        into an ALTER TABLE statement, and a list of extra (sql, params) tuples
        to run once the field is altered.
        """
        return (
            (
                self.sql_alter_column_type % {
                    "column": self.quote_name(column),
                    "type": type,
                },
                [],
            ),
            [],
        )

    def _alter_many_to_many(self, model, old_field, new_field, strict):
        """
        Alters M2Ms to repoint their to= endpoints.
        """
        # Rename the through table
        if old_field.rel.through._meta.db_table != new_field.rel.through._meta.db_table:
            self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table, new_field.rel.through._meta.db_table)
        # Repoint the FK to the other side
        self.alter_field(
            new_field.rel.through,
            # We need the field that points to the target model, so we can tell alter_field to change it -
            # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
            old_field.rel.through._meta.get_field_by_name(old_field.m2m_reverse_field_name())[0],
            new_field.rel.through._meta.get_field_by_name(new_field.m2m_reverse_field_name())[0],
        )

    def _create_index_name(self, model, column_names, suffix=""):
        """
        Generates a unique name for an index/unique constraint.
        """
        # If there is just one column in the index, use a default algorithm from Django
        if len(column_names) == 1 and not suffix:
            return truncate_name(
                '%s_%s' % (model._meta.db_table, BaseDatabaseCreation._digest(column_names[0])),
                self.connection.ops.max_name_length()
            )
        # Else generate the name for the index using a different algorithm
        table_name = model._meta.db_table.replace('"', '').replace('.', '_')
        index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
        max_length = self.connection.ops.max_name_length() or 200
        # If the index name is too long, truncate it
        index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
        if len(index_name) > max_length:
            part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
            index_name = '%s%s' % (table_name[:(max_length - len(part))], part)
        # It shouldn't start with an underscore (Oracle hates this)
        if index_name[0] == "_":
            index_name = index_name[1:]
        # If it's STILL too long, just hash it down
        if len(index_name) > max_length:
            index_name = hashlib.md5(force_bytes(index_name)).hexdigest()[:max_length]
        # It can't start with a number on Oracle, so prepend D if we need to
        if index_name[0].isdigit():
            index_name = "D%s" % index_name[:-1]
        return index_name

    def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None):
        """
        Returns all constraint names matching the columns and conditions
        """
        column_names = list(column_names) if column_names else None
        with self.connection.cursor() as cursor:
            constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
        result = []
        for name, infodict in constraints.items():
            if column_names is None or column_names == infodict['columns']:
                if unique is not None and infodict['unique'] != unique:
                    continue
                if primary_key is not None and infodict['primary_key'] != primary_key:
                    continue
                if index is not None and infodict['index'] != index:
                    continue
                if check is not None and infodict['check'] != check:
                    continue
                if foreign_key is not None and not infodict['foreign_key']:
                    continue
                result.append(name)
        return result