Merged boulder-oracle-sprint branch (r3965:5512) back into trunk. All expected tests pass for all databases.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@5519 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Malcolm Tredinnick 2007-06-23 14:16:00 +00:00
parent 553a20075e
commit ac64e91a0c
28 changed files with 1240 additions and 173 deletions


@@ -9,7 +9,7 @@ DELETION = 3

class LogEntryManager(models.Manager):
    def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''):
-       e = self.model(None, None, user_id, content_type_id, object_id, object_repr[:200], action_flag, change_message)
+       e = self.model(None, None, user_id, content_type_id, str(object_id), object_repr[:200], action_flag, change_message)
        e.save()

class LogEntry(models.Model):


@@ -59,12 +59,16 @@ def _is_valid_dir_name(s):

def _get_installed_models(table_list):
    "Gets a set of all models that are installed, given a list of existing tables"
-   from django.db import models
+   from django.db import backend, models
    all_models = []
    for app in models.get_apps():
        for model in models.get_models(app):
            all_models.append(model)
-   return set([m for m in all_models if m._meta.db_table in table_list])
+   if backend.uses_case_insensitive_names:
+       converter = str.upper
+   else:
+       converter = lambda x: x
+   return set([m for m in all_models if converter(m._meta.db_table) in map(converter, table_list)])

def _get_table_list():
    "Gets a list of all db tables that are physically installed."
@@ -100,6 +104,7 @@ get_rel_data_type = lambda f: (f.get_internal_type() in ('AutoField', 'PositiveI

def get_sql_create(app):
    "Returns a list of the CREATE TABLE SQL statements for the given app."
    from django.db import get_creation_module, models
    data_types = get_creation_module().DATA_TYPES
    if not data_types:

@@ -171,15 +176,20 @@ def _get_sql_model_create(model, known_models=set()):
            rel_field = f
            data_type = f.get_internal_type()
        col_type = data_types[data_type]
+       tablespace = f.db_tablespace or opts.db_tablespace
        if col_type is not None:
            # Make the definition (e.g. 'foo VARCHAR(30)') for this field.
            field_output = [style.SQL_FIELD(backend.quote_name(f.column)),
                style.SQL_COLTYPE(col_type % rel_field.__dict__)]
            field_output.append(style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or '')))
-           if f.unique:
+           if f.unique and (not f.primary_key or backend.allows_unique_and_pk):
                field_output.append(style.SQL_KEYWORD('UNIQUE'))
            if f.primary_key:
                field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
+           if tablespace and backend.supports_tablespaces and (f.unique or f.primary_key) and backend.autoindexes_primary_keys:
+               # We must specify the index tablespace inline, because we
+               # won't be generating a CREATE INDEX statement for this field.
+               field_output.append(backend.get_tablespace_sql(tablespace, inline=True))
            if f.rel:
                if f.rel.to in known_models:
                    field_output.append(style.SQL_KEYWORD('REFERENCES') + ' ' + \

@@ -203,9 +213,19 @@ def _get_sql_model_create(model, known_models=set()):
    full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(backend.quote_name(opts.db_table)) + ' (']
    for i, line in enumerate(table_output): # Combine and add commas.
        full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
-   full_statement.append(');')
+   full_statement.append(')')
+   if opts.db_tablespace and backend.supports_tablespaces:
+       full_statement.append(backend.get_tablespace_sql(opts.db_tablespace))
+   full_statement.append(';')
    final_output.append('\n'.join(full_statement))
+   if opts.has_auto_field and hasattr(backend, 'get_autoinc_sql'):
+       # Add any extra SQL needed to support auto-incrementing primary keys
+       autoinc_sql = backend.get_autoinc_sql(opts.db_table)
+       if autoinc_sql:
+           for stmt in autoinc_sql:
+               final_output.append(stmt)
    return final_output, pending_references

def _get_sql_for_pending_references(model, pending_references):

@@ -213,6 +233,7 @@ def _get_sql_for_pending_references(model, pending_references):
    Get any ALTER TABLE statements to add constraints after the fact.
    """
    from django.db import backend, get_creation_module
+   from django.db.backends.util import truncate_name
    data_types = get_creation_module().DATA_TYPES
    final_output = []

@@ -229,7 +250,7 @@ def _get_sql_for_pending_references(model, pending_references):
            # So we are careful with character usage here.
            r_name = '%s_refs_%s_%x' % (r_col, col, abs(hash((r_table, table))))
            final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
-               (backend.quote_name(r_table), r_name,
+               (backend.quote_name(r_table), truncate_name(r_name, backend.get_max_name_length()),
                backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col),
                backend.get_deferrable_sql()))
        del pending_references[model]
@@ -245,12 +266,18 @@ def _get_many_to_many_sql_for_model(model):
    final_output = []
    for f in opts.many_to_many:
        if not isinstance(f.rel, generic.GenericRel):
+           tablespace = f.db_tablespace or opts.db_tablespace
+           if tablespace and backend.supports_tablespaces and backend.autoindexes_primary_keys:
+               tablespace_sql = ' ' + backend.get_tablespace_sql(tablespace, inline=True)
+           else:
+               tablespace_sql = ''
            table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
                style.SQL_TABLE(backend.quote_name(f.m2m_db_table())) + ' (']
-           table_output.append(' %s %s %s,' % \
+           table_output.append(' %s %s %s%s,' % \
                (style.SQL_FIELD(backend.quote_name('id')),
                style.SQL_COLTYPE(data_types['AutoField']),
-               style.SQL_KEYWORD('NOT NULL PRIMARY KEY')))
+               style.SQL_KEYWORD('NOT NULL PRIMARY KEY'),
+               tablespace_sql))
            table_output.append(' %s %s %s %s (%s)%s,' % \
                (style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
                style.SQL_COLTYPE(data_types[get_rel_data_type(opts.pk)] % opts.pk.__dict__),

@@ -265,17 +292,30 @@ def _get_many_to_many_sql_for_model(model):
                style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)),
                style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column)),
                backend.get_deferrable_sql()))
-           table_output.append(' %s (%s, %s)' % \
+           table_output.append(' %s (%s, %s)%s' % \
                (style.SQL_KEYWORD('UNIQUE'),
                style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
-               style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name()))))
-           table_output.append(');')
+               style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())),
+               tablespace_sql))
+           table_output.append(')')
+           if opts.db_tablespace and backend.supports_tablespaces:
+               # f.db_tablespace is only for indices, so ignore its value here.
+               table_output.append(backend.get_tablespace_sql(opts.db_tablespace))
+           table_output.append(';')
            final_output.append('\n'.join(table_output))
+           # Add any extra SQL needed to support auto-incrementing PKs
+           autoinc_sql = backend.get_autoinc_sql(f.m2m_db_table())
+           if autoinc_sql:
+               for stmt in autoinc_sql:
+                   final_output.append(stmt)
    return final_output

def get_sql_delete(app):
    "Returns a list of the DROP TABLE SQL statements for the given app."
    from django.db import backend, connection, models, get_introspection_module
+   from django.db.backends.util import truncate_name
    introspection = get_introspection_module()

    # This should work even if a connection isn't available
@@ -289,6 +329,10 @@ def get_sql_delete(app):
        table_names = introspection.get_table_list(cursor)
    else:
        table_names = []
+   if backend.uses_case_insensitive_names:
+       table_name_converter = str.upper
+   else:
+       table_name_converter = lambda x: x

    output = []

@@ -298,7 +342,7 @@ def get_sql_delete(app):
    references_to_delete = {}
    app_models = models.get_models(app)
    for model in app_models:
-       if cursor and model._meta.db_table in table_names:
+       if cursor and table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.fields:

@@ -308,7 +352,7 @@ def get_sql_delete(app):
            to_delete.add(model)

    for model in app_models:
-       if cursor and model._meta.db_table in table_names:
+       if cursor and table_name_converter(model._meta.db_table) in table_names:
            # Drop the table now
            output.append('%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
                style.SQL_TABLE(backend.quote_name(model._meta.db_table))))

@@ -318,20 +362,26 @@ def get_sql_delete(app):
                    col = f.column
                    r_table = model._meta.db_table
                    r_col = model._meta.get_field(f.rel.field_name).column
+                   r_name = '%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table))))
                    output.append('%s %s %s %s;' % \
                        (style.SQL_KEYWORD('ALTER TABLE'),
                        style.SQL_TABLE(backend.quote_name(table)),
                        style.SQL_KEYWORD(backend.get_drop_foreignkey_sql()),
-                       style.SQL_FIELD(backend.quote_name('%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table))))))))
+                       style.SQL_FIELD(truncate_name(r_name, backend.get_max_name_length()))))
                del references_to_delete[model]
+           if model._meta.has_auto_field and hasattr(backend, 'get_drop_sequence'):
+               output.append(backend.get_drop_sequence(model._meta.db_table))

    # Output DROP TABLE statements for many-to-many tables.
    for model in app_models:
        opts = model._meta
        for f in opts.many_to_many:
-           if cursor and f.m2m_db_table() in table_names:
+           if cursor and table_name_converter(f.m2m_db_table()) in table_names:
                output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
                    style.SQL_TABLE(backend.quote_name(f.m2m_db_table()))))
+               if hasattr(backend, 'get_drop_sequence'):
+                   output.append(backend.get_drop_sequence("%s_%s" % (model._meta.db_table, f.column)))

    app_label = app_models[0]._meta.app_label

@@ -430,14 +480,20 @@ def get_sql_indexes_for_model(model):
    output = []

    for f in model._meta.fields:
-       if f.db_index:
+       if f.db_index and not ((f.primary_key or f.unique) and backend.autoindexes_primary_keys):
            unique = f.unique and 'UNIQUE ' or ''
+           tablespace = f.db_tablespace or model._meta.db_tablespace
+           if tablespace and backend.supports_tablespaces:
+               tablespace_sql = ' ' + backend.get_tablespace_sql(tablespace)
+           else:
+               tablespace_sql = ''
            output.append(
                style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \
                style.SQL_TABLE(backend.quote_name('%s_%s' % (model._meta.db_table, f.column))) + ' ' + \
                style.SQL_KEYWORD('ON') + ' ' + \
                style.SQL_TABLE(backend.quote_name(model._meta.db_table)) + ' ' + \
-               "(%s);" % style.SQL_FIELD(backend.quote_name(f.column))
+               "(%s)" % style.SQL_FIELD(backend.quote_name(f.column)) + \
+               "%s;" % tablespace_sql
            )
    return output
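For illustration only (not output captured from the patch), on a tablespace-aware backend the statements built above come out roughly as follows; the table, column and tablespace names are made up:

    # Hypothetical entry appended by get_sql_indexes_for_model() for a field
    # with db_index=True and db_tablespace='indexes' on the Oracle backend,
    # where quote_name() uppercases and double-quotes identifiers:
    example_index_sql = ('CREATE INDEX "POLLS_POLL_PUB_DATE" '
                         'ON "POLLS_POLL" ("PUB_DATE") TABLESPACE "INDEXES";')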
@@ -461,7 +517,7 @@ def _emit_post_sync_signal(created_models, verbosity, interactive):

def syncdb(verbosity=1, interactive=True):
    "Creates the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
-   from django.db import connection, transaction, models, get_creation_module
+   from django.db import backend, connection, transaction, models, get_creation_module
    from django.conf import settings

    disable_termcolors()

@@ -484,6 +540,10 @@ def syncdb(verbosity=1, interactive=True):
    # Get a list of all existing database tables,
    # so we know what needs to be added.
    table_list = _get_table_list()
+   if backend.uses_case_insensitive_names:
+       table_name_converter = str.upper
+   else:
+       table_name_converter = lambda x: x

    # Get a list of already installed *models* so that references work right.
    seen_models = _get_installed_models(table_list)

@@ -498,7 +558,7 @@ def syncdb(verbosity=1, interactive=True):
            # Create the model's database table, if it doesn't already exist.
            if verbosity >= 2:
                print "Processing %s.%s model" % (app_name, model._meta.object_name)
-           if model._meta.db_table in table_list:
+           if table_name_converter(model._meta.db_table) in table_list:
                continue
            sql, references = _get_sql_model_create(model, seen_models)
            seen_models.add(model)

@@ -510,7 +570,7 @@ def syncdb(verbosity=1, interactive=True):
                print "Creating table %s" % model._meta.db_table
            for statement in sql:
                cursor.execute(statement)
-           table_list.append(model._meta.db_table)
+           table_list.append(table_name_converter(model._meta.db_table))

    # Create the m2m tables. This must be done after all tables have been created
    # to ensure that all referred tables will exist.

@@ -829,7 +889,7 @@ def inspectdb():
        except NotImplementedError:
            indexes = {}
        for i, row in enumerate(introspection_module.get_table_description(cursor, table_name)):
-           att_name = row[0]
+           att_name = row[0].lower()
            comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
            extra_params = {} # Holds Field parameters such as 'db_column'.

@@ -1626,7 +1686,9 @@ def execute_from_command_line(action_mapping=DEFAULT_ACTION_MAPPING, argv=None):
    if not mod_list:
        parser.print_usage_and_exit()
    if action not in NO_SQL_TRANSACTION:
-       print style.SQL_KEYWORD("BEGIN;")
+       from django.db import backend
+       if backend.get_start_transaction_sql():
+           print style.SQL_KEYWORD(backend.get_start_transaction_sql())
    for mod in mod_list:
        if action == 'reset':
            output = action_mapping[action](mod, options.interactive)


@@ -89,7 +89,14 @@ class DatabaseWrapper(local):
            self.connection.close()
            self.connection = None

+allows_group_by_ordinal = True
+allows_unique_and_pk = True
+autoindexes_primary_keys = True
+needs_datetime_string_cast = True
+needs_upper_for_iops = False
supports_constraints = True
+supports_tablespaces = True
+uses_case_insensitive_names = False

def quote_name(name):
    if name.startswith('[') and name.endswith(']'):

@@ -117,6 +124,9 @@ def get_date_trunc_sql(lookup_type, field_name):
    if lookup_type=='day':
        return "Convert(datetime, Convert(varchar(12), %s))" % field_name

+def get_datetime_cast_sql():
+    return None
+
def get_limit_offset_sql(limit, offset=None):
    # TODO: This is a guess. Make sure this is correct.
    sql = "LIMIT %s" % limit

@@ -139,6 +149,18 @@ def get_drop_foreignkey_sql():
def get_pk_default_value():
    return "DEFAULT"

+def get_max_name_length():
+    return None
+
+def get_start_transaction_sql():
+    return "BEGIN;"
+
+def get_tablespace_sql(tablespace, inline=False):
+    return "ON %s" % quote_name(tablespace)
+
+def get_autoinc_sql(table):
+    return None
+
def get_sql_flush(style, tables, sequences):
    """Return a list of SQL statements required to remove all data from
    all tables in the database (without actually removing the tables


@@ -33,6 +33,7 @@ class DatabaseWrapper:
        pass # close()

supports_constraints = False
+supports_tablespaces = False
quote_name = complain
dictfetchone = complain
dictfetchmany = complain


@@ -134,7 +134,14 @@ class DatabaseWrapper(local):
            self.server_version = tuple([int(x) for x in m.groups()])
        return self.server_version

+allows_group_by_ordinal = True
+allows_unique_and_pk = True
+autoindexes_primary_keys = False
+needs_datetime_string_cast = True # MySQLdb requires a typecast for dates
+needs_upper_for_iops = False
supports_constraints = True
+supports_tablespaces = False
+uses_case_insensitive_names = False

def quote_name(name):
    if name.startswith("`") and name.endswith("`"):

@@ -167,6 +174,9 @@ def get_date_trunc_sql(lookup_type, field_name):
    sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
    return sql

+def get_datetime_cast_sql():
+    return None
+
def get_limit_offset_sql(limit, offset=None):
    sql = "LIMIT "
    if offset and offset != 0:

@@ -188,6 +198,15 @@ def get_drop_foreignkey_sql():
def get_pk_default_value():
    return "DEFAULT"

+def get_max_name_length():
+    return None;
+
+def get_start_transaction_sql():
+    return "BEGIN;"
+
+def get_autoinc_sql(table):
+    return None
+
def get_sql_flush(style, tables, sequences):
    """Return a list of SQL statements required to remove all data from
    all tables in the database (without actually removing the tables


@@ -135,7 +135,14 @@ class DatabaseWrapper(local):
            self.server_version = tuple([int(x) for x in m.groups()])
        return self.server_version

+allows_group_by_ordinal = True
+allows_unique_and_pk = True
+autoindexes_primary_keys = False
+needs_datetime_string_cast = True # MySQLdb requires a typecast for dates
+needs_upper_for_iops = False
supports_constraints = True
+supports_tablespaces = False
+uses_case_insensitive_names = False

def quote_name(name):
    if name.startswith("`") and name.endswith("`"):

@@ -168,6 +175,9 @@ def get_date_trunc_sql(lookup_type, field_name):
    sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
    return sql

+def get_datetime_cast_sql():
+    return None
+
def get_limit_offset_sql(limit, offset=None):
    sql = "LIMIT "
    if offset and offset != 0:

@@ -189,6 +199,15 @@ def get_drop_foreignkey_sql():
def get_pk_default_value():
    return "DEFAULT"

+def get_max_name_length():
+    return None;
+
+def get_start_transaction_sql():
+    return "BEGIN;"
+
+def get_autoinc_sql(table):
+    return None
+
def get_sql_flush(style, tables, sequences):
    """Return a list of SQL statements required to remove all data from
    all tables in the database (without actually removing the tables


@@ -4,12 +4,16 @@ Oracle database backend for Django.
Requires cx_Oracle: http://www.python.net/crew/atuining/cx_Oracle/
"""

+from django.conf import settings
from django.db.backends import util
try:
    import cx_Oracle as Database
except ImportError, e:
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured, "Error loading cx_Oracle module: %s" % e
+import datetime
+from django.utils.datastructures import SortedDict

DatabaseError = Database.Error
IntegrityError = Database.IntegrityError
@@ -31,7 +35,6 @@ class DatabaseWrapper(local):
        return self.connection is not None

    def cursor(self):
-       from django.conf import settings
        if not self._valid_connection():
            if len(settings.DATABASE_HOST.strip()) == 0:
                settings.DATABASE_HOST = 'localhost'
@@ -41,25 +44,37 @@ class DatabaseWrapper(local):
            else:
                conn_string = "%s/%s@%s" % (settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME)
            self.connection = Database.connect(conn_string, **self.options)
-       return FormatStylePlaceholderCursor(self.connection)
+       cursor = FormatStylePlaceholderCursor(self.connection)
+       # default arraysize of 1 is highly sub-optimal
+       cursor.arraysize = 100
+       # set oracle date to ansi date format
+       cursor.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD'")
+       cursor.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'")
+       if settings.DEBUG:
+           return util.CursorDebugWrapper(cursor, self)
+       return cursor

    def _commit(self):
        if self.connection is not None:
-           self.connection.commit()
+           return self.connection.commit()

    def _rollback(self):
        if self.connection is not None:
-           try:
-               self.connection.rollback()
-           except Database.NotSupportedError:
-               pass
+           return self.connection.rollback()

    def close(self):
        if self.connection is not None:
            self.connection.close()
            self.connection = None

+allows_group_by_ordinal = False
+allows_unique_and_pk = False # Suppress UNIQUE/PK for Oracle (ORA-02259)
+autoindexes_primary_keys = True
+needs_datetime_string_cast = False
+needs_upper_for_iops = True
supports_constraints = True
+supports_tablespaces = True
+uses_case_insensitive_names = True

class FormatStylePlaceholderCursor(Database.Cursor):
    """
@@ -67,45 +82,75 @@ class FormatStylePlaceholderCursor(Database.Cursor):
    This fixes it -- but note that if you want to use a literal "%s" in a query,
    you'll need to use "%%s".
    """
def _rewrite_args(self, query, params=None):
if params is None:
params = []
else:
# cx_Oracle can't handle unicode parameters, so cast to str for now
for i, param in enumerate(params):
if type(param) == unicode:
try:
params[i] = param.encode('utf-8')
except UnicodeError:
params[i] = str(param)
args = [(':arg%d' % i) for i in range(len(params))]
query = query % tuple(args)
# cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it
# it does want a trailing ';' but not a trailing '/'. However, these
# characters must be included in the original query in case the query
# is being passed to SQL*Plus.
if query.endswith(';') or query.endswith('/'):
query = query[:-1]
return query, params
    def execute(self, query, params=None):
-       if params is None: params = []
-       query = self.convert_arguments(query, len(params))
+       query, params = self._rewrite_args(query, params)
        return Database.Cursor.execute(self, query, params)

    def executemany(self, query, params=None):
-       if params is None: params = []
-       query = self.convert_arguments(query, len(params[0]))
+       query, params = self._rewrite_args(query, params)
        return Database.Cursor.executemany(self, query, params)

-   def convert_arguments(self, query, num_params):
-       # replace occurances of "%s" with ":arg" - Oracle requires colons for parameter placeholders.
-       args = [':arg' for i in range(num_params)]
-       return query % tuple(args)
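A quick sketch of what _rewrite_args() does to a query before it reaches cx_Oracle (values are illustrative, not taken from the patch):

    # Sketch only: '%s' placeholders become numbered ':argN' binds, and a
    # trailing ';' (or '/') is stripped before execution.
    query, params = "SELECT * FROM polls_poll WHERE id = %s;", [1]
    # after query, params = cursor._rewrite_args(query, params):
    #   query  == "SELECT * FROM polls_poll WHERE id = :arg0"
    #   params == [1]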
def quote_name(name):
-   return name
+   # SQL92 requires delimited (quoted) names to be case-sensitive. When
+   # not quoted, Oracle has case-insensitive behavior for identifiers, but
+   # always defaults to uppercase.
+   # We simplify things by making Oracle identifiers always uppercase.
+   if not name.startswith('"') and not name.endswith('"'):
+       name = '"%s"' % util.truncate_name(name.upper(), get_max_name_length())
+   return name.upper()
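The effect of the new quote_name() is easiest to see with a couple of hypothetical identifiers:

    # Unquoted names are uppercased, truncated to Oracle's 30-character limit
    # and wrapped in double quotes; already-quoted names are just uppercased.
    quote_name('polls_poll')      # -> '"POLLS_POLL"'
    quote_name('"polls_poll"')    # -> '"POLLS_POLL"'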
dictfetchone = util.dictfetchone
dictfetchmany = util.dictfetchmany
dictfetchall = util.dictfetchall

def get_last_insert_id(cursor, table_name, pk_name):
-   query = "SELECT %s_sq.currval from dual" % table_name
-   cursor.execute(query)
+   sq_name = util.truncate_name(table_name, get_max_name_length()-3)
+   cursor.execute('SELECT %s_sq.currval FROM dual' % sq_name)
    return cursor.fetchone()[0]

def get_date_extract_sql(lookup_type, table_name):
    # lookup_type is 'year', 'month', 'day'
-   # http://www.psoug.org/reference/date_func.html
+   # http://download-east.oracle.com/docs/cd/B10501_01/server.920/a96540/functions42a.htm#1017163
    return "EXTRACT(%s FROM %s)" % (lookup_type, table_name)

def get_date_trunc_sql(lookup_type, field_name):
-   return "EXTRACT(%s FROM TRUNC(%s))" % (lookup_type, field_name)
+   # lookup_type is 'year', 'month', 'day'
# Oracle uses TRUNC() for both dates and numbers.
# http://download-east.oracle.com/docs/cd/B10501_01/server.920/a96540/functions155a.htm#SQLRF06151
if lookup_type == 'day':
sql = 'TRUNC(%s)' % (field_name,)
else:
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type)
return sql
def get_datetime_cast_sql():
return "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')"
def get_limit_offset_sql(limit, offset=None):
    # Limits and offset are too complicated to be handled here.
-   # Instead, they are handled in django/db/query.py.
-   pass
+   # Instead, they are handled in django/db/backends/oracle/query.py.
+   return ""

def get_random_function_sql():
    return "DBMS_RANDOM.RANDOM"

@@ -117,40 +162,363 @@ def get_fulltext_search_sql(field_name):
    raise NotImplementedError

def get_drop_foreignkey_sql():
-   return "DROP FOREIGN KEY"
+   return "DROP CONSTRAINT"

def get_pk_default_value():
    return "DEFAULT"
def get_max_name_length():
return 30
def get_start_transaction_sql():
return None
def get_tablespace_sql(tablespace, inline=False):
return "%sTABLESPACE %s" % ((inline and "USING INDEX " or ""), quote_name(tablespace))
def get_autoinc_sql(table):
# To simulate auto-incrementing primary keys in Oracle, we have to
# create a sequence and a trigger.
sq_name = get_sequence_name(table)
tr_name = get_trigger_name(table)
sequence_sql = 'CREATE SEQUENCE %s;' % sq_name
trigger_sql = """CREATE OR REPLACE TRIGGER %s
BEFORE INSERT ON %s
FOR EACH ROW
WHEN (new.id IS NULL)
BEGIN
SELECT %s.nextval INTO :new.id FROM dual;
END;
/""" % (tr_name, quote_name(table), sq_name)
return sequence_sql, trigger_sql
def get_drop_sequence(table):
return "DROP SEQUENCE %s;" % quote_name(get_sequence_name(table))
def _get_sequence_reset_sql():
# TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
return """
DECLARE
startvalue integer;
cval integer;
BEGIN
LOCK TABLE %(table)s IN SHARE MODE;
SELECT NVL(MAX(id), 0) INTO startvalue FROM %(table)s;
SELECT %(sequence)s.nextval INTO cval FROM dual;
cval := startvalue - cval;
IF cval != 0 THEN
EXECUTE IMMEDIATE 'ALTER SEQUENCE %(sequence)s MINVALUE 0 INCREMENT BY '||cval;
SELECT %(sequence)s.nextval INTO cval FROM dual;
EXECUTE IMMEDIATE 'ALTER SEQUENCE %(sequence)s INCREMENT BY 1';
END IF;
COMMIT;
END;
/"""
def get_sql_flush(style, tables, sequences):
    """Return a list of SQL statements required to remove all data from
    all tables in the database (without actually removing the tables
    themselves) and put the database in an empty 'initial' state
    """
-   # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
-   # TODO - SQL not actually tested against Oracle yet!
-   # TODO - autoincrement indices reset required? See other get_sql_flush() implementations
-   sql = ['%s %s;' % \
-       (style.SQL_KEYWORD('TRUNCATE'),
-       style.SQL_FIELD(quote_name(table))
-       ) for table in tables]
+   # Return a list of 'TRUNCATE x;', 'TRUNCATE y;',
+   # 'TRUNCATE z;'... style SQL statements
+   if tables:
+       # Oracle does support TRUNCATE, but it seems to get us into
+       # FK referential trouble, whereas DELETE FROM table works.
+       sql = ['%s %s %s;' % \
+           (style.SQL_KEYWORD('DELETE'),
+           style.SQL_KEYWORD('FROM'),
+           style.SQL_FIELD(quote_name(table))
+           ) for table in tables]
# Since we've just deleted all the rows, running our sequence
# ALTER code will reset the sequence to 0.
for sequence_info in sequences:
table_name = sequence_info['table']
seq_name = get_sequence_name(table_name)
query = _get_sequence_reset_sql() % {'sequence':seq_name,
'table':quote_name(table_name)}
sql.append(query)
return sql
else:
return []
def get_sequence_name(table):
name_length = get_max_name_length() - 3
return '%s_SQ' % util.truncate_name(table, name_length).upper()
def get_sql_sequence_reset(style, model_list):
    "Returns a list of the SQL statements to reset sequences for the given models."
-   # No sequence reset required
-   return []
+   from django.db import models
+   output = []
query = _get_sequence_reset_sql()
for model in model_list:
for f in model._meta.fields:
if isinstance(f, models.AutoField):
sequence_name = get_sequence_name(model._meta.db_table)
output.append(query % {'sequence':sequence_name,
'table':model._meta.db_table})
break # Only one AutoField is allowed per model, so don't bother continuing.
for f in model._meta.many_to_many:
sequence_name = get_sequence_name(f.m2m_db_table())
output.append(query % {'sequence':sequence_name,
'table':f.m2m_db_table()})
return output
def get_trigger_name(table):
name_length = get_max_name_length() - 3
return '%s_TR' % util.truncate_name(table, name_length).upper()
def get_query_set_class(DefaultQuerySet):
"Create a custom QuerySet class for Oracle."
from django.db import backend, connection
from django.db.models.query import EmptyResultSet, GET_ITERATOR_CHUNK_SIZE, quote_only_if_word
class OracleQuerySet(DefaultQuerySet):
def iterator(self):
"Performs the SELECT database lookup of this QuerySet."
from django.db.models.query import get_cached_row
# self._select is a dictionary, and dictionaries' key order is
# undefined, so we convert it to a list of tuples.
extra_select = self._select.items()
full_query = None
try:
try:
select, sql, params, full_query = self._get_sql_clause(get_full_query=True)
except TypeError:
select, sql, params = self._get_sql_clause()
except EmptyResultSet:
raise StopIteration
if not full_query:
full_query = "SELECT %s%s\n%s" % \
((self._distinct and "DISTINCT " or ""),
', '.join(select), sql)
cursor = connection.cursor()
cursor.execute(full_query, params)
fill_cache = self._select_related
fields = self.model._meta.fields
index_end = len(fields)
# so here's the logic;
# 1. retrieve each row in turn
# 2. convert NCLOBs
while 1:
rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
if not rows:
raise StopIteration
for row in rows:
row = self.resolve_columns(row, fields)
if fill_cache:
obj, index_end = get_cached_row(klass=self.model, row=row,
index_start=0, max_depth=self._max_related_depth)
else:
obj = self.model(*row[:index_end])
for i, k in enumerate(extra_select):
setattr(obj, k[0], row[index_end+i])
yield obj
def _get_sql_clause(self, get_full_query=False):
from django.db.models.query import fill_table_cache, \
handle_legacy_orderlist, orderfield2column
opts = self.model._meta
# Construct the fundamental parts of the query: SELECT X FROM Y WHERE Z.
select = ["%s.%s" % (backend.quote_name(opts.db_table), backend.quote_name(f.column)) for f in opts.fields]
tables = [quote_only_if_word(t) for t in self._tables]
joins = SortedDict()
where = self._where[:]
params = self._params[:]
# Convert self._filters into SQL.
joins2, where2, params2 = self._filters.get_sql(opts)
joins.update(joins2)
where.extend(where2)
params.extend(params2)
# Add additional tables and WHERE clauses based on select_related.
if self._select_related:
fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table])
# Add any additional SELECTs.
if self._select:
select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()])
# Start composing the body of the SQL statement.
sql = [" FROM", backend.quote_name(opts.db_table)]
# Compose the join dictionary into SQL describing the joins.
if joins:
sql.append(" ".join(["%s %s %s ON %s" % (join_type, table, alias, condition)
for (alias, (table, join_type, condition)) in joins.items()]))
# Compose the tables clause into SQL.
if tables:
sql.append(", " + ", ".join(tables))
# Compose the where clause into SQL.
if where:
sql.append(where and "WHERE " + " AND ".join(where))
# ORDER BY clause
order_by = []
if self._order_by is not None:
ordering_to_use = self._order_by
else:
ordering_to_use = opts.ordering
for f in handle_legacy_orderlist(ordering_to_use):
if f == '?': # Special case.
order_by.append(backend.get_random_function_sql())
else:
if f.startswith('-'):
col_name = f[1:]
order = "DESC"
else:
col_name = f
order = "ASC"
if "." in col_name:
table_prefix, col_name = col_name.split('.', 1)
table_prefix = backend.quote_name(table_prefix) + '.'
else:
# Use the database table as a column prefix if it wasn't given,
# and if the requested column isn't a custom SELECT.
if "." not in col_name and col_name not in (self._select or ()):
table_prefix = backend.quote_name(opts.db_table) + '.'
else:
table_prefix = ''
order_by.append('%s%s %s' % (table_prefix, backend.quote_name(orderfield2column(col_name, opts)), order))
if order_by:
sql.append("ORDER BY " + ", ".join(order_by))
# Look for column name collisions in the select elements
# and fix them with an AS alias. This allows us to do a
# SELECT * later in the paging query.
cols = [clause.split('.')[-1] for clause in select]
for index, col in enumerate(cols):
if cols.count(col) > 1:
col = '%s%d' % (col.replace('"', ''), index)
cols[index] = col
select[index] = '%s AS %s' % (select[index], col)
# LIMIT and OFFSET clauses
# To support limits and offsets, Oracle requires some funky rewriting of an otherwise normal looking query.
select_clause = ",".join(select)
distinct = (self._distinct and "DISTINCT " or "")
if order_by:
order_by_clause = " OVER (ORDER BY %s )" % (", ".join(order_by))
else:
#Oracle's row_number() function always requires an order-by clause.
#So we need to define a default order-by, since none was provided.
order_by_clause = " OVER (ORDER BY %s.%s)" % \
(backend.quote_name(opts.db_table),
backend.quote_name(opts.fields[0].db_column or opts.fields[0].column))
# limit_and_offset_clause
if self._limit is None:
assert self._offset is None, "'offset' is not allowed without 'limit'"
if self._offset is not None:
offset = int(self._offset)
else:
offset = 0
if self._limit is not None:
limit = int(self._limit)
else:
limit = None
limit_and_offset_clause = ''
if limit is not None:
limit_and_offset_clause = "WHERE rn > %s AND rn <= %s" % (offset, limit+offset)
elif offset:
limit_and_offset_clause = "WHERE rn > %s" % (offset)
if len(limit_and_offset_clause) > 0:
fmt = \
"""SELECT * FROM
(SELECT %s%s,
ROW_NUMBER()%s AS rn
%s)
%s"""
full_query = fmt % (distinct, select_clause,
order_by_clause, ' '.join(sql).strip(),
limit_and_offset_clause)
else:
full_query = None
if get_full_query:
return select, " ".join(sql), params, full_query
else:
return select, " ".join(sql), params
def resolve_columns(self, row, fields=()):
from django.db.models.fields import DateField, DateTimeField, \
TimeField, BooleanField, NullBooleanField, DecimalField, Field
values = []
for value, field in map(None, row, fields):
if isinstance(value, Database.LOB):
value = value.read()
# Oracle stores empty strings as null. We need to undo this in
# order to adhere to the Django convention of using the empty
# string instead of null, but only if the field accepts the
# empty string.
if value is None and isinstance(field, Field) and field.empty_strings_allowed:
value = ''
# Convert 1 or 0 to True or False
elif value in (1, 0) and isinstance(field, (BooleanField, NullBooleanField)):
value = bool(value)
# Convert floats to decimals
elif value is not None and isinstance(field, DecimalField):
value = util.typecast_decimal(field.format_number(value))
# cx_Oracle always returns datetime.datetime objects for
# DATE and TIMESTAMP columns, but Django wants to see a
# python datetime.date, .time, or .datetime. We use the type
# of the Field to determine which to cast to, but it's not
# always available.
# As a workaround, we cast to date if all the time-related
# values are 0, or to time if the date is 1/1/1900.
# This could be cleaned a bit by adding a method to the Field
# classes to normalize values from the database (the to_python
# method is used for validation and isn't what we want here).
elif isinstance(value, Database.Timestamp):
# In Python 2.3, the cx_Oracle driver returns its own
# Timestamp object that we must convert to a datetime class.
if not isinstance(value, datetime.datetime):
value = datetime.datetime(value.year, value.month, value.day, value.hour,
value.minute, value.second, value.fsecond)
if isinstance(field, DateTimeField):
pass # DateTimeField subclasses DateField so must be checked first.
elif isinstance(field, DateField):
value = value.date()
elif isinstance(field, TimeField) or (value.year == 1900 and value.month == value.day == 1):
value = value.time()
elif value.hour == value.minute == value.second == value.microsecond == 0:
value = value.date()
values.append(value)
return values
return OracleQuerySet
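For orientation only (identifiers are made up, not the literal SQL Django emits), the LIMIT/OFFSET handling above wraps the normal SELECT in a ROW_NUMBER() subquery shaped roughly like:

    # e.g. a slice [20:30] over a two-column table:
    #   SELECT * FROM
    #       (SELECT "POLLS_POLL"."ID", "POLLS_POLL"."QUESTION",
    #               ROW_NUMBER() OVER (ORDER BY "POLLS_POLL"."ID") AS rn
    #        FROM "POLLS_POLL")
    #   WHERE rn > 20 AND rn <= 30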
OPERATOR_MAPPING = {
    'exact': '= %s',
-   'iexact': 'LIKE %s',
-   'contains': 'LIKE %s',
-   'icontains': 'LIKE %s',
+   'iexact': '= UPPER(%s)',
+   'contains': "LIKE %s ESCAPE '\\'",
+   'icontains': "LIKE UPPER(%s) ESCAPE '\\'",
    'gt': '> %s',
    'gte': '>= %s',
    'lt': '< %s',
    'lte': '<= %s',
-   'startswith': 'LIKE %s',
-   'endswith': 'LIKE %s',
-   'istartswith': 'LIKE %s',
-   'iendswith': 'LIKE %s',
+   'startswith': "LIKE %s ESCAPE '\\'",
+   'endswith': "LIKE %s ESCAPE '\\'",
+   'istartswith': "LIKE UPPER(%s) ESCAPE '\\'",
+   'iendswith': "LIKE UPPER(%s) ESCAPE '\\'",
}
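Each fragment above follows the column reference when a WHERE clause is built, with '%s' left in place as the bind placeholder; a couple of illustrative lookups:

    OPERATOR_MAPPING['icontains']   # -> "LIKE UPPER(%s) ESCAPE '\\'"
    OPERATOR_MAPPING['iexact']      # -> '= UPPER(%s)'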


@@ -2,9 +2,10 @@ from django.conf import settings
import os

def runshell():
-   args = ''
-   args += settings.DATABASE_USER
+   dsn = settings.DATABASE_USER
    if settings.DATABASE_PASSWORD:
-       args += "/%s" % settings.DATABASE_PASSWORD
-   args += "@%s" % settings.DATABASE_NAME
-   os.execvp('sqlplus', args)
+       dsn += "/%s" % settings.DATABASE_PASSWORD
+   if settings.DATABASE_NAME:
+       dsn += "@%s" % settings.DATABASE_NAME
+   args = ["sqlplus", "-L", dsn]
+   os.execvp("sqlplus", args)


@@ -1,26 +1,304 @@
import sys, time
from django.core import management
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
DATA_TYPES = {
-   'AutoField': 'number(38)',
-   'BooleanField': 'number(1)',
-   'CharField': 'varchar2(%(maxlength)s)',
-   'CommaSeparatedIntegerField': 'varchar2(%(maxlength)s)',
-   'DateField': 'date',
-   'DateTimeField': 'date',
-   'DecimalField': 'number(%(max_digits)s, %(decimal_places)s)',
-   'FileField': 'varchar2(100)',
-   'FilePathField': 'varchar2(100)',
-   'FloatField': 'double precision',
-   'ImageField': 'varchar2(100)',
-   'IntegerField': 'integer',
-   'IPAddressField': 'char(15)',
+   'AutoField': 'NUMBER(11)',
+   'BooleanField': 'NUMBER(1) CHECK (%(column)s IN (0,1))',
+   'CharField': 'VARCHAR2(%(maxlength)s)',
+   'CommaSeparatedIntegerField': 'VARCHAR2(%(maxlength)s)',
+   'DateField': 'DATE',
+   'DateTimeField': 'TIMESTAMP',
+   'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
+   'FileField': 'VARCHAR2(100)',
+   'FilePathField': 'VARCHAR2(100)',
+   'FloatField': 'DOUBLE PRECISION',
+   'ImageField': 'VARCHAR2(100)',
+   'IntegerField': 'NUMBER(11)',
+   'IPAddressField': 'VARCHAR2(15)',
    'ManyToManyField': None,
-   'NullBooleanField': 'integer',
-   'OneToOneField': 'integer',
-   'PhoneNumberField': 'varchar(20)',
-   'PositiveIntegerField': 'integer',
-   'PositiveSmallIntegerField': 'smallint',
-   'SlugField': 'varchar(50)',
-   'SmallIntegerField': 'smallint',
-   'TextField': 'long',
-   'TimeField': 'timestamp',
-   'USStateField': 'varchar(2)',
+   'NullBooleanField': 'NUMBER(1) CHECK ((%(column)s IN (0,1)) OR (%(column)s IS NULL))',
+   'OneToOneField': 'NUMBER(11)',
+   'PhoneNumberField': 'VARCHAR2(20)',
+   'PositiveIntegerField': 'NUMBER(11) CHECK (%(column)s >= 0)',
+   'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(column)s >= 0)',
+   'SlugField': 'VARCHAR2(50)',
+   'SmallIntegerField': 'NUMBER(11)',
+   'TextField': 'NCLOB',
+   'TimeField': 'TIMESTAMP',
+   'URLField': 'VARCHAR2(200)',
+   'USStateField': 'CHAR(2)',
}
TEST_DATABASE_PREFIX = 'test_'
PASSWORD = 'Im_a_lumberjack'
REMEMBER = {}
def create_test_db(settings, connection, backend, verbosity=1, autoclobber=False):
TEST_DATABASE_NAME = _test_database_name(settings)
TEST_DATABASE_USER = _test_database_user(settings)
TEST_DATABASE_PASSWD = _test_database_passwd(settings)
TEST_DATABASE_TBLSPACE = _test_database_tblspace(settings)
TEST_DATABASE_TBLSPACE_TMP = _test_database_tblspace_tmp(settings)
parameters = {
'dbname': TEST_DATABASE_NAME,
'user': TEST_DATABASE_USER,
'password': TEST_DATABASE_PASSWD,
'tblspace': TEST_DATABASE_TBLSPACE,
'tblspace_temp': TEST_DATABASE_TBLSPACE_TMP,
}
REMEMBER['user'] = settings.DATABASE_USER
REMEMBER['passwd'] = settings.DATABASE_PASSWORD
cursor = connection.cursor()
if _test_database_create(settings):
if verbosity >= 1:
print 'Creating test database...'
try:
_create_test_db(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database..."
_destroy_test_db(cursor, parameters, verbosity)
if verbosity >= 1:
print "Creating test database..."
_create_test_db(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
if _test_user_create(settings):
if verbosity >= 1:
print "Creating test user..."
try:
_create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_USER)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test user..."
_destroy_test_user(cursor, parameters, verbosity)
if verbosity >= 1:
print "Creating test user..."
_create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test user: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
connection.close()
settings.DATABASE_USER = TEST_DATABASE_USER
settings.DATABASE_PASSWORD = TEST_DATABASE_PASSWD
management.syncdb(verbosity, interactive=False)
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database.
cursor = connection.cursor()
def destroy_test_db(settings, connection, backend, old_database_name, verbosity=1):
connection.close()
TEST_DATABASE_NAME = _test_database_name(settings)
TEST_DATABASE_USER = _test_database_user(settings)
TEST_DATABASE_PASSWD = _test_database_passwd(settings)
TEST_DATABASE_TBLSPACE = _test_database_tblspace(settings)
TEST_DATABASE_TBLSPACE_TMP = _test_database_tblspace_tmp(settings)
settings.DATABASE_NAME = old_database_name
settings.DATABASE_USER = REMEMBER['user']
settings.DATABASE_PASSWORD = REMEMBER['passwd']
parameters = {
'dbname': TEST_DATABASE_NAME,
'user': TEST_DATABASE_USER,
'password': TEST_DATABASE_PASSWD,
'tblspace': TEST_DATABASE_TBLSPACE,
'tblspace_temp': TEST_DATABASE_TBLSPACE_TMP,
}
REMEMBER['user'] = settings.DATABASE_USER
REMEMBER['passwd'] = settings.DATABASE_PASSWORD
cursor = connection.cursor()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
if _test_user_create(settings):
if verbosity >= 1:
print 'Destroying test user...'
_destroy_test_user(cursor, parameters, verbosity)
if _test_database_create(settings):
if verbosity >= 1:
print 'Destroying test database...'
_destroy_test_db(cursor, parameters, verbosity)
connection.close()
def _create_test_db(cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_db(): dbname = %s" % parameters['dbname']
statements = [
"""CREATE TABLESPACE %(tblspace)s
DATAFILE '%(tblspace)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
""",
"""CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
""",
]
_execute_statements(cursor, statements, parameters, verbosity)
def _create_test_user(cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_user(): username = %s" % parameters['user']
statements = [
"""CREATE USER %(user)s
IDENTIFIED BY %(password)s
DEFAULT TABLESPACE %(tblspace)s
TEMPORARY TABLESPACE %(tblspace_temp)s
""",
"""GRANT CONNECT, RESOURCE TO %(user)s""",
]
_execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_db(cursor, parameters, verbosity):
if verbosity >= 2:
print "_destroy_test_db(): dbname=%s" % parameters['dbname']
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
_execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(cursor, parameters, verbosity):
if verbosity >= 2:
print "_destroy_test_user(): user=%s" % parameters['user']
print "Be patient. This can take some time..."
statements = [
'DROP USER %(user)s CASCADE',
]
_execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(cursor, statements, parameters, verbosity):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print stmt
try:
cursor.execute(stmt)
except Exception, err:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _test_database_name(settings):
name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
try:
if settings.TEST_DATABASE_NAME:
name = settings.TEST_DATABASE_NAME
except AttributeError:
pass
except:
raise
return name
def _test_database_create(settings):
name = True
try:
if settings.TEST_DATABASE_CREATE:
name = True
else:
name = False
except AttributeError:
pass
except:
raise
return name
def _test_user_create(settings):
name = True
try:
if settings.TEST_USER_CREATE:
name = True
else:
name = False
except AttributeError:
pass
except:
raise
return name
def _test_database_user(settings):
name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
try:
if settings.TEST_DATABASE_USER:
name = settings.TEST_DATABASE_USER
except AttributeError:
pass
except:
raise
return name
def _test_database_passwd(settings):
name = PASSWORD
try:
if settings.TEST_DATABASE_PASSWD:
name = settings.TEST_DATABASE_PASSWD
except AttributeError:
pass
except:
raise
return name
def _test_database_tblspace(settings):
name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
try:
if settings.TEST_DATABASE_TBLSPACE:
name = settings.TEST_DATABASE_TBLSPACE
except AttributeError:
pass
except:
raise
return name
def _test_database_tblspace_tmp(settings):
name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + '_temp'
try:
if settings.TEST_DATABASE_TBLSPACE_TMP:
name = settings.TEST_DATABASE_TBLSPACE_TMP
except AttributeError:
pass
except:
raise
return name
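The _test_* helpers above fall back to defaults derived from DATABASE_NAME, the 'test_' prefix and the built-in password; a project can override them in its settings file, for example (all values here are illustrative):

    # Optional settings read by create_test_db()/destroy_test_db():
    TEST_DATABASE_NAME = 'test_mysite'
    TEST_DATABASE_USER = 'test_mysite'
    TEST_DATABASE_PASSWD = 'secret'
    TEST_DATABASE_TBLSPACE = 'test_mysite'
    TEST_DATABASE_TBLSPACE_TMP = 'test_mysite_temp'
    TEST_DATABASE_CREATE = True   # False reuses an existing tablespace
    TEST_USER_CREATE = True       # False reuses an existing test user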


@@ -1,14 +1,19 @@
+from django.db.backends.oracle.base import quote_name
import re
+import cx_Oracle

foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")

def get_table_list(cursor):
    "Returns a list of table names in the current database."
    cursor.execute("SELECT TABLE_NAME FROM USER_TABLES")
-   return [row[0] for row in cursor.fetchall()]
+   return [row[0].upper() for row in cursor.fetchall()]

def get_table_description(cursor, table_name):
-   return table_name
+   "Returns a description of the table, with the DB-API cursor.description interface."
+   cursor.execute("SELECT * FROM %s WHERE ROWNUM < 2" % quote_name(table_name))
+   return cursor.description

def _name_to_index(cursor, table_name):
    """

@@ -22,7 +27,24 @@ def get_relations(cursor, table_name):
    Returns a dictionary of {field_index: (field_index_other_table, other_table)}
    representing all relationships to the given table. Indexes are 0-based.
    """
-   raise NotImplementedError
+   cursor.execute("""
SELECT ta.column_id - 1, tb.table_name, tb.column_id - 1
FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb,
user_tab_cols ta, user_tab_cols tb
WHERE user_constraints.table_name = %s AND
ta.table_name = %s AND
ta.column_name = ca.column_name AND
ca.table_name = %s AND
user_constraints.constraint_name = ca.constraint_name AND
user_constraints.r_constraint_name = cb.constraint_name AND
cb.table_name = tb.table_name AND
cb.column_name = tb.column_name AND
ca.position = cb.position""", [table_name, table_name, table_name])
relations = {}
for row in cursor.fetchall():
relations[row[0]] = (row[2], row[1])
return relations
def get_indexes(cursor, table_name):
    """

@@ -31,20 +53,46 @@ def get_indexes(cursor, table_name):
    {'primary_key': boolean representing whether it's the primary key,
     'unique': boolean representing whether it's a unique index}
    """
-   raise NotImplementedError
+   # This query retrieves each index on the given table, including the
+   # first associated field name
# "We were in the nick of time; you were in great peril!"
sql = """
WITH primarycols AS (
SELECT user_cons_columns.table_name, user_cons_columns.column_name, 1 AS PRIMARYCOL
FROM user_cons_columns, user_constraints
WHERE user_cons_columns.constraint_name = user_constraints.constraint_name AND
user_constraints.constraint_type = 'P' AND
user_cons_columns.table_name = %s),
uniquecols AS (
SELECT user_ind_columns.table_name, user_ind_columns.column_name, 1 AS UNIQUECOL
FROM user_indexes, user_ind_columns
WHERE uniqueness = 'UNIQUE' AND
user_indexes.index_name = user_ind_columns.index_name AND
user_ind_columns.table_name = %s)
SELECT allcols.column_name, primarycols.primarycol, uniquecols.UNIQUECOL
FROM (SELECT column_name FROM primarycols UNION SELECT column_name FROM
uniquecols) allcols,
primarycols, uniquecols
WHERE allcols.column_name = primarycols.column_name (+) AND
allcols.column_name = uniquecols.column_name (+)
"""
cursor.execute(sql, [table_name, table_name])
indexes = {}
for row in cursor.fetchall():
# Each row is (column_name, primary-key flag, unique flag); the flags come
# from the outer joins above, so they are 1 when set and NULL otherwise.
indexes[row[0]] = {'primary_key': row[1], 'unique': row[2]}
return indexes
# Maps type codes to Django Field types.
DATA_TYPES_REVERSE = {
    16: 'BooleanField',
    21: 'SmallIntegerField',
    23: 'IntegerField',
    25: 'TextField',
    869: 'IPAddressField',
    1043: 'CharField',
    1082: 'DateField',
    1083: 'TimeField',
    1114: 'DateTimeField',
    1184: 'DateTimeField',
    1266: 'TimeField',
    1700: 'DecimalField',
}
# Maps type objects to Django Field types.
DATA_TYPES_REVERSE = {
    cx_Oracle.CLOB: 'TextField',
    cx_Oracle.DATETIME: 'DateTimeField',
    cx_Oracle.FIXED_CHAR: 'CharField',
    cx_Oracle.NCLOB: 'TextField',
    cx_Oracle.NUMBER: 'DecimalField',
    cx_Oracle.STRING: 'CharField',
    cx_Oracle.TIMESTAMP: 'DateTimeField',
}

View File

@ -105,7 +105,14 @@ class DatabaseWrapper(local):
self.connection.close() self.connection.close()
self.connection = None self.connection = None
allows_group_by_ordinal = True
allows_unique_and_pk = True
autoindexes_primary_keys = True
needs_datetime_string_cast = True
needs_upper_for_iops = False
supports_constraints = True supports_constraints = True
supports_tablespaces = False
uses_case_insensitive_names = False
def quote_name(name): def quote_name(name):
if name.startswith('"') and name.endswith('"'): if name.startswith('"') and name.endswith('"'):
@ -138,6 +145,9 @@ def get_date_trunc_sql(lookup_type, field_name):
# http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def get_datetime_cast_sql():
return None
def get_limit_offset_sql(limit, offset=None): def get_limit_offset_sql(limit, offset=None):
sql = "LIMIT %s" % limit sql = "LIMIT %s" % limit
if offset and offset != 0: if offset and offset != 0:
@ -159,6 +169,15 @@ def get_drop_foreignkey_sql():
def get_pk_default_value(): def get_pk_default_value():
return "DEFAULT" return "DEFAULT"
def get_max_name_length():
return None
def get_start_transaction_sql():
return "BEGIN;"
def get_autoinc_sql(table):
return None
def get_sql_flush(style, tables, sequences): def get_sql_flush(style, tables, sequences):
"""Return a list of SQL statements required to remove all data from """Return a list of SQL statements required to remove all data from
all tables in the database (without actually removing the tables all tables in the database (without actually removing the tables

View File

@ -73,7 +73,14 @@ class DatabaseWrapper(local):
self.connection.close() self.connection.close()
self.connection = None self.connection = None
allows_group_by_ordinal = True
allows_unique_and_pk = True
autoindexes_primary_keys = True
needs_datetime_string_cast = False
needs_upper_for_iops = False
supports_constraints = True supports_constraints = True
supports_tablespaces = False
uses_case_insensitive_names = True
def quote_name(name): def quote_name(name):
if name.startswith('"') and name.endswith('"'): if name.startswith('"') and name.endswith('"'):
@ -98,6 +105,9 @@ def get_date_trunc_sql(lookup_type, field_name):
# http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def get_datetime_cast_sql():
return None
def get_limit_offset_sql(limit, offset=None): def get_limit_offset_sql(limit, offset=None):
sql = "LIMIT %s" % limit sql = "LIMIT %s" % limit
if offset and offset != 0: if offset and offset != 0:
@ -119,6 +129,15 @@ def get_drop_foreignkey_sql():
def get_pk_default_value(): def get_pk_default_value():
return "DEFAULT" return "DEFAULT"
def get_max_name_length():
return None
def get_start_transaction_sql():
return "BEGIN;"
def get_autoinc_sql(table):
return None
def get_sql_flush(style, tables, sequences): def get_sql_flush(style, tables, sequences):
"""Return a list of SQL statements required to remove all data from """Return a list of SQL statements required to remove all data from
all tables in the database (without actually removing the tables all tables in the database (without actually removing the tables

View File

@ -107,7 +107,14 @@ class SQLiteCursorWrapper(Database.Cursor):
def convert_query(self, query, num_params): def convert_query(self, query, num_params):
return query % tuple("?" * num_params) return query % tuple("?" * num_params)
allows_group_by_ordinal = True
allows_unique_and_pk = True
autoindexes_primary_keys = True
needs_datetime_string_cast = True
needs_upper_for_iops = False
supports_constraints = False supports_constraints = False
supports_tablespaces = False
uses_case_insensitive_names = False
def quote_name(name): def quote_name(name):
if name.startswith('"') and name.endswith('"'): if name.startswith('"') and name.endswith('"'):
@ -139,6 +146,9 @@ def get_date_trunc_sql(lookup_type, field_name):
# sqlite doesn't support DATE_TRUNC, so we fake it as above. # sqlite doesn't support DATE_TRUNC, so we fake it as above.
return 'django_date_trunc("%s", %s)' % (lookup_type.lower(), field_name) return 'django_date_trunc("%s", %s)' % (lookup_type.lower(), field_name)
def get_datetime_cast_sql():
return None
def get_limit_offset_sql(limit, offset=None): def get_limit_offset_sql(limit, offset=None):
sql = "LIMIT %s" % limit sql = "LIMIT %s" % limit
if offset and offset != 0: if offset and offset != 0:
@ -160,6 +170,15 @@ def get_drop_foreignkey_sql():
def get_pk_default_value(): def get_pk_default_value():
return "NULL" return "NULL"
def get_max_name_length():
return None
def get_start_transaction_sql():
return "BEGIN;"
def get_autoinc_sql(table):
return None
def get_sql_flush(style, tables, sequences): def get_sql_flush(style, tables, sequences):
"""Return a list of SQL statements required to remove all data from """Return a list of SQL statements required to remove all data from
all tables in the database (without actually removing the tables all tables in the database (without actually removing the tables

View File

@ -1,4 +1,5 @@
import datetime import datetime
import md5
from time import time from time import time
try: try:
@ -107,6 +108,16 @@ def rev_typecast_decimal(d):
return None return None
return str(d) return str(d)
def truncate_name(name, length=None):
"""Shortens a string to a repeatable mangled version with the given length.
"""
if length is None or len(name) <= length:
return name
hash = md5.md5(name).hexdigest()[:4]
return '%s%s' % (name[:length-4], hash)
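# Example behaviour (illustrative; the 4-character suffix is an md5-derived
# hash of the full name, so it is stable across calls):
#
#     truncate_name('myapp_shortname', 30)
#     #   -> 'myapp_shortname' (already within the limit, returned unchanged)
#     truncate_name('myapp_averyverylongmodelnameindeed', 30)
#     #   -> first 26 characters of the name plus the 4-character hash,
#     #      always exactly 30 characters
#
# backend.get_max_name_length() supplies the limit (e.g. 30 on Oracle).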
################################################################################## ##################################################################################
# Helper functions for dictfetch* for databases that don't natively support them # # Helper functions for dictfetch* for databases that don't natively support them #
################################################################################## ##################################################################################

View File

@ -210,17 +210,18 @@ class Model(object):
record_exists = True record_exists = True
if pk_set: if pk_set:
# Determine whether a record with the primary key already exists. # Determine whether a record with the primary key already exists.
cursor.execute("SELECT 1 FROM %s WHERE %s=%%s LIMIT 1" % \ cursor.execute("SELECT COUNT(*) FROM %s WHERE %s=%%s" % \
(backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column)), [pk_val]) (backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column)),
self._meta.pk.get_db_prep_lookup('exact', pk_val))
# If it does already exist, do an UPDATE. # If it does already exist, do an UPDATE.
if cursor.fetchone(): if cursor.fetchone()[0] > 0:
db_values = [f.get_db_prep_save(f.pre_save(self, False)) for f in non_pks] db_values = [f.get_db_prep_save(f.pre_save(self, False)) for f in non_pks]
if db_values: if db_values:
cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \ cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \
(backend.quote_name(self._meta.db_table), (backend.quote_name(self._meta.db_table),
','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]), ','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]),
backend.quote_name(self._meta.pk.column)), backend.quote_name(self._meta.pk.column)),
db_values + [pk_val]) db_values + self._meta.pk.get_db_prep_lookup('exact', pk_val))
else: else:
record_exists = False record_exists = False
if not pk_set or not record_exists: if not pk_set or not record_exists:

View File

@ -74,12 +74,16 @@ class Field(object):
core=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, core=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True,
prepopulate_from=None, unique_for_date=None, unique_for_month=None, prepopulate_from=None, unique_for_date=None, unique_for_month=None,
unique_for_year=None, validator_list=None, choices=None, radio_admin=None, unique_for_year=None, validator_list=None, choices=None, radio_admin=None,
help_text='', db_column=None): help_text='', db_column=None, db_tablespace=None):
self.name = name self.name = name
self.verbose_name = verbose_name self.verbose_name = verbose_name
self.primary_key = primary_key self.primary_key = primary_key
self.maxlength, self.unique = maxlength, unique self.maxlength, self.unique = maxlength, unique
self.blank, self.null = blank, null self.blank, self.null = blank, null
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if self.empty_strings_allowed and settings.DATABASE_ENGINE == 'oracle':
self.null = True
self.core, self.rel, self.default = core, rel, default self.core, self.rel, self.default = core, rel, default
self.editable = editable self.editable = editable
self.serialize = serialize self.serialize = serialize
@ -91,6 +95,7 @@ class Field(object):
self.radio_admin = radio_admin self.radio_admin = radio_admin
self.help_text = help_text self.help_text = help_text
self.db_column = db_column self.db_column = db_column
self.db_tablespace = db_tablespace
# Set db_index to True if the field has a relationship and doesn't explicitly set db_index. # Set db_index to True if the field has a relationship and doesn't explicitly set db_index.
self.db_index = db_index self.db_index = db_index
@ -201,7 +206,7 @@ class Field(object):
if callable(self.default): if callable(self.default):
return self.default() return self.default()
return self.default return self.default
if not self.empty_strings_allowed or self.null: if not self.empty_strings_allowed or (self.null and settings.DATABASE_ENGINE != 'oracle'):
return None return None
return "" return ""
@ -806,6 +811,7 @@ class IPAddressField(Field):
validators.isValidIPAddress4(field_data, None) validators.isValidIPAddress4(field_data, None)
class NullBooleanField(Field): class NullBooleanField(Field):
empty_strings_allowed = False
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs['null'] = True kwargs['null'] = True
Field.__init__(self, *args, **kwargs) Field.__init__(self, *args, **kwargs)
@ -875,10 +881,18 @@ class TimeField(Field):
Field.__init__(self, verbose_name, name, **kwargs) Field.__init__(self, verbose_name, name, **kwargs)
def get_db_prep_lookup(self, lookup_type, value): def get_db_prep_lookup(self, lookup_type, value):
if lookup_type == 'range': if settings.DATABASE_ENGINE == 'oracle':
value = [str(v) for v in value] # Oracle requires a date in order to parse.
def prep(value):
if isinstance(value, datetime.time):
value = datetime.datetime.combine(datetime.date(1900, 1, 1), value)
return str(value)
else: else:
value = str(value) prep = str
if lookup_type == 'range':
value = [prep(v) for v in value]
else:
value = prep(value)
return Field.get_db_prep_lookup(self, lookup_type, value) return Field.get_db_prep_lookup(self, lookup_type, value)
def pre_save(self, model_instance, add): def pre_save(self, model_instance, add):
@ -896,6 +910,14 @@ class TimeField(Field):
# doesn't support microseconds. # doesn't support microseconds.
if settings.DATABASE_ENGINE == 'mysql' and hasattr(value, 'microsecond'): if settings.DATABASE_ENGINE == 'mysql' and hasattr(value, 'microsecond'):
value = value.replace(microsecond=0) value = value.replace(microsecond=0)
if settings.DATABASE_ENGINE == 'oracle':
# cx_Oracle expects a datetime.datetime to persist into TIMESTAMP field.
if isinstance(value, datetime.time):
value = datetime.datetime(1900, 1, 1, value.hour, value.minute,
value.second, value.microsecond)
elif isinstance(value, basestring):
value = datetime.datetime(*(time.strptime(value, '%H:%M:%S')[:6]))
else:
value = str(value) value = str(value)
return Field.get_db_prep_save(self, value) return Field.get_db_prep_save(self, value)

View File

@ -336,10 +336,7 @@ def create_many_related_manager(superclass):
(target_col_name, self.join_table, source_col_name, (target_col_name, self.join_table, source_col_name,
target_col_name, ",".join(['%s'] * len(new_ids))), target_col_name, ",".join(['%s'] * len(new_ids))),
[self._pk_val] + list(new_ids)) [self._pk_val] + list(new_ids))
if cursor.rowcount is not None and cursor.rowcount != 0:
existing_ids = set([row[0] for row in cursor.fetchmany(cursor.rowcount)])
else:
existing_ids = set()
existing_ids = set([row[0] for row in cursor.fetchall()])
# Add the ones that aren't there already # Add the ones that aren't there already
for obj_id in (new_ids - existing_ids): for obj_id in (new_ids - existing_ids):

View File

@ -13,7 +13,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|
DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering', DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering',
'unique_together', 'permissions', 'get_latest_by', 'unique_together', 'permissions', 'get_latest_by',
'order_with_respect_to', 'app_label') 'order_with_respect_to', 'app_label', 'db_tablespace')
class Options(object): class Options(object):
def __init__(self, meta): def __init__(self, meta):
@ -27,6 +27,7 @@ class Options(object):
self.object_name, self.app_label = None, None self.object_name, self.app_label = None, None
self.get_latest_by = None self.get_latest_by = None
self.order_with_respect_to = None self.order_with_respect_to = None
self.db_tablespace = None
self.admin = None self.admin = None
self.meta = meta self.meta = meta
self.pk = None self.pk = None
@ -59,6 +60,8 @@ class Options(object):
del self.meta del self.meta
def _prepare(self, model): def _prepare(self, model):
from django.db import backend
from django.db.backends.util import truncate_name
if self.order_with_respect_to: if self.order_with_respect_to:
self.order_with_respect_to = self.get_field(self.order_with_respect_to) self.order_with_respect_to = self.get_field(self.order_with_respect_to)
self.ordering = ('_order',) self.ordering = ('_order',)
@ -73,6 +76,8 @@ class Options(object):
# If the db_table wasn't provided, use the app_label + module_name. # If the db_table wasn't provided, use the app_label + module_name.
if not self.db_table: if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.module_name) self.db_table = "%s_%s" % (self.app_label, self.module_name)
self.db_table = truncate_name(self.db_table,
backend.get_max_name_length())
def add_field(self, field): def add_field(self, field):
# Insert the given field in the order in which it was created, using # Insert the given field in the order in which it was created, using

View File

@ -4,6 +4,7 @@ from django.db.models import signals, loading
from django.dispatch import dispatcher from django.dispatch import dispatcher
from django.utils.datastructures import SortedDict from django.utils.datastructures import SortedDict
from django.contrib.contenttypes import generic from django.contrib.contenttypes import generic
import datetime
import operator import operator
import re import re
@ -78,7 +79,7 @@ def quote_only_if_word(word):
else: else:
return backend.quote_name(word) return backend.quote_name(word)
class QuerySet(object): class _QuerySet(object):
"Represents a lazy database lookup for a set of objects" "Represents a lazy database lookup for a set of objects"
def __init__(self, model=None): def __init__(self, model=None):
self.model = model self.model = model
@ -182,13 +183,18 @@ class QuerySet(object):
cursor = connection.cursor() cursor = connection.cursor()
cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params)
fill_cache = self._select_related fill_cache = self._select_related
index_end = len(self.model._meta.fields) fields = self.model._meta.fields
index_end = len(fields)
has_resolve_columns = hasattr(self, 'resolve_columns')
while 1: while 1:
rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
if not rows: if not rows:
raise StopIteration raise StopIteration
for row in rows: for row in rows:
if has_resolve_columns:
row = self.resolve_columns(row, fields)
if fill_cache: if fill_cache:
obj, index_end = get_cached_row(klass=self.model, row=row, obj, index_end = get_cached_row(klass=self.model, row=row,
index_start=0, max_depth=self._max_related_depth) index_start=0, max_depth=self._max_related_depth)
@ -552,6 +558,12 @@ class QuerySet(object):
return select, " ".join(sql), params return select, " ".join(sql), params
# Use the backend's QuerySet class if it defines one, otherwise use _QuerySet.
if hasattr(backend, 'get_query_set_class'):
QuerySet = backend.get_query_set_class(_QuerySet)
else:
QuerySet = _QuerySet
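# A minimal sketch (not the real Oracle backend) of the hook used above: a
# backend may define get_query_set_class(), returning a _QuerySet subclass
# whose resolve_columns() fixes up raw rows; the iterators call it only when
# the attribute exists. Everything below is illustrative, not Django code.
def get_query_set_class(DefaultQuerySet):
    "Returns a hypothetical QuerySet subclass that post-processes raw rows."
    class SketchQuerySet(DefaultQuerySet):
        def resolve_columns(self, row, fields=()):
            # Pair each raw value with its field (extra SELECT columns have no
            # field) and apply whatever coercion the backend needs; here we
            # merely strip trailing padding from string values as a stand-in.
            values = []
            for value, field in map(None, row, fields):
                if isinstance(value, basestring):
                    value = value.rstrip()
                values.append(value)
            return values
    return SketchQuerySet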
class ValuesQuerySet(QuerySet): class ValuesQuerySet(QuerySet):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(ValuesQuerySet, self).__init__(*args, **kwargs) super(ValuesQuerySet, self).__init__(*args, **kwargs)
@ -566,35 +578,38 @@ class ValuesQuerySet(QuerySet):
# self._fields is a list of field names to fetch. # self._fields is a list of field names to fetch.
if self._fields: if self._fields:
#columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields]
if not self._select: if not self._select:
columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] fields = [self.model._meta.get_field(f, many_to_many=False) for f in self._fields]
else: else:
columns = [] fields = []
for f in self._fields: for f in self._fields:
if f in [field.name for field in self.model._meta.fields]: if f in [field.name for field in self.model._meta.fields]:
columns.append( self.model._meta.get_field(f, many_to_many=False).column ) fields.append(self.model._meta.get_field(f, many_to_many=False))
elif not self._select.has_key( f ): elif not self._select.has_key( f ):
raise FieldDoesNotExist, '%s has no field named %r' % ( self.model._meta.object_name, f ) raise FieldDoesNotExist, '%s has no field named %r' % ( self.model._meta.object_name, f )
field_names = self._fields field_names = self._fields
else: # Default to all fields. else: # Default to all fields.
columns = [f.column for f in self.model._meta.fields] fields = self.model._meta.fields
field_names = [f.attname for f in self.model._meta.fields] field_names = [f.attname for f in fields]
columns = [f.column for f in fields]
select = ['%s.%s' % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns] select = ['%s.%s' % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns]
# Add any additional SELECTs. # Add any additional SELECTs.
if self._select: if self._select:
select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()]) select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()])
cursor = connection.cursor() cursor = connection.cursor()
cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params)
has_resolve_columns = hasattr(self, 'resolve_columns')
while 1: while 1:
rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
if not rows: if not rows:
raise StopIteration raise StopIteration
for row in rows: for row in rows:
if has_resolve_columns:
row = self.resolve_columns(row, fields)
yield dict(zip(field_names, row)) yield dict(zip(field_names, row))
def _clone(self, klass=None, **kwargs): def _clone(self, klass=None, **kwargs):
@ -605,25 +620,49 @@ class ValuesQuerySet(QuerySet):
class DateQuerySet(QuerySet): class DateQuerySet(QuerySet):
def iterator(self): def iterator(self):
from django.db.backends.util import typecast_timestamp from django.db.backends.util import typecast_timestamp
from django.db.models.fields import DateTimeField
self._order_by = () # Clear this because it'll mess things up otherwise. self._order_by = () # Clear this because it'll mess things up otherwise.
if self._field.null: if self._field.null:
self._where.append('%s.%s IS NOT NULL' % \ self._where.append('%s.%s IS NOT NULL' % \
(backend.quote_name(self.model._meta.db_table), backend.quote_name(self._field.column))) (backend.quote_name(self.model._meta.db_table), backend.quote_name(self._field.column)))
try: try:
select, sql, params = self._get_sql_clause() select, sql, params = self._get_sql_clause()
except EmptyResultSet: except EmptyResultSet:
raise StopIteration raise StopIteration
sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1 %s' % \ table_name = backend.quote_name(self.model._meta.db_table)
field_name = backend.quote_name(self._field.column)
if backend.allows_group_by_ordinal:
group_by = '1'
else:
group_by = backend.get_date_trunc_sql(self._kind,
'%s.%s' % (table_name, field_name))
sql = 'SELECT %s %s GROUP BY %s ORDER BY 1 %s' % \
(backend.get_date_trunc_sql(self._kind, '%s.%s' % (backend.quote_name(self.model._meta.db_table), (backend.get_date_trunc_sql(self._kind, '%s.%s' % (backend.quote_name(self.model._meta.db_table),
backend.quote_name(self._field.column))), sql, self._order) backend.quote_name(self._field.column))), sql, group_by, self._order)
cursor = connection.cursor() cursor = connection.cursor()
cursor.execute(sql, params) cursor.execute(sql, params)
# We have to manually run typecast_timestamp(str()) on the results, because
# MySQL doesn't automatically cast the result of date functions as datetime
# objects -- MySQL returns the values as strings, instead.
return [typecast_timestamp(str(row[0])) for row in cursor.fetchall()]
has_resolve_columns = hasattr(self, 'resolve_columns')
needs_datetime_string_cast = backend.needs_datetime_string_cast
dates = []
# It would be better to use self._field here instead of DateTimeField(),
# but in Oracle that will result in a list of datetime.date instead of
# datetime.datetime.
fields = [DateTimeField()]
while 1:
rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
if not rows:
return dates
for row in rows:
date = row[0]
if has_resolve_columns:
date = self.resolve_columns([date], fields)[0]
elif needs_datetime_string_cast:
date = typecast_timestamp(str(date))
dates.append(date)
def _clone(self, klass=None, **kwargs): def _clone(self, klass=None, **kwargs):
c = super(DateQuerySet, self)._clone(klass, **kwargs) c = super(DateQuerySet, self)._clone(klass, **kwargs)
@ -731,8 +770,17 @@ def get_where_clause(lookup_type, table_prefix, field_name, value):
if table_prefix.endswith('.'): if table_prefix.endswith('.'):
table_prefix = backend.quote_name(table_prefix[:-1])+'.' table_prefix = backend.quote_name(table_prefix[:-1])+'.'
field_name = backend.quote_name(field_name) field_name = backend.quote_name(field_name)
if type(value) == datetime.datetime and backend.get_datetime_cast_sql():
cast_sql = backend.get_datetime_cast_sql()
else:
cast_sql = '%s'
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith') and backend.needs_upper_for_iops:
format = 'UPPER(%s%s) %s'
else:
format = '%s%s %s'
try: try:
return '%s%s %s' % (table_prefix, field_name, (backend.OPERATOR_MAPPING[lookup_type] % '%s')) return format % (table_prefix, field_name,
backend.OPERATOR_MAPPING[lookup_type] % cast_sql)
except KeyError: except KeyError:
pass pass
if lookup_type == 'in': if lookup_type == 'in':

View File

@ -8,12 +8,10 @@ except NameError:
from sets import Set as set # Python 2.3 fallback from sets import Set as set # Python 2.3 fallback
from itertools import chain from itertools import chain
from django.utils.datastructures import MultiValueDict from django.utils.datastructures import MultiValueDict
from django.utils.html import escape from django.utils.html import escape
from django.utils.translation import gettext from django.utils.translation import gettext
from django.utils.encoding import StrAndUnicode, smart_unicode from django.utils.encoding import StrAndUnicode, smart_unicode
from util import flatatt from util import flatatt
__all__ = ( __all__ = (

View File

@ -1,6 +1,7 @@
import sys, time import sys, time
from django.conf import settings from django.conf import settings
from django.db import connection, transaction, backend from django.db import connection, backend, get_creation_module
from django.core import management, mail
from django.core import management, mail from django.core import management, mail
from django.dispatch import dispatcher from django.dispatch import dispatcher
from django.test import signals from django.test import signals
@ -88,6 +89,12 @@ def get_postgresql_create_suffix():
return '' return ''
def create_test_db(verbosity=1, autoclobber=False): def create_test_db(verbosity=1, autoclobber=False):
# If the database backend wants to create the test DB itself, let it
creation_module = get_creation_module()
if hasattr(creation_module, "create_test_db"):
creation_module.create_test_db(settings, connection, backend, verbosity, autoclobber)
return
if verbosity >= 1: if verbosity >= 1:
print "Creating test database..." print "Creating test database..."
# If we're using SQLite, it's more convenient to test against an # If we're using SQLite, it's more convenient to test against an
@ -142,6 +149,12 @@ def create_test_db(verbosity=1, autoclobber=False):
cursor = connection.cursor() cursor = connection.cursor()
def destroy_test_db(old_database_name, verbosity=1): def destroy_test_db(old_database_name, verbosity=1):
# If the database wants to drop the test DB itself, let it
creation_module = get_creation_module()
if hasattr(creation_module, "destroy_test_db"):
creation_module.destroy_test_db(settings, connection, backend, old_database_name, verbosity)
return
# Unless we're using SQLite, remove the test database to clean up after # Unless we're using SQLite, remove the test database to clean up after
# ourselves. Connect to the previous database (not the test database) # ourselves. Connect to the previous database (not the test database)
# to do so, because it's not allowed to delete a database while being # to do so, because it's not allowed to delete a database while being

View File

@ -301,7 +301,7 @@ means it can run on a variety of server platforms.
If you want to use Django with a database, which is probably the case, you'll If you want to use Django with a database, which is probably the case, you'll
also need a database engine. PostgreSQL_ is recommended, because we're also need a database engine. PostgreSQL_ is recommended, because we're
PostgreSQL fans, and MySQL_ and `SQLite 3`_ are also supported. PostgreSQL fans, and MySQL_, `SQLite 3`_, and Oracle_ are also supported.
.. _Python: http://www.python.org/ .. _Python: http://www.python.org/
.. _Apache 2: http://httpd.apache.org/ .. _Apache 2: http://httpd.apache.org/
@ -310,6 +310,7 @@ PostgreSQL fans, and MySQL_ and `SQLite 3`_ are also supported.
.. _PostgreSQL: http://www.postgresql.org/ .. _PostgreSQL: http://www.postgresql.org/
.. _MySQL: http://www.mysql.com/ .. _MySQL: http://www.mysql.com/
.. _`SQLite 3`: http://www.sqlite.org/ .. _`SQLite 3`: http://www.sqlite.org/
.. _Oracle: http://www.oracle.com/
Do I lose anything by using Python 2.3 versus newer Python versions, such as Python 2.5? Do I lose anything by using Python 2.3 versus newer Python versions, such as Python 2.5?
---------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------

View File

@ -64,6 +64,8 @@ installed.
* If you're using SQLite, you'll need pysqlite_. Use version 2.0.3 or higher. * If you're using SQLite, you'll need pysqlite_. Use version 2.0.3 or higher.
* If you're using Oracle, you'll need cx_Oracle_, version 4.3.1 or higher.
.. _PostgreSQL: http://www.postgresql.org/ .. _PostgreSQL: http://www.postgresql.org/
.. _MySQL: http://www.mysql.com/ .. _MySQL: http://www.mysql.com/
.. _Django's ticket system: http://code.djangoproject.com/report/1 .. _Django's ticket system: http://code.djangoproject.com/report/1
@ -73,6 +75,7 @@ installed.
.. _SQLite: http://www.sqlite.org/ .. _SQLite: http://www.sqlite.org/
.. _pysqlite: http://initd.org/tracker/pysqlite .. _pysqlite: http://initd.org/tracker/pysqlite
.. _MySQL backend: ../databases/ .. _MySQL backend: ../databases/
.. _cx_Oracle: http://www.python.net/crew/atuining/cx_Oracle/
Remove any old versions of Django Remove any old versions of Django
================================= =================================

View File

@ -492,6 +492,11 @@ has ``null=True``, that means it has two possible values for "no data":
possible values for "no data;" Django convention is to use the empty possible values for "no data;" Django convention is to use the empty
string, not ``NULL``. string, not ``NULL``.
.. note::
Due to database limitations, when using the Oracle backend the
``null=True`` option will be coerced for string-based fields that can be
blank, and the value ``NULL`` will be stored to denote the empty string.
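For example, with the Oracle backend the following field (the model and field
names are illustrative) is treated as if ``null=True`` had been specified::

    from django.db import models

    class Article(models.Model):
        # '' and NULL are the same thing to Oracle, so null=True is coerced.
        headline = models.CharField(maxlength=100, blank=True)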
``blank`` ``blank``
~~~~~~~~~ ~~~~~~~~~
@ -586,6 +591,13 @@ scenes.
If ``True``, ``django-admin.py sqlindexes`` will output a ``CREATE INDEX`` If ``True``, ``django-admin.py sqlindexes`` will output a ``CREATE INDEX``
statement for this field. statement for this field.
``db_tablespace``
~~~~~~~~~~~~~~~~~
If this field is indexed, the name of the database tablespace to use for the
index. The default is the ``db_tablespace`` of the model, if any. If the
backend doesn't support tablespaces, this option is ignored.
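For example (the tablespace name is a placeholder your database administrator
would supply)::

    class Widget(models.Model):
        # The index for this field is created in the "indexes" tablespace on
        # backends that support tablespaces; the option is ignored elsewhere.
        name = models.CharField(maxlength=30, db_index=True, db_tablespace='indexes')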
``default`` ``default``
~~~~~~~~~~~ ~~~~~~~~~~~
@ -996,6 +1008,12 @@ If your database table name is an SQL reserved word, or contains characters
that aren't allowed in Python variable names -- notably, the hyphen -- that aren't allowed in Python variable names -- notably, the hyphen --
that's OK. Django quotes column and table names behind the scenes. that's OK. Django quotes column and table names behind the scenes.
``db_tablespace``
-----------------
The name of the database tablespace to use for the model. If the backend
doesn't support tablespaces, this option is ignored.
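For example (again, the tablespace name is just a placeholder)::

    class Receipt(models.Model):
        created = models.DateTimeField()

        class Meta:
            # Ignored on backends without tablespace support.
            db_tablespace = 'data01'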
``get_latest_by`` ``get_latest_by``
----------------- -----------------

View File

@ -244,9 +244,9 @@ DATABASE_ENGINE
Default: ``''`` (Empty string) Default: ``''`` (Empty string)
Which database backend to use. Either ``'postgresql_psycopg2'``, The database backend to use. Either ``'postgresql_psycopg2'``,
``'postgresql'``, ``'mysql'``, ``'mysql_old'``, ``'sqlite3'`` or ``'postgresql'``, ``'mysql'``, ``'mysql_old'``, ``'sqlite3'``,
``'ado_mssql'``. ``'oracle'``, or ``'ado_mssql'``.
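A hypothetical Oracle setup might look like this (all values are placeholders;
for Oracle, ``DATABASE_NAME`` is typically the TNS name or SID)::

    DATABASE_ENGINE = 'oracle'
    DATABASE_NAME = 'orcl'
    DATABASE_USER = 'django'
    DATABASE_PASSWORD = 'secret'
    DATABASE_HOST = ''             # empty if the name above is a full TNS entry
    DATABASE_PORT = ''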
DATABASE_HOST DATABASE_HOST
------------- -------------

View File

@ -0,0 +1,59 @@
"""
This is a basic model to test saving and loading boolean and date-related
types, which in the past were problematic for some database backends.
"""
from django.db import models
from django.conf import settings
class Donut(models.Model):
name = models.CharField(maxlength=100)
is_frosted = models.BooleanField(default=False)
has_sprinkles = models.NullBooleanField()
baked_date = models.DateField(null=True)
baked_time = models.TimeField(null=True)
consumed_at = models.DateTimeField(null=True)
class Meta:
ordering = ('consumed_at',)
def __str__(self):
return self.name
__test__ = {'API_TESTS': """
# No donuts are in the system yet.
>>> Donut.objects.all()
[]
>>> d = Donut(name='Apple Fritter')
# Ensure we're getting True and False, not 0 and 1
>>> d.is_frosted
False
>>> d.has_sprinkles
>>> d.has_sprinkles = True
>>> d.has_sprinkles == True
True
>>> d.save()
>>> d2 = Donut.objects.all()[0]
>>> d2
<Donut: Apple Fritter>
>>> d2.is_frosted == False
True
>>> d2.has_sprinkles == True
True
>>> import datetime
>>> d2.baked_date = datetime.date(year=1938, month=6, day=4)
>>> d2.baked_time = datetime.time(hour=5, minute=30)
>>> d2.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
>>> d2.save()
>>> d3 = Donut.objects.all()[0]
>>> d3.baked_date
datetime.date(1938, 6, 4)
>>> d3.baked_time
datetime.time(5, 30)
>>> d3.consumed_at
datetime.datetime(2007, 4, 20, 16, 19, 59)
"""}

View File

@ -15,6 +15,7 @@ from django.utils.functional import curry
from django.core import serializers from django.core import serializers
from django.db import transaction from django.db import transaction
from django.core import management from django.core import management
from django.conf import settings
from models import * from models import *
try: try:
@ -116,10 +117,13 @@ test_data = [
(data_obj, 31, DateTimeData, None), (data_obj, 31, DateTimeData, None),
(data_obj, 40, EmailData, "hovercraft@example.com"), (data_obj, 40, EmailData, "hovercraft@example.com"),
(data_obj, 41, EmailData, None), (data_obj, 41, EmailData, None),
(data_obj, 42, EmailData, ""),
(data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'), (data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
(data_obj, 51, FileData, None), (data_obj, 51, FileData, None),
(data_obj, 52, FileData, ""),
(data_obj, 60, FilePathData, "/foo/bar/whiz.txt"), (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
(data_obj, 61, FilePathData, None), (data_obj, 61, FilePathData, None),
(data_obj, 62, FilePathData, ""),
(data_obj, 70, DecimalData, decimal.Decimal('12.345')), (data_obj, 70, DecimalData, decimal.Decimal('12.345')),
(data_obj, 71, DecimalData, decimal.Decimal('-12.345')), (data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
(data_obj, 72, DecimalData, decimal.Decimal('0.0')), (data_obj, 72, DecimalData, decimal.Decimal('0.0')),
@ -146,6 +150,7 @@ test_data = [
(data_obj, 131, PositiveSmallIntegerData, None), (data_obj, 131, PositiveSmallIntegerData, None),
(data_obj, 140, SlugData, "this-is-a-slug"), (data_obj, 140, SlugData, "this-is-a-slug"),
(data_obj, 141, SlugData, None), (data_obj, 141, SlugData, None),
(data_obj, 142, SlugData, ""),
(data_obj, 150, SmallData, 12), (data_obj, 150, SmallData, 12),
(data_obj, 151, SmallData, -12), (data_obj, 151, SmallData, -12),
(data_obj, 152, SmallData, 0), (data_obj, 152, SmallData, 0),
@ -160,8 +165,10 @@ The end."""),
(data_obj, 171, TimeData, None), (data_obj, 171, TimeData, None),
(data_obj, 180, USStateData, "MA"), (data_obj, 180, USStateData, "MA"),
(data_obj, 181, USStateData, None), (data_obj, 181, USStateData, None),
(data_obj, 182, USStateData, ""),
(data_obj, 190, XMLData, "<foo></foo>"), (data_obj, 190, XMLData, "<foo></foo>"),
(data_obj, 191, XMLData, None), (data_obj, 191, XMLData, None),
(data_obj, 192, XMLData, ""),
(generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']), (generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
(generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']), (generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
@ -241,6 +248,15 @@ The end."""),
# (pk_obj, 790, XMLPKData, "<foo></foo>"), # (pk_obj, 790, XMLPKData, "<foo></foo>"),
] ]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if settings.DATABASE_ENGINE == 'oracle':
test_data = [data for data in test_data
if not (data[0] == data_obj and
data[2]._meta.get_field('data').empty_strings_allowed and
data[3] is None)]
# Dynamically create serializer tests to ensure that all # Dynamically create serializer tests to ensure that all
# registered serializers are automatically tested. # registered serializers are automatically tested.
class SerializerTests(unittest.TestCase): class SerializerTests(unittest.TestCase):