From ac64e91a0cadc57f4bc5cd5d66955832320ca7a1 Mon Sep 17 00:00:00 2001 From: Malcolm Tredinnick Date: Sat, 23 Jun 2007 14:16:00 +0000 Subject: [PATCH] Merged boulder-oracle-sprint branch (r3965:5512) back into trunk. All expected tests pass for all databases. git-svn-id: http://code.djangoproject.com/svn/django/trunk@5519 bcc190cf-cafb-0310-a4f2-bffc1f526a37 --- django/contrib/admin/models.py | 2 +- django/core/management.py | 112 ++++- django/db/backends/ado_mssql/base.py | 22 + django/db/backends/dummy/base.py | 1 + django/db/backends/mysql/base.py | 23 +- django/db/backends/mysql_old/base.py | 23 +- django/db/backends/oracle/base.py | 448 ++++++++++++++++-- django/db/backends/oracle/client.py | 11 +- django/db/backends/oracle/creation.py | 326 ++++++++++++- django/db/backends/oracle/introspection.py | 82 +++- django/db/backends/postgresql/base.py | 31 +- .../db/backends/postgresql_psycopg2/base.py | 25 +- django/db/backends/sqlite3/base.py | 23 +- django/db/backends/util.py | 11 + django/db/models/base.py | 19 +- django/db/models/fields/__init__.py | 34 +- django/db/models/fields/related.py | 5 +- django/db/models/options.py | 11 +- django/db/models/query.py | 82 +++- django/newforms/widgets.py | 2 - django/test/utils.py | 15 +- docs/faq.txt | 3 +- docs/install.txt | 3 + docs/model-api.txt | 18 + docs/settings.txt | 6 +- tests/regressiontests/datatypes/__init__.py | 0 tests/regressiontests/datatypes/models.py | 59 +++ .../serializers_regress/tests.py | 16 + 28 files changed, 1240 insertions(+), 173 deletions(-) create mode 100644 tests/regressiontests/datatypes/__init__.py create mode 100644 tests/regressiontests/datatypes/models.py diff --git a/django/contrib/admin/models.py b/django/contrib/admin/models.py index 022d20bed9..7144c0b20b 100644 --- a/django/contrib/admin/models.py +++ b/django/contrib/admin/models.py @@ -9,7 +9,7 @@ DELETION = 3 class LogEntryManager(models.Manager): def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''): - e = self.model(None, None, user_id, content_type_id, object_id, object_repr[:200], action_flag, change_message) + e = self.model(None, None, user_id, content_type_id, str(object_id), object_repr[:200], action_flag, change_message) e.save() class LogEntry(models.Model): diff --git a/django/core/management.py b/django/core/management.py index d8bf8ea00b..816b51352e 100644 --- a/django/core/management.py +++ b/django/core/management.py @@ -59,12 +59,16 @@ def _is_valid_dir_name(s): def _get_installed_models(table_list): "Gets a set of all models that are installed, given a list of existing tables" - from django.db import models + from django.db import backend, models all_models = [] for app in models.get_apps(): for model in models.get_models(app): all_models.append(model) - return set([m for m in all_models if m._meta.db_table in table_list]) + if backend.uses_case_insensitive_names: + converter = str.upper + else: + converter = lambda x: x + return set([m for m in all_models if converter(m._meta.db_table) in map(converter, table_list)]) def _get_table_list(): "Gets a list of all db tables that are physically installed." @@ -100,6 +104,7 @@ get_rel_data_type = lambda f: (f.get_internal_type() in ('AutoField', 'PositiveI def get_sql_create(app): "Returns a list of the CREATE TABLE SQL statements for the given app." 
from django.db import get_creation_module, models + data_types = get_creation_module().DATA_TYPES if not data_types: @@ -171,15 +176,20 @@ def _get_sql_model_create(model, known_models=set()): rel_field = f data_type = f.get_internal_type() col_type = data_types[data_type] + tablespace = f.db_tablespace or opts.db_tablespace if col_type is not None: # Make the definition (e.g. 'foo VARCHAR(30)') for this field. field_output = [style.SQL_FIELD(backend.quote_name(f.column)), style.SQL_COLTYPE(col_type % rel_field.__dict__)] field_output.append(style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or ''))) - if f.unique: + if f.unique and (not f.primary_key or backend.allows_unique_and_pk): field_output.append(style.SQL_KEYWORD('UNIQUE')) if f.primary_key: field_output.append(style.SQL_KEYWORD('PRIMARY KEY')) + if tablespace and backend.supports_tablespaces and (f.unique or f.primary_key) and backend.autoindexes_primary_keys: + # We must specify the index tablespace inline, because we + # won't be generating a CREATE INDEX statement for this field. + field_output.append(backend.get_tablespace_sql(tablespace, inline=True)) if f.rel: if f.rel.to in known_models: field_output.append(style.SQL_KEYWORD('REFERENCES') + ' ' + \ @@ -203,9 +213,19 @@ def _get_sql_model_create(model, known_models=set()): full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(backend.quote_name(opts.db_table)) + ' ('] for i, line in enumerate(table_output): # Combine and add commas. full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or '')) - full_statement.append(');') + full_statement.append(')') + if opts.db_tablespace and backend.supports_tablespaces: + full_statement.append(backend.get_tablespace_sql(opts.db_tablespace)) + full_statement.append(';') final_output.append('\n'.join(full_statement)) + if opts.has_auto_field and hasattr(backend, 'get_autoinc_sql'): + # Add any extra SQL needed to support auto-incrementing primary keys + autoinc_sql = backend.get_autoinc_sql(opts.db_table) + if autoinc_sql: + for stmt in autoinc_sql: + final_output.append(stmt) + return final_output, pending_references def _get_sql_for_pending_references(model, pending_references): @@ -213,6 +233,7 @@ def _get_sql_for_pending_references(model, pending_references): Get any ALTER TABLE statements to add constraints after the fact. """ from django.db import backend, get_creation_module + from django.db.backends.util import truncate_name data_types = get_creation_module().DATA_TYPES final_output = [] @@ -229,7 +250,7 @@ def _get_sql_for_pending_references(model, pending_references): # So we are careful with character usage here. 
r_name = '%s_refs_%s_%x' % (r_col, col, abs(hash((r_table, table)))) final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \ - (backend.quote_name(r_table), r_name, + (backend.quote_name(r_table), truncate_name(r_name, backend.get_max_name_length()), backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col), backend.get_deferrable_sql())) del pending_references[model] @@ -245,12 +266,18 @@ def _get_many_to_many_sql_for_model(model): final_output = [] for f in opts.many_to_many: if not isinstance(f.rel, generic.GenericRel): + tablespace = f.db_tablespace or opts.db_tablespace + if tablespace and backend.supports_tablespaces and backend.autoindexes_primary_keys: + tablespace_sql = ' ' + backend.get_tablespace_sql(tablespace, inline=True) + else: + tablespace_sql = '' table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \ style.SQL_TABLE(backend.quote_name(f.m2m_db_table())) + ' ('] - table_output.append(' %s %s %s,' % \ + table_output.append(' %s %s %s%s,' % \ (style.SQL_FIELD(backend.quote_name('id')), style.SQL_COLTYPE(data_types['AutoField']), - style.SQL_KEYWORD('NOT NULL PRIMARY KEY'))) + style.SQL_KEYWORD('NOT NULL PRIMARY KEY'), + tablespace_sql)) table_output.append(' %s %s %s %s (%s)%s,' % \ (style.SQL_FIELD(backend.quote_name(f.m2m_column_name())), style.SQL_COLTYPE(data_types[get_rel_data_type(opts.pk)] % opts.pk.__dict__), @@ -265,17 +292,30 @@ def _get_many_to_many_sql_for_model(model): style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)), style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column)), backend.get_deferrable_sql())) - table_output.append(' %s (%s, %s)' % \ + table_output.append(' %s (%s, %s)%s' % \ (style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(backend.quote_name(f.m2m_column_name())), - style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())))) - table_output.append(');') + style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())), + tablespace_sql)) + table_output.append(')') + if opts.db_tablespace and backend.supports_tablespaces: + # f.db_tablespace is only for indices, so ignore its value here. + table_output.append(backend.get_tablespace_sql(opts.db_tablespace)) + table_output.append(';') final_output.append('\n'.join(table_output)) + + # Add any extra SQL needed to support auto-incrementing PKs + autoinc_sql = backend.get_autoinc_sql(f.m2m_db_table()) + if autoinc_sql: + for stmt in autoinc_sql: + final_output.append(stmt) + return final_output def get_sql_delete(app): "Returns a list of the DROP TABLE SQL statements for the given app." 
from django.db import backend, connection, models, get_introspection_module + from django.db.backends.util import truncate_name introspection = get_introspection_module() # This should work even if a connection isn't available @@ -289,6 +329,10 @@ def get_sql_delete(app): table_names = introspection.get_table_list(cursor) else: table_names = [] + if backend.uses_case_insensitive_names: + table_name_converter = str.upper + else: + table_name_converter = lambda x: x output = [] @@ -298,7 +342,7 @@ def get_sql_delete(app): references_to_delete = {} app_models = models.get_models(app) for model in app_models: - if cursor and model._meta.db_table in table_names: + if cursor and table_name_converter(model._meta.db_table) in table_names: # The table exists, so it needs to be dropped opts = model._meta for f in opts.fields: @@ -308,7 +352,7 @@ def get_sql_delete(app): to_delete.add(model) for model in app_models: - if cursor and model._meta.db_table in table_names: + if cursor and table_name_converter(model._meta.db_table) in table_names: # Drop the table now output.append('%s %s;' % (style.SQL_KEYWORD('DROP TABLE'), style.SQL_TABLE(backend.quote_name(model._meta.db_table)))) @@ -318,20 +362,26 @@ def get_sql_delete(app): col = f.column r_table = model._meta.db_table r_col = model._meta.get_field(f.rel.field_name).column + r_name = '%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table)))) output.append('%s %s %s %s;' % \ (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(backend.quote_name(table)), style.SQL_KEYWORD(backend.get_drop_foreignkey_sql()), - style.SQL_FIELD(backend.quote_name('%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table)))))))) + style.SQL_FIELD(truncate_name(r_name, backend.get_max_name_length())))) del references_to_delete[model] + if model._meta.has_auto_field and hasattr(backend, 'get_drop_sequence'): + output.append(backend.get_drop_sequence(model._meta.db_table)) # Output DROP TABLE statements for many-to-many tables. 
for model in app_models: opts = model._meta for f in opts.many_to_many: - if cursor and f.m2m_db_table() in table_names: + if cursor and table_name_converter(f.m2m_db_table()) in table_names: output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'), style.SQL_TABLE(backend.quote_name(f.m2m_db_table())))) + if hasattr(backend, 'get_drop_sequence'): + output.append(backend.get_drop_sequence("%s_%s" % (model._meta.db_table, f.column))) + app_label = app_models[0]._meta.app_label @@ -430,14 +480,20 @@ def get_sql_indexes_for_model(model): output = [] for f in model._meta.fields: - if f.db_index: + if f.db_index and not ((f.primary_key or f.unique) and backend.autoindexes_primary_keys): unique = f.unique and 'UNIQUE ' or '' + tablespace = f.db_tablespace or model._meta.db_tablespace + if tablespace and backend.supports_tablespaces: + tablespace_sql = ' ' + backend.get_tablespace_sql(tablespace) + else: + tablespace_sql = '' output.append( style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \ style.SQL_TABLE(backend.quote_name('%s_%s' % (model._meta.db_table, f.column))) + ' ' + \ style.SQL_KEYWORD('ON') + ' ' + \ style.SQL_TABLE(backend.quote_name(model._meta.db_table)) + ' ' + \ - "(%s);" % style.SQL_FIELD(backend.quote_name(f.column)) + "(%s)" % style.SQL_FIELD(backend.quote_name(f.column)) + \ + "%s;" % tablespace_sql ) return output @@ -461,7 +517,7 @@ def _emit_post_sync_signal(created_models, verbosity, interactive): def syncdb(verbosity=1, interactive=True): "Creates the database tables for all apps in INSTALLED_APPS whose tables haven't already been created." - from django.db import connection, transaction, models, get_creation_module + from django.db import backend, connection, transaction, models, get_creation_module from django.conf import settings disable_termcolors() @@ -484,6 +540,10 @@ def syncdb(verbosity=1, interactive=True): # Get a list of all existing database tables, # so we know what needs to be added. table_list = _get_table_list() + if backend.uses_case_insensitive_names: + table_name_converter = str.upper + else: + table_name_converter = lambda x: x # Get a list of already installed *models* so that references work right. seen_models = _get_installed_models(table_list) @@ -498,7 +558,7 @@ def syncdb(verbosity=1, interactive=True): # Create the model's database table, if it doesn't already exist. if verbosity >= 2: print "Processing %s.%s model" % (app_name, model._meta.object_name) - if model._meta.db_table in table_list: + if table_name_converter(model._meta.db_table) in table_list: continue sql, references = _get_sql_model_create(model, seen_models) seen_models.add(model) @@ -510,7 +570,7 @@ def syncdb(verbosity=1, interactive=True): print "Creating table %s" % model._meta.db_table for statement in sql: cursor.execute(statement) - table_list.append(model._meta.db_table) + table_list.append(table_name_converter(model._meta.db_table)) # Create the m2m tables. This must be done after all tables have been created # to ensure that all referred tables will exist. @@ -829,7 +889,7 @@ def inspectdb(): except NotImplementedError: indexes = {} for i, row in enumerate(introspection_module.get_table_description(cursor, table_name)): - att_name = row[0] + att_name = row[0].lower() comment_notes = [] # Holds Field notes, to be displayed in a Python comment. extra_params = {} # Holds Field parameters such as 'db_column'. 
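
(Aside: several hunks above introduce the same pattern — when a backend reports identifiers case-insensitively, table names coming from the model layer and from introspection are pushed through a common converter before being compared. A minimal standalone sketch of that pattern, with hypothetical table names, assuming only what the hunks above show:)

    # Sketch of the uses_case_insensitive_names handling added to
    # _get_installed_models(), get_sql_delete() and syncdb() above.
    # The table names below are hypothetical examples.
    uses_case_insensitive_names = True        # e.g. the Oracle backend

    if uses_case_insensitive_names:
        table_name_converter = str.upper
    else:
        table_name_converter = lambda x: x

    table_list = ['AUTH_USER', 'DJANGO_SITE']  # as reported by introspection
    db_table = 'auth_user'                     # as derived from the model
    # Both sides go through the same converter, so the comparison matches
    # regardless of the case the backend uses for identifiers.
    print table_name_converter(db_table) in map(table_name_converter, table_list)
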
@@ -1322,7 +1382,7 @@ def load_data(fixture_labels, verbosity=1): # Keep a count of the installed objects and fixtures count = [0,0] models = set() - + humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path' # Get a cursor (even though we don't need one yet). This has @@ -1400,7 +1460,7 @@ def load_data(fixture_labels, verbosity=1): if verbosity > 1: print "No %s fixture '%s' in %s." % \ (format, fixture_name, humanize(fixture_dir)) - + if count[0] > 0: sequence_sql = backend.get_sql_sequence_reset(style, models) if sequence_sql: @@ -1408,10 +1468,10 @@ def load_data(fixture_labels, verbosity=1): print "Resetting sequences" for line in sequence_sql: cursor.execute(line) - + transaction.commit() transaction.leave_transaction_management() - + if count[0] == 0: if verbosity > 0: print "No fixtures found." @@ -1626,7 +1686,9 @@ def execute_from_command_line(action_mapping=DEFAULT_ACTION_MAPPING, argv=None): if not mod_list: parser.print_usage_and_exit() if action not in NO_SQL_TRANSACTION: - print style.SQL_KEYWORD("BEGIN;") + from django.db import backend + if backend.get_start_transaction_sql(): + print style.SQL_KEYWORD(backend.get_start_transaction_sql()) for mod in mod_list: if action == 'reset': output = action_mapping[action](mod, options.interactive) diff --git a/django/db/backends/ado_mssql/base.py b/django/db/backends/ado_mssql/base.py index 52363ed705..0deb6aae64 100644 --- a/django/db/backends/ado_mssql/base.py +++ b/django/db/backends/ado_mssql/base.py @@ -89,7 +89,14 @@ class DatabaseWrapper(local): self.connection.close() self.connection = None +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = True +needs_datetime_string_cast = True +needs_upper_for_iops = False supports_constraints = True +supports_tablespaces = True +uses_case_insensitive_names = False def quote_name(name): if name.startswith('[') and name.endswith(']'): @@ -117,6 +124,9 @@ def get_date_trunc_sql(lookup_type, field_name): if lookup_type=='day': return "Convert(datetime, Convert(varchar(12), %s))" % field_name +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): # TODO: This is a guess. Make sure this is correct. 
sql = "LIMIT %s" % limit @@ -139,6 +149,18 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return None + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_tablespace_sql(tablespace, inline=False): + return "ON %s" % quote_name(tablespace) + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables diff --git a/django/db/backends/dummy/base.py b/django/db/backends/dummy/base.py index d0ec897407..6a190cf59c 100644 --- a/django/db/backends/dummy/base.py +++ b/django/db/backends/dummy/base.py @@ -33,6 +33,7 @@ class DatabaseWrapper: pass # close() supports_constraints = False +supports_tablespaces = False quote_name = complain dictfetchone = complain dictfetchmany = complain diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py index d4cb1fa964..b0ca7994b7 100644 --- a/django/db/backends/mysql/base.py +++ b/django/db/backends/mysql/base.py @@ -134,7 +134,14 @@ class DatabaseWrapper(local): self.server_version = tuple([int(x) for x in m.groups()]) return self.server_version +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = False +needs_datetime_string_cast = True # MySQLdb requires a typecast for dates +needs_upper_for_iops = False supports_constraints = True +supports_tablespaces = False +uses_case_insensitive_names = False def quote_name(name): if name.startswith("`") and name.endswith("`"): @@ -167,6 +174,9 @@ def get_date_trunc_sql(lookup_type, field_name): sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): sql = "LIMIT " if offset and offset != 0: @@ -188,11 +198,20 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return None; + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables themselves) and put the database in an empty 'initial' state - + """ # NB: The generated SQL below is specific to MySQL # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements @@ -204,7 +223,7 @@ def get_sql_flush(style, tables, sequences): style.SQL_FIELD(quote_name(table)) ) for table in tables] + \ ['SET FOREIGN_KEY_CHECKS = 1;'] - + # 'ALTER TABLE table AUTO_INCREMENT = 1;'... 
style SQL statements # to reset sequence indices sql.extend(["%s %s %s %s %s;" % \ diff --git a/django/db/backends/mysql_old/base.py b/django/db/backends/mysql_old/base.py index ac3b75efde..33960827ee 100644 --- a/django/db/backends/mysql_old/base.py +++ b/django/db/backends/mysql_old/base.py @@ -135,7 +135,14 @@ class DatabaseWrapper(local): self.server_version = tuple([int(x) for x in m.groups()]) return self.server_version +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = False +needs_datetime_string_cast = True # MySQLdb requires a typecast for dates +needs_upper_for_iops = False supports_constraints = True +supports_tablespaces = False +uses_case_insensitive_names = False def quote_name(name): if name.startswith("`") and name.endswith("`"): @@ -168,6 +175,9 @@ def get_date_trunc_sql(lookup_type, field_name): sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): sql = "LIMIT " if offset and offset != 0: @@ -189,11 +199,20 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return None; + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables themselves) and put the database in an empty 'initial' state - + """ # NB: The generated SQL below is specific to MySQL # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements @@ -205,7 +224,7 @@ def get_sql_flush(style, tables, sequences): style.SQL_FIELD(quote_name(table)) ) for table in tables] + \ ['SET FOREIGN_KEY_CHECKS = 1;'] - + # 'ALTER TABLE table AUTO_INCREMENT = 1;'... style SQL statements # to reset sequence indices sql.extend(["%s %s %s %s %s;" % \ diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 2bc88bb7b9..48b3c27355 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -4,12 +4,16 @@ Oracle database backend for Django. 
Requires cx_Oracle: http://www.python.net/crew/atuining/cx_Oracle/ """ +from django.conf import settings from django.db.backends import util try: import cx_Oracle as Database except ImportError, e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured, "Error loading cx_Oracle module: %s" % e +import datetime +from django.utils.datastructures import SortedDict + DatabaseError = Database.Error IntegrityError = Database.IntegrityError @@ -31,7 +35,6 @@ class DatabaseWrapper(local): return self.connection is not None def cursor(self): - from django.conf import settings if not self._valid_connection(): if len(settings.DATABASE_HOST.strip()) == 0: settings.DATABASE_HOST = 'localhost' @@ -41,25 +44,37 @@ class DatabaseWrapper(local): else: conn_string = "%s/%s@%s" % (settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME) self.connection = Database.connect(conn_string, **self.options) - return FormatStylePlaceholderCursor(self.connection) + cursor = FormatStylePlaceholderCursor(self.connection) + # default arraysize of 1 is highly sub-optimal + cursor.arraysize = 100 + # set oracle date to ansi date format + cursor.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD'") + cursor.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'") + if settings.DEBUG: + return util.CursorDebugWrapper(cursor, self) + return cursor def _commit(self): if self.connection is not None: - self.connection.commit() + return self.connection.commit() def _rollback(self): if self.connection is not None: - try: - self.connection.rollback() - except Database.NotSupportedError: - pass + return self.connection.rollback() def close(self): if self.connection is not None: self.connection.close() self.connection = None +allows_group_by_ordinal = False +allows_unique_and_pk = False # Suppress UNIQUE/PK for Oracle (ORA-02259) +autoindexes_primary_keys = True +needs_datetime_string_cast = False +needs_upper_for_iops = True supports_constraints = True +supports_tablespaces = True +uses_case_insensitive_names = True class FormatStylePlaceholderCursor(Database.Cursor): """ @@ -67,45 +82,75 @@ class FormatStylePlaceholderCursor(Database.Cursor): This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". """ + def _rewrite_args(self, query, params=None): + if params is None: + params = [] + else: + # cx_Oracle can't handle unicode parameters, so cast to str for now + for i, param in enumerate(params): + if type(param) == unicode: + try: + params[i] = param.encode('utf-8') + except UnicodeError: + params[i] = str(param) + args = [(':arg%d' % i) for i in range(len(params))] + query = query % tuple(args) + # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it + # it does want a trailing ';' but not a trailing '/'. However, these + # characters must be included in the original query in case the query + # is being passed to SQL*Plus. 
+ if query.endswith(';') or query.endswith('/'): + query = query[:-1] + return query, params + def execute(self, query, params=None): - if params is None: params = [] - query = self.convert_arguments(query, len(params)) + query, params = self._rewrite_args(query, params) return Database.Cursor.execute(self, query, params) def executemany(self, query, params=None): - if params is None: params = [] - query = self.convert_arguments(query, len(params[0])) + query, params = self._rewrite_args(query, params) return Database.Cursor.executemany(self, query, params) - def convert_arguments(self, query, num_params): - # replace occurances of "%s" with ":arg" - Oracle requires colons for parameter placeholders. - args = [':arg' for i in range(num_params)] - return query % tuple(args) - def quote_name(name): - return name + # SQL92 requires delimited (quoted) names to be case-sensitive. When + # not quoted, Oracle has case-insensitive behavior for identifiers, but + # always defaults to uppercase. + # We simplify things by making Oracle identifiers always uppercase. + if not name.startswith('"') and not name.endswith('"'): + name = '"%s"' % util.truncate_name(name.upper(), get_max_name_length()) + return name.upper() dictfetchone = util.dictfetchone dictfetchmany = util.dictfetchmany dictfetchall = util.dictfetchall def get_last_insert_id(cursor, table_name, pk_name): - query = "SELECT %s_sq.currval from dual" % table_name - cursor.execute(query) + sq_name = util.truncate_name(table_name, get_max_name_length()-3) + cursor.execute('SELECT %s_sq.currval FROM dual' % sq_name) return cursor.fetchone()[0] def get_date_extract_sql(lookup_type, table_name): # lookup_type is 'year', 'month', 'day' - # http://www.psoug.org/reference/date_func.html + # http://download-east.oracle.com/docs/cd/B10501_01/server.920/a96540/functions42a.htm#1017163 return "EXTRACT(%s FROM %s)" % (lookup_type, table_name) def get_date_trunc_sql(lookup_type, field_name): - return "EXTRACT(%s FROM TRUNC(%s))" % (lookup_type, field_name) + # lookup_type is 'year', 'month', 'day' + # Oracle uses TRUNC() for both dates and numbers. + # http://download-east.oracle.com/docs/cd/B10501_01/server.920/a96540/functions155a.htm#SQLRF06151 + if lookup_type == 'day': + sql = 'TRUNC(%s)' % (field_name,) + else: + sql = "TRUNC(%s, '%s')" % (field_name, lookup_type) + return sql + +def get_datetime_cast_sql(): + return "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" def get_limit_offset_sql(limit, offset=None): # Limits and offset are too complicated to be handled here. - # Instead, they are handled in django/db/query.py. - pass + # Instead, they are handled in django/db/backends/oracle/query.py. + return "" def get_random_function_sql(): return "DBMS_RANDOM.RANDOM" @@ -117,40 +162,363 @@ def get_fulltext_search_sql(field_name): raise NotImplementedError def get_drop_foreignkey_sql(): - return "DROP FOREIGN KEY" + return "DROP CONSTRAINT" def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return 30 + +def get_start_transaction_sql(): + return None + +def get_tablespace_sql(tablespace, inline=False): + return "%sTABLESPACE %s" % ((inline and "USING INDEX " or ""), quote_name(tablespace)) + +def get_autoinc_sql(table): + # To simulate auto-incrementing primary keys in Oracle, we have to + # create a sequence and a trigger. 
+ sq_name = get_sequence_name(table) + tr_name = get_trigger_name(table) + sequence_sql = 'CREATE SEQUENCE %s;' % sq_name + trigger_sql = """CREATE OR REPLACE TRIGGER %s + BEFORE INSERT ON %s + FOR EACH ROW + WHEN (new.id IS NULL) + BEGIN + SELECT %s.nextval INTO :new.id FROM dual; + END; + /""" % (tr_name, quote_name(table), sq_name) + return sequence_sql, trigger_sql + +def get_drop_sequence(table): + return "DROP SEQUENCE %s;" % quote_name(get_sequence_name(table)) + +def _get_sequence_reset_sql(): + # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. + return """ + DECLARE + startvalue integer; + cval integer; + BEGIN + LOCK TABLE %(table)s IN SHARE MODE; + SELECT NVL(MAX(id), 0) INTO startvalue FROM %(table)s; + SELECT %(sequence)s.nextval INTO cval FROM dual; + cval := startvalue - cval; + IF cval != 0 THEN + EXECUTE IMMEDIATE 'ALTER SEQUENCE %(sequence)s MINVALUE 0 INCREMENT BY '||cval; + SELECT %(sequence)s.nextval INTO cval FROM dual; + EXECUTE IMMEDIATE 'ALTER SEQUENCE %(sequence)s INCREMENT BY 1'; + END IF; + COMMIT; + END; + /""" + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables themselves) and put the database in an empty 'initial' state """ - # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements - # TODO - SQL not actually tested against Oracle yet! - # TODO - autoincrement indices reset required? See other get_sql_flush() implementations - sql = ['%s %s;' % \ - (style.SQL_KEYWORD('TRUNCATE'), - style.SQL_FIELD(quote_name(table)) - ) for table in tables] + # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', + # 'TRUNCATE z;'... style SQL statements + if tables: + # Oracle does support TRUNCATE, but it seems to get us into + # FK referential trouble, whereas DELETE FROM table works. + sql = ['%s %s %s;' % \ + (style.SQL_KEYWORD('DELETE'), + style.SQL_KEYWORD('FROM'), + style.SQL_FIELD(quote_name(table)) + ) for table in tables] + # Since we've just deleted all the rows, running our sequence + # ALTER code will reset the sequence to 0. + for sequence_info in sequences: + table_name = sequence_info['table'] + seq_name = get_sequence_name(table_name) + query = _get_sequence_reset_sql() % {'sequence':seq_name, + 'table':quote_name(table_name)} + sql.append(query) + return sql + else: + return [] + +def get_sequence_name(table): + name_length = get_max_name_length() - 3 + return '%s_SQ' % util.truncate_name(table, name_length).upper() def get_sql_sequence_reset(style, model_list): "Returns a list of the SQL statements to reset sequences for the given models." - # No sequence reset required - return [] + from django.db import models + output = [] + query = _get_sequence_reset_sql() + for model in model_list: + for f in model._meta.fields: + if isinstance(f, models.AutoField): + sequence_name = get_sequence_name(model._meta.db_table) + output.append(query % {'sequence':sequence_name, + 'table':model._meta.db_table}) + break # Only one AutoField is allowed per model, so don't bother continuing. + for f in model._meta.many_to_many: + sequence_name = get_sequence_name(f.m2m_db_table()) + output.append(query % {'sequence':sequence_name, + 'table':f.m2m_db_table()}) + return output + +def get_trigger_name(table): + name_length = get_max_name_length() - 3 + return '%s_TR' % util.truncate_name(table, name_length).upper() + +def get_query_set_class(DefaultQuerySet): + "Create a custom QuerySet class for Oracle." 
+ + from django.db import backend, connection + from django.db.models.query import EmptyResultSet, GET_ITERATOR_CHUNK_SIZE, quote_only_if_word + + class OracleQuerySet(DefaultQuerySet): + + def iterator(self): + "Performs the SELECT database lookup of this QuerySet." + + from django.db.models.query import get_cached_row + + # self._select is a dictionary, and dictionaries' key order is + # undefined, so we convert it to a list of tuples. + extra_select = self._select.items() + + full_query = None + + try: + try: + select, sql, params, full_query = self._get_sql_clause(get_full_query=True) + except TypeError: + select, sql, params = self._get_sql_clause() + except EmptyResultSet: + raise StopIteration + if not full_query: + full_query = "SELECT %s%s\n%s" % \ + ((self._distinct and "DISTINCT " or ""), + ', '.join(select), sql) + + cursor = connection.cursor() + cursor.execute(full_query, params) + + fill_cache = self._select_related + fields = self.model._meta.fields + index_end = len(fields) + + # so here's the logic; + # 1. retrieve each row in turn + # 2. convert NCLOBs + + while 1: + rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) + if not rows: + raise StopIteration + for row in rows: + row = self.resolve_columns(row, fields) + if fill_cache: + obj, index_end = get_cached_row(klass=self.model, row=row, + index_start=0, max_depth=self._max_related_depth) + else: + obj = self.model(*row[:index_end]) + for i, k in enumerate(extra_select): + setattr(obj, k[0], row[index_end+i]) + yield obj + + + def _get_sql_clause(self, get_full_query=False): + from django.db.models.query import fill_table_cache, \ + handle_legacy_orderlist, orderfield2column + + opts = self.model._meta + + # Construct the fundamental parts of the query: SELECT X FROM Y WHERE Z. + select = ["%s.%s" % (backend.quote_name(opts.db_table), backend.quote_name(f.column)) for f in opts.fields] + tables = [quote_only_if_word(t) for t in self._tables] + joins = SortedDict() + where = self._where[:] + params = self._params[:] + + # Convert self._filters into SQL. + joins2, where2, params2 = self._filters.get_sql(opts) + joins.update(joins2) + where.extend(where2) + params.extend(params2) + + # Add additional tables and WHERE clauses based on select_related. + if self._select_related: + fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table]) + + # Add any additional SELECTs. + if self._select: + select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()]) + + # Start composing the body of the SQL statement. + sql = [" FROM", backend.quote_name(opts.db_table)] + + # Compose the join dictionary into SQL describing the joins. + if joins: + sql.append(" ".join(["%s %s %s ON %s" % (join_type, table, alias, condition) + for (alias, (table, join_type, condition)) in joins.items()])) + + # Compose the tables clause into SQL. + if tables: + sql.append(", " + ", ".join(tables)) + + # Compose the where clause into SQL. + if where: + sql.append(where and "WHERE " + " AND ".join(where)) + + # ORDER BY clause + order_by = [] + if self._order_by is not None: + ordering_to_use = self._order_by + else: + ordering_to_use = opts.ordering + for f in handle_legacy_orderlist(ordering_to_use): + if f == '?': # Special case. + order_by.append(backend.get_random_function_sql()) + else: + if f.startswith('-'): + col_name = f[1:] + order = "DESC" + else: + col_name = f + order = "ASC" + if "." 
in col_name: + table_prefix, col_name = col_name.split('.', 1) + table_prefix = backend.quote_name(table_prefix) + '.' + else: + # Use the database table as a column prefix if it wasn't given, + # and if the requested column isn't a custom SELECT. + if "." not in col_name and col_name not in (self._select or ()): + table_prefix = backend.quote_name(opts.db_table) + '.' + else: + table_prefix = '' + order_by.append('%s%s %s' % (table_prefix, backend.quote_name(orderfield2column(col_name, opts)), order)) + if order_by: + sql.append("ORDER BY " + ", ".join(order_by)) + + # Look for column name collisions in the select elements + # and fix them with an AS alias. This allows us to do a + # SELECT * later in the paging query. + cols = [clause.split('.')[-1] for clause in select] + for index, col in enumerate(cols): + if cols.count(col) > 1: + col = '%s%d' % (col.replace('"', ''), index) + cols[index] = col + select[index] = '%s AS %s' % (select[index], col) + + # LIMIT and OFFSET clauses + # To support limits and offsets, Oracle requires some funky rewriting of an otherwise normal looking query. + select_clause = ",".join(select) + distinct = (self._distinct and "DISTINCT " or "") + + if order_by: + order_by_clause = " OVER (ORDER BY %s )" % (", ".join(order_by)) + else: + #Oracle's row_number() function always requires an order-by clause. + #So we need to define a default order-by, since none was provided. + order_by_clause = " OVER (ORDER BY %s.%s)" % \ + (backend.quote_name(opts.db_table), + backend.quote_name(opts.fields[0].db_column or opts.fields[0].column)) + # limit_and_offset_clause + if self._limit is None: + assert self._offset is None, "'offset' is not allowed without 'limit'" + + if self._offset is not None: + offset = int(self._offset) + else: + offset = 0 + if self._limit is not None: + limit = int(self._limit) + else: + limit = None + + limit_and_offset_clause = '' + if limit is not None: + limit_and_offset_clause = "WHERE rn > %s AND rn <= %s" % (offset, limit+offset) + elif offset: + limit_and_offset_clause = "WHERE rn > %s" % (offset) + + if len(limit_and_offset_clause) > 0: + fmt = \ +"""SELECT * FROM + (SELECT %s%s, + ROW_NUMBER()%s AS rn + %s) +%s""" + full_query = fmt % (distinct, select_clause, + order_by_clause, ' '.join(sql).strip(), + limit_and_offset_clause) + else: + full_query = None + + if get_full_query: + return select, " ".join(sql), params, full_query + else: + return select, " ".join(sql), params + + def resolve_columns(self, row, fields=()): + from django.db.models.fields import DateField, DateTimeField, \ + TimeField, BooleanField, NullBooleanField, DecimalField, Field + values = [] + for value, field in map(None, row, fields): + if isinstance(value, Database.LOB): + value = value.read() + # Oracle stores empty strings as null. We need to undo this in + # order to adhere to the Django convention of using the empty + # string instead of null, but only if the field accepts the + # empty string. + if value is None and isinstance(field, Field) and field.empty_strings_allowed: + value = '' + # Convert 1 or 0 to True or False + elif value in (1, 0) and isinstance(field, (BooleanField, NullBooleanField)): + value = bool(value) + # Convert floats to decimals + elif value is not None and isinstance(field, DecimalField): + value = util.typecast_decimal(field.format_number(value)) + # cx_Oracle always returns datetime.datetime objects for + # DATE and TIMESTAMP columns, but Django wants to see a + # python datetime.date, .time, or .datetime. 
We use the type + # of the Field to determine which to cast to, but it's not + # always available. + # As a workaround, we cast to date if all the time-related + # values are 0, or to time if the date is 1/1/1900. + # This could be cleaned a bit by adding a method to the Field + # classes to normalize values from the database (the to_python + # method is used for validation and isn't what we want here). + elif isinstance(value, Database.Timestamp): + # In Python 2.3, the cx_Oracle driver returns its own + # Timestamp object that we must convert to a datetime class. + if not isinstance(value, datetime.datetime): + value = datetime.datetime(value.year, value.month, value.day, value.hour, + value.minute, value.second, value.fsecond) + if isinstance(field, DateTimeField): + pass # DateTimeField subclasses DateField so must be checked first. + elif isinstance(field, DateField): + value = value.date() + elif isinstance(field, TimeField) or (value.year == 1900 and value.month == value.day == 1): + value = value.time() + elif value.hour == value.minute == value.second == value.microsecond == 0: + value = value.date() + values.append(value) + return values + + return OracleQuerySet + OPERATOR_MAPPING = { 'exact': '= %s', - 'iexact': 'LIKE %s', - 'contains': 'LIKE %s', - 'icontains': 'LIKE %s', + 'iexact': '= UPPER(%s)', + 'contains': "LIKE %s ESCAPE '\\'", + 'icontains': "LIKE UPPER(%s) ESCAPE '\\'", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', - 'startswith': 'LIKE %s', - 'endswith': 'LIKE %s', - 'istartswith': 'LIKE %s', - 'iendswith': 'LIKE %s', + 'startswith': "LIKE %s ESCAPE '\\'", + 'endswith': "LIKE %s ESCAPE '\\'", + 'istartswith': "LIKE UPPER(%s) ESCAPE '\\'", + 'iendswith': "LIKE UPPER(%s) ESCAPE '\\'", } diff --git a/django/db/backends/oracle/client.py b/django/db/backends/oracle/client.py index 7e32ebef2f..372783aa97 100644 --- a/django/db/backends/oracle/client.py +++ b/django/db/backends/oracle/client.py @@ -2,9 +2,10 @@ from django.conf import settings import os def runshell(): - args = '' - args += settings.DATABASE_USER + dsn = settings.DATABASE_USER if settings.DATABASE_PASSWORD: - args += "/%s" % settings.DATABASE_PASSWORD - args += "@%s" % settings.DATABASE_NAME - os.execvp('sqlplus', args) + dsn += "/%s" % settings.DATABASE_PASSWORD + if settings.DATABASE_NAME: + dsn += "@%s" % settings.DATABASE_NAME + args = ["sqlplus", "-L", dsn] + os.execvp("sqlplus", args) diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py index 14a864ac28..6b1a30d55d 100644 --- a/django/db/backends/oracle/creation.py +++ b/django/db/backends/oracle/creation.py @@ -1,26 +1,304 @@ +import sys, time +from django.core import management + +# This dictionary maps Field objects to their associated Oracle column +# types, as strings. Column-type strings can contain format strings; they'll +# be interpolated against the values of Field.__dict__ before being output. +# If a column type is set to None, it won't be included in the output. 
DATA_TYPES = { - 'AutoField': 'number(38)', - 'BooleanField': 'number(1)', - 'CharField': 'varchar2(%(maxlength)s)', - 'CommaSeparatedIntegerField': 'varchar2(%(maxlength)s)', - 'DateField': 'date', - 'DateTimeField': 'date', - 'DecimalField': 'number(%(max_digits)s, %(decimal_places)s)', - 'FileField': 'varchar2(100)', - 'FilePathField': 'varchar2(100)', - 'FloatField': 'double precision', - 'ImageField': 'varchar2(100)', - 'IntegerField': 'integer', - 'IPAddressField': 'char(15)', - 'ManyToManyField': None, - 'NullBooleanField': 'integer', - 'OneToOneField': 'integer', - 'PhoneNumberField': 'varchar(20)', - 'PositiveIntegerField': 'integer', - 'PositiveSmallIntegerField': 'smallint', - 'SlugField': 'varchar(50)', - 'SmallIntegerField': 'smallint', - 'TextField': 'long', - 'TimeField': 'timestamp', - 'USStateField': 'varchar(2)', + 'AutoField': 'NUMBER(11)', + 'BooleanField': 'NUMBER(1) CHECK (%(column)s IN (0,1))', + 'CharField': 'VARCHAR2(%(maxlength)s)', + 'CommaSeparatedIntegerField': 'VARCHAR2(%(maxlength)s)', + 'DateField': 'DATE', + 'DateTimeField': 'TIMESTAMP', + 'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)', + 'FileField': 'VARCHAR2(100)', + 'FilePathField': 'VARCHAR2(100)', + 'FloatField': 'DOUBLE PRECISION', + 'ImageField': 'VARCHAR2(100)', + 'IntegerField': 'NUMBER(11)', + 'IPAddressField': 'VARCHAR2(15)', + 'ManyToManyField': None, + 'NullBooleanField': 'NUMBER(1) CHECK ((%(column)s IN (0,1)) OR (%(column)s IS NULL))', + 'OneToOneField': 'NUMBER(11)', + 'PhoneNumberField': 'VARCHAR2(20)', + 'PositiveIntegerField': 'NUMBER(11) CHECK (%(column)s >= 0)', + 'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(column)s >= 0)', + 'SlugField': 'VARCHAR2(50)', + 'SmallIntegerField': 'NUMBER(11)', + 'TextField': 'NCLOB', + 'TimeField': 'TIMESTAMP', + 'URLField': 'VARCHAR2(200)', + 'USStateField': 'CHAR(2)', } + +TEST_DATABASE_PREFIX = 'test_' +PASSWORD = 'Im_a_lumberjack' +REMEMBER = {} + + +def create_test_db(settings, connection, backend, verbosity=1, autoclobber=False): + + TEST_DATABASE_NAME = _test_database_name(settings) + TEST_DATABASE_USER = _test_database_user(settings) + TEST_DATABASE_PASSWD = _test_database_passwd(settings) + TEST_DATABASE_TBLSPACE = _test_database_tblspace(settings) + TEST_DATABASE_TBLSPACE_TMP = _test_database_tblspace_tmp(settings) + + parameters = { + 'dbname': TEST_DATABASE_NAME, + 'user': TEST_DATABASE_USER, + 'password': TEST_DATABASE_PASSWD, + 'tblspace': TEST_DATABASE_TBLSPACE, + 'tblspace_temp': TEST_DATABASE_TBLSPACE_TMP, + } + + REMEMBER['user'] = settings.DATABASE_USER + REMEMBER['passwd'] = settings.DATABASE_PASSWORD + + cursor = connection.cursor() + if _test_database_create(settings): + if verbosity >= 1: + print 'Creating test database...' + try: + _create_test_db(cursor, parameters, verbosity) + except Exception, e: + sys.stderr.write("Got an error creating the test database: %s\n" % e) + if not autoclobber: + confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME) + if autoclobber or confirm == 'yes': + try: + if verbosity >= 1: + print "Destroying old test database..." + _destroy_test_db(cursor, parameters, verbosity) + if verbosity >= 1: + print "Creating test database..." + _create_test_db(cursor, parameters, verbosity) + except Exception, e: + sys.stderr.write("Got an error recreating the test database: %s\n" % e) + sys.exit(2) + else: + print "Tests cancelled." 
+ sys.exit(1) + + if _test_user_create(settings): + if verbosity >= 1: + print "Creating test user..." + try: + _create_test_user(cursor, parameters, verbosity) + except Exception, e: + sys.stderr.write("Got an error creating the test user: %s\n" % e) + if not autoclobber: + confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_USER) + if autoclobber or confirm == 'yes': + try: + if verbosity >= 1: + print "Destroying old test user..." + _destroy_test_user(cursor, parameters, verbosity) + if verbosity >= 1: + print "Creating test user..." + _create_test_user(cursor, parameters, verbosity) + except Exception, e: + sys.stderr.write("Got an error recreating the test user: %s\n" % e) + sys.exit(2) + else: + print "Tests cancelled." + sys.exit(1) + + connection.close() + settings.DATABASE_USER = TEST_DATABASE_USER + settings.DATABASE_PASSWORD = TEST_DATABASE_PASSWD + + management.syncdb(verbosity, interactive=False) + + # Get a cursor (even though we don't need one yet). This has + # the side effect of initializing the test database. + cursor = connection.cursor() + + +def destroy_test_db(settings, connection, backend, old_database_name, verbosity=1): + connection.close() + + TEST_DATABASE_NAME = _test_database_name(settings) + TEST_DATABASE_USER = _test_database_user(settings) + TEST_DATABASE_PASSWD = _test_database_passwd(settings) + TEST_DATABASE_TBLSPACE = _test_database_tblspace(settings) + TEST_DATABASE_TBLSPACE_TMP = _test_database_tblspace_tmp(settings) + + settings.DATABASE_NAME = old_database_name + settings.DATABASE_USER = REMEMBER['user'] + settings.DATABASE_PASSWORD = REMEMBER['passwd'] + + parameters = { + 'dbname': TEST_DATABASE_NAME, + 'user': TEST_DATABASE_USER, + 'password': TEST_DATABASE_PASSWD, + 'tblspace': TEST_DATABASE_TBLSPACE, + 'tblspace_temp': TEST_DATABASE_TBLSPACE_TMP, + } + + REMEMBER['user'] = settings.DATABASE_USER + REMEMBER['passwd'] = settings.DATABASE_PASSWORD + + cursor = connection.cursor() + time.sleep(1) # To avoid "database is being accessed by other users" errors. + if _test_user_create(settings): + if verbosity >= 1: + print 'Destroying test user...' + _destroy_test_user(cursor, parameters, verbosity) + if _test_database_create(settings): + if verbosity >= 1: + print 'Destroying test database...' 
+ _destroy_test_db(cursor, parameters, verbosity) + connection.close() + + +def _create_test_db(cursor, parameters, verbosity): + if verbosity >= 2: + print "_create_test_db(): dbname = %s" % parameters['dbname'] + statements = [ + """CREATE TABLESPACE %(tblspace)s + DATAFILE '%(tblspace)s.dbf' SIZE 20M + REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M + """, + """CREATE TEMPORARY TABLESPACE %(tblspace_temp)s + TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M + REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M + """, + ] + _execute_statements(cursor, statements, parameters, verbosity) + + +def _create_test_user(cursor, parameters, verbosity): + if verbosity >= 2: + print "_create_test_user(): username = %s" % parameters['user'] + statements = [ + """CREATE USER %(user)s + IDENTIFIED BY %(password)s + DEFAULT TABLESPACE %(tblspace)s + TEMPORARY TABLESPACE %(tblspace_temp)s + """, + """GRANT CONNECT, RESOURCE TO %(user)s""", + ] + _execute_statements(cursor, statements, parameters, verbosity) + + +def _destroy_test_db(cursor, parameters, verbosity): + if verbosity >= 2: + print "_destroy_test_db(): dbname=%s" % parameters['dbname'] + statements = [ + 'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS', + 'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS', + ] + _execute_statements(cursor, statements, parameters, verbosity) + + +def _destroy_test_user(cursor, parameters, verbosity): + if verbosity >= 2: + print "_destroy_test_user(): user=%s" % parameters['user'] + print "Be patient. This can take some time..." + statements = [ + 'DROP USER %(user)s CASCADE', + ] + _execute_statements(cursor, statements, parameters, verbosity) + + +def _execute_statements(cursor, statements, parameters, verbosity): + for template in statements: + stmt = template % parameters + if verbosity >= 2: + print stmt + try: + cursor.execute(stmt) + except Exception, err: + sys.stderr.write("Failed (%s)\n" % (err)) + raise + + +def _test_database_name(settings): + name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + try: + if settings.TEST_DATABASE_NAME: + name = settings.TEST_DATABASE_NAME + except AttributeError: + pass + except: + raise + return name + + +def _test_database_create(settings): + name = True + try: + if settings.TEST_DATABASE_CREATE: + name = True + else: + name = False + except AttributeError: + pass + except: + raise + return name + + +def _test_user_create(settings): + name = True + try: + if settings.TEST_USER_CREATE: + name = True + else: + name = False + except AttributeError: + pass + except: + raise + return name + + +def _test_database_user(settings): + name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + try: + if settings.TEST_DATABASE_USER: + name = settings.TEST_DATABASE_USER + except AttributeError: + pass + except: + raise + return name + + +def _test_database_passwd(settings): + name = PASSWORD + try: + if settings.TEST_DATABASE_PASSWD: + name = settings.TEST_DATABASE_PASSWD + except AttributeError: + pass + except: + raise + return name + + +def _test_database_tblspace(settings): + name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + try: + if settings.TEST_DATABASE_TBLSPACE: + name = settings.TEST_DATABASE_TBLSPACE + except AttributeError: + pass + except: + raise + return name + + +def _test_database_tblspace_tmp(settings): + name = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + '_temp' + try: + if settings.TEST_DATABASE_TBLSPACE_TMP: + name = settings.TEST_DATABASE_TBLSPACE_TMP + except AttributeError: + pass + except: + raise 
+ return name diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py index 7634206178..44430a0029 100644 --- a/django/db/backends/oracle/introspection.py +++ b/django/db/backends/oracle/introspection.py @@ -1,14 +1,19 @@ +from django.db.backends.oracle.base import quote_name import re +import cx_Oracle + foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)") def get_table_list(cursor): "Returns a list of table names in the current database." cursor.execute("SELECT TABLE_NAME FROM USER_TABLES") - return [row[0] for row in cursor.fetchall()] + return [row[0].upper() for row in cursor.fetchall()] def get_table_description(cursor, table_name): - return table_name + "Returns a description of the table, with the DB-API cursor.description interface." + cursor.execute("SELECT * FROM %s WHERE ROWNUM < 2" % quote_name(table_name)) + return cursor.description def _name_to_index(cursor, table_name): """ @@ -22,7 +27,24 @@ def get_relations(cursor, table_name): Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based. """ - raise NotImplementedError + cursor.execute(""" +SELECT ta.column_id - 1, tb.table_name, tb.column_id - 1 +FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb, + user_tab_cols ta, user_tab_cols tb +WHERE user_constraints.table_name = %s AND + ta.table_name = %s AND + ta.column_name = ca.column_name AND + ca.table_name = %s AND + user_constraints.constraint_name = ca.constraint_name AND + user_constraints.r_constraint_name = cb.constraint_name AND + cb.table_name = tb.table_name AND + cb.column_name = tb.column_name AND + ca.position = cb.position""", [table_name, table_name, table_name]) + + relations = {} + for row in cursor.fetchall(): + relations[row[0]] = (row[2], row[1]) + return relations def get_indexes(cursor, table_name): """ @@ -31,20 +53,46 @@ def get_indexes(cursor, table_name): {'primary_key': boolean representing whether it's the primary key, 'unique': boolean representing whether it's a unique index} """ - raise NotImplementedError + # This query retrieves each index on the given table, including the + # first associated field name + # "We were in the nick of time; you were in great peril!" + sql = """ +WITH primarycols AS ( + SELECT user_cons_columns.table_name, user_cons_columns.column_name, 1 AS PRIMARYCOL + FROM user_cons_columns, user_constraints + WHERE user_cons_columns.constraint_name = user_constraints.constraint_name AND + user_constraints.constraint_type = 'P' AND + user_cons_columns.table_name = %s), + uniquecols AS ( + SELECT user_ind_columns.table_name, user_ind_columns.column_name, 1 AS UNIQUECOL + FROM user_indexes, user_ind_columns + WHERE uniqueness = 'UNIQUE' AND + user_indexes.index_name = user_ind_columns.index_name AND + user_ind_columns.table_name = %s) +SELECT allcols.column_name, primarycols.primarycol, uniquecols.UNIQUECOL +FROM (SELECT column_name FROM primarycols UNION SELECT column_name FROM +uniquecols) allcols, + primarycols, uniquecols +WHERE allcols.column_name = primarycols.column_name (+) AND + allcols.column_name = uniquecols.column_name (+) + """ + cursor.execute(sql, [table_name, table_name]) + indexes = {} + for row in cursor.fetchall(): + # row[1] (idx.indkey) is stored in the DB as an array. It comes out as + # a string of space-separated integers. 
This designates the field + # indexes (1-based) of the fields that have indexes on the table. + # Here, we skip any indexes across multiple fields. + indexes[row[0]] = {'primary_key': row[1], 'unique': row[2]} + return indexes -# Maps type codes to Django Field types. +# Maps type objects to Django Field types. DATA_TYPES_REVERSE = { - 16: 'BooleanField', - 21: 'SmallIntegerField', - 23: 'IntegerField', - 25: 'TextField', - 869: 'IPAddressField', - 1043: 'CharField', - 1082: 'DateField', - 1083: 'TimeField', - 1114: 'DateTimeField', - 1184: 'DateTimeField', - 1266: 'TimeField', - 1700: 'DecimalField', + cx_Oracle.CLOB: 'TextField', + cx_Oracle.DATETIME: 'DateTimeField', + cx_Oracle.FIXED_CHAR: 'CharField', + cx_Oracle.NCLOB: 'TextField', + cx_Oracle.NUMBER: 'DecimalField', + cx_Oracle.STRING: 'CharField', + cx_Oracle.TIMESTAMP: 'DateTimeField', } diff --git a/django/db/backends/postgresql/base.py b/django/db/backends/postgresql/base.py index fedbb6b7f1..351b553506 100644 --- a/django/db/backends/postgresql/base.py +++ b/django/db/backends/postgresql/base.py @@ -87,7 +87,7 @@ class DatabaseWrapper(local): global postgres_version if not postgres_version: cursor.execute("SELECT version()") - postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')] + postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')] if settings.DEBUG: return util.CursorDebugWrapper(cursor, self) return cursor @@ -105,7 +105,14 @@ class DatabaseWrapper(local): self.connection.close() self.connection = None +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = True +needs_datetime_string_cast = True +needs_upper_for_iops = False supports_constraints = True +supports_tablespaces = False +uses_case_insensitive_names = False def quote_name(name): if name.startswith('"') and name.endswith('"'): @@ -138,6 +145,9 @@ def get_date_trunc_sql(lookup_type, field_name): # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): sql = "LIMIT %s" % limit if offset and offset != 0: @@ -149,7 +159,7 @@ def get_random_function_sql(): def get_deferrable_sql(): return " DEFERRABLE INITIALLY DEFERRED" - + def get_fulltext_search_sql(field_name): raise NotImplementedError @@ -159,12 +169,21 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return None + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables themselves) and put the database in an empty 'initial' state - - """ + + """ if tables: if postgres_version[0] >= 8 and postgres_version[1] >= 1: # Postgres 8.1+ can do 'TRUNCATE x, y, z...;'. In fact, it *has to* in order to be able to @@ -175,7 +194,7 @@ def get_sql_flush(style, tables, sequences): style.SQL_FIELD(', '.join([quote_name(table) for table in tables])) )] else: - # Older versions of Postgres can't do TRUNCATE in a single call, so they must use + # Older versions of Postgres can't do TRUNCATE in a single call, so they must use # a simple delete. 
sql = ['%s %s %s;' % \ (style.SQL_KEYWORD('DELETE'), @@ -243,7 +262,7 @@ def get_sql_sequence_reset(style, model_list): style.SQL_KEYWORD('FROM'), style.SQL_TABLE(f.m2m_db_table()))) return output - + # Register these custom typecasts, because Django expects dates/times to be # in Python's native (standard-library) datetime/time format, whereas psycopg # use mx.DateTime by default. diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index d9ad363ac1..01c0503078 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -55,7 +55,7 @@ class DatabaseWrapper(local): global postgres_version if not postgres_version: cursor.execute("SELECT version()") - postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')] + postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')] if settings.DEBUG: return util.CursorDebugWrapper(cursor, self) return cursor @@ -73,7 +73,14 @@ class DatabaseWrapper(local): self.connection.close() self.connection = None +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = True +needs_datetime_string_cast = False +needs_upper_for_iops = False supports_constraints = True +supports_tablespaces = False +uses_case_insensitive_names = True def quote_name(name): if name.startswith('"') and name.endswith('"'): @@ -98,6 +105,9 @@ def get_date_trunc_sql(lookup_type, field_name): # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): sql = "LIMIT %s" % limit if offset and offset != 0: @@ -119,6 +129,15 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "DEFAULT" +def get_max_name_length(): + return None + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables @@ -139,7 +158,7 @@ def get_sql_flush(style, tables, sequences): style.SQL_KEYWORD('FROM'), style.SQL_FIELD(quote_name(table)) ) for table in tables] - + # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements # to reset sequence indices for sequence in sequences: @@ -200,7 +219,7 @@ def get_sql_sequence_reset(style, model_list): style.SQL_KEYWORD('FROM'), style.SQL_TABLE(f.m2m_db_table()))) return output - + OPERATOR_MAPPING = { 'exact': '= %s', 'iexact': 'ILIKE %s', diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index 5cd67a32f5..b753879d7a 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -107,7 +107,14 @@ class SQLiteCursorWrapper(Database.Cursor): def convert_query(self, query, num_params): return query % tuple("?" * num_params) +allows_group_by_ordinal = True +allows_unique_and_pk = True +autoindexes_primary_keys = True +needs_datetime_string_cast = True +needs_upper_for_iops = False supports_constraints = False +supports_tablespaces = False +uses_case_insensitive_names = False def quote_name(name): if name.startswith('"') and name.endswith('"'): @@ -139,6 +146,9 @@ def get_date_trunc_sql(lookup_type, field_name): # sqlite doesn't support DATE_TRUNC, so we fake it as above. 
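# Illustrative sketch, not part of the patch: the effect of
# SQLiteCursorWrapper.convert_query above. Django builds queries with "%s"
# placeholders, while pysqlite expects "?"; the query text here is made up.
def convert_query(query, num_params):
    return query % tuple("?" * num_params)

print convert_query("SELECT * FROM datatypes_donut WHERE name = %s AND id = %s", 2)
# SELECT * FROM datatypes_donut WHERE name = ? AND id = ?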
return 'django_date_trunc("%s", %s)' % (lookup_type.lower(), field_name) +def get_datetime_cast_sql(): + return None + def get_limit_offset_sql(limit, offset=None): sql = "LIMIT %s" % limit if offset and offset != 0: @@ -160,11 +170,20 @@ def get_drop_foreignkey_sql(): def get_pk_default_value(): return "NULL" +def get_max_name_length(): + return None + +def get_start_transaction_sql(): + return "BEGIN;" + +def get_autoinc_sql(table): + return None + def get_sql_flush(style, tables, sequences): """Return a list of SQL statements required to remove all data from all tables in the database (without actually removing the tables themselves) and put the database in an empty 'initial' state - + """ # NB: The generated SQL below is specific to SQLite # Note: The DELETE FROM... SQL generated below works for SQLite databases @@ -182,7 +201,7 @@ def get_sql_sequence_reset(style, model_list): "Returns a list of the SQL statements to reset sequences for the given models." # No sequence reset required return [] - + def _sqlite_date_trunc(lookup_type, dt): try: dt = util.typecast_timestamp(dt) diff --git a/django/db/backends/util.py b/django/db/backends/util.py index 81c752e664..58f2d1e990 100644 --- a/django/db/backends/util.py +++ b/django/db/backends/util.py @@ -1,4 +1,5 @@ import datetime +import md5 from time import time try: @@ -107,6 +108,16 @@ def rev_typecast_decimal(d): return None return str(d) +def truncate_name(name, length=None): + """Shortens a string to a repeatable mangled version with the given length. + """ + if length is None or len(name) <= length: + return name + + hash = md5.md5(name).hexdigest()[:4] + + return '%s%s' % (name[:length-4], hash) + ################################################################################## # Helper functions for dictfetch* for databases that don't natively support them # ################################################################################## diff --git a/django/db/models/base.py b/django/db/models/base.py index e02d6de861..b1c4a43628 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -96,9 +96,9 @@ class Model(object): def __init__(self, *args, **kwargs): dispatcher.send(signal=signals.pre_init, sender=self.__class__, args=args, kwargs=kwargs) - + # There is a rather weird disparity here; if kwargs, it's set, then args - # overrides it. It should be one or the other; don't duplicate the work + # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and nstantiation for iteration is 33% faster. args_len = len(args) @@ -122,10 +122,10 @@ class Model(object): # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) - + # Now we're left with the unprocessed fields that *must* come from # keywords, or default. - + for field in fields_iter: if kwargs: if isinstance(field.rel, ManyToOneRel): @@ -147,7 +147,7 @@ class Model(object): try: val = getattr(rel_obj, field.rel.get_related_field().attname) except AttributeError: - raise TypeError("Invalid value: %r should be a %s instance, not a %s" % + raise TypeError("Invalid value: %r should be a %s instance, not a %s" % (field.name, field.rel.to, type(rel_obj))) else: val = kwargs.pop(field.attname, field.get_default()) @@ -210,17 +210,18 @@ class Model(object): record_exists = True if pk_set: # Determine whether a record with the primary key already exists. 
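# Illustrative sketch, not part of the patch: what the new truncate_name
# helper produces for an identifier longer than Oracle's 30-character limit.
# The name below is invented; only the shape of the result matters.
from django.db.backends.util import truncate_name

name = 'regressiontests_averylongapplication_averylongmodelname'
print truncate_name(name, 30)
# -> the first 26 characters of the name plus a 4-character md5 suffix; the
#    suffix is derived from the full name, so the mangling is repeatable.
print len(truncate_name(name, 30))      # 30
print truncate_name('short_name', 30)   # 'short_name' (returned unchanged)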
- cursor.execute("SELECT 1 FROM %s WHERE %s=%%s LIMIT 1" % \ - (backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column)), [pk_val]) + cursor.execute("SELECT COUNT(*) FROM %s WHERE %s=%%s" % \ + (backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column)), + self._meta.pk.get_db_prep_lookup('exact', pk_val)) # If it does already exist, do an UPDATE. - if cursor.fetchone(): + if cursor.fetchone()[0] > 0: db_values = [f.get_db_prep_save(f.pre_save(self, False)) for f in non_pks] if db_values: cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \ (backend.quote_name(self._meta.db_table), ','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]), backend.quote_name(self._meta.pk.column)), - db_values + [pk_val]) + db_values + self._meta.pk.get_db_prep_lookup('exact', pk_val)) else: record_exists = False if not pk_set or not record_exists: diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index 136ce31b8b..016e26099b 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -74,12 +74,16 @@ class Field(object): core=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, prepopulate_from=None, unique_for_date=None, unique_for_month=None, unique_for_year=None, validator_list=None, choices=None, radio_admin=None, - help_text='', db_column=None): + help_text='', db_column=None, db_tablespace=None): self.name = name self.verbose_name = verbose_name self.primary_key = primary_key self.maxlength, self.unique = maxlength, unique self.blank, self.null = blank, null + # Oracle treats the empty string ('') as null, so coerce the null + # option whenever '' is a possible value. + if self.empty_strings_allowed and settings.DATABASE_ENGINE == 'oracle': + self.null = True self.core, self.rel, self.default = core, rel, default self.editable = editable self.serialize = serialize @@ -91,6 +95,7 @@ class Field(object): self.radio_admin = radio_admin self.help_text = help_text self.db_column = db_column + self.db_tablespace = db_tablespace # Set db_index to True if the field has a relationship and doesn't explicitly set db_index. self.db_index = db_index @@ -201,7 +206,7 @@ class Field(object): if callable(self.default): return self.default() return self.default - if not self.empty_strings_allowed or self.null: + if not self.empty_strings_allowed or (self.null and settings.DATABASE_ENGINE != 'oracle'): return None return "" @@ -806,6 +811,7 @@ class IPAddressField(Field): validators.isValidIPAddress4(field_data, None) class NullBooleanField(Field): + empty_strings_allowed = False def __init__(self, *args, **kwargs): kwargs['null'] = True Field.__init__(self, *args, **kwargs) @@ -875,10 +881,18 @@ class TimeField(Field): Field.__init__(self, verbose_name, name, **kwargs) def get_db_prep_lookup(self, lookup_type, value): - if lookup_type == 'range': - value = [str(v) for v in value] + if settings.DATABASE_ENGINE == 'oracle': + # Oracle requires a date in order to parse. + def prep(value): + if isinstance(value, datetime.time): + value = datetime.datetime.combine(datetime.date(1900, 1, 1), value) + return str(value) else: - value = str(value) + prep = str + if lookup_type == 'range': + value = [prep(v) for v in value] + else: + value = prep(value) return Field.get_db_prep_lookup(self, lookup_type, value) def pre_save(self, model_instance, add): @@ -896,7 +910,15 @@ class TimeField(Field): # doesn't support microseconds. 
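# Illustrative sketch, not part of the patch: the effect of the Oracle branch
# in TimeField.get_db_prep_lookup above. Oracle needs a full date in order to
# parse a time value, so a bare datetime.time is promoted to a datetime on the
# dummy date 1900-01-01 before being turned into a string.
import datetime

def prep(value):
    if isinstance(value, datetime.time):
        value = datetime.datetime.combine(datetime.date(1900, 1, 1), value)
    return str(value)

print prep(datetime.time(5, 30))   # '1900-01-01 05:30:00'
print prep('05:30:00')             # '05:30:00'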
if settings.DATABASE_ENGINE == 'mysql' and hasattr(value, 'microsecond'): value = value.replace(microsecond=0) - value = str(value) + if settings.DATABASE_ENGINE == 'oracle': + # cx_Oracle expects a datetime.datetime to persist into TIMESTAMP field. + if isinstance(value, datetime.time): + value = datetime.datetime(1900, 1, 1, value.hour, value.minute, + value.second, value.microsecond) + elif isinstance(value, basestring): + value = datetime.datetime(*(time.strptime(value, '%H:%M:%S')[:6])) + else: + value = str(value) return Field.get_db_prep_save(self, value) def get_manipulator_field_objs(self): diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index e514b3f854..7286959302 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -336,10 +336,7 @@ def create_many_related_manager(superclass): (target_col_name, self.join_table, source_col_name, target_col_name, ",".join(['%s'] * len(new_ids))), [self._pk_val] + list(new_ids)) - if cursor.rowcount is not None and cursor.rowcount != 0: - existing_ids = set([row[0] for row in cursor.fetchmany(cursor.rowcount)]) - else: - existing_ids = set() + existing_ids = set([row[0] for row in cursor.fetchall()]) # Add the ones that aren't there already for obj_id in (new_ids - existing_ids): diff --git a/django/db/models/options.py b/django/db/models/options.py index dd6c586ddd..93627f7b72 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -13,7 +13,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]| DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering', 'unique_together', 'permissions', 'get_latest_by', - 'order_with_respect_to', 'app_label') + 'order_with_respect_to', 'app_label', 'db_tablespace') class Options(object): def __init__(self, meta): @@ -27,6 +27,7 @@ class Options(object): self.object_name, self.app_label = None, None self.get_latest_by = None self.order_with_respect_to = None + self.db_tablespace = None self.admin = None self.meta = meta self.pk = None @@ -59,6 +60,8 @@ class Options(object): del self.meta def _prepare(self, model): + from django.db import backend + from django.db.backends.util import truncate_name if self.order_with_respect_to: self.order_with_respect_to = self.get_field(self.order_with_respect_to) self.ordering = ('_order',) @@ -73,6 +76,8 @@ class Options(object): # If the db_table wasn't provided, use the app_label + module_name. if not self.db_table: self.db_table = "%s_%s" % (self.app_label, self.module_name) + self.db_table = truncate_name(self.db_table, + backend.get_max_name_length()) def add_field(self, field): # Insert the given field in the order in which it was created, using @@ -88,10 +93,10 @@ class Options(object): def __repr__(self): return '' % self.object_name - + def __str__(self): return "%s.%s" % (self.app_label, self.module_name) - + def get_field(self, name, many_to_many=True): "Returns the requested field by name. Raises FieldDoesNotExist on error." 
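# Illustrative sketch, not part of the patch: the db_table fallback in
# Options._prepare above. With no explicit Meta.db_table, the name is built
# from app_label and module_name and then clipped to the active backend's
# identifier limit (None means unlimited; 30 on Oracle). Assumes a configured
# settings module; the labels are invented.
from django.db import backend
from django.db.backends.util import truncate_name

app_label, module_name = 'inventory', 'discontinuedcatalogueentry'
db_table = "%s_%s" % (app_label, module_name)
print truncate_name(db_table, backend.get_max_name_length())
# unchanged on backends without a limit; shortened with an md5 suffix on Oracle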
to_search = many_to_many and (self.fields + self.many_to_many) or self.fields diff --git a/django/db/models/query.py b/django/db/models/query.py index 8a43b1c90f..24d701b10d 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -4,6 +4,7 @@ from django.db.models import signals, loading from django.dispatch import dispatcher from django.utils.datastructures import SortedDict from django.contrib.contenttypes import generic +import datetime import operator import re @@ -78,7 +79,7 @@ def quote_only_if_word(word): else: return backend.quote_name(word) -class QuerySet(object): +class _QuerySet(object): "Represents a lazy database lookup for a set of objects" def __init__(self, model=None): self.model = model @@ -182,13 +183,18 @@ class QuerySet(object): cursor = connection.cursor() cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) + fill_cache = self._select_related - index_end = len(self.model._meta.fields) + fields = self.model._meta.fields + index_end = len(fields) + has_resolve_columns = hasattr(self, 'resolve_columns') while 1: rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) if not rows: raise StopIteration for row in rows: + if has_resolve_columns: + row = self.resolve_columns(row, fields) if fill_cache: obj, index_end = get_cached_row(klass=self.model, row=row, index_start=0, max_depth=self._max_related_depth) @@ -552,6 +558,12 @@ class QuerySet(object): return select, " ".join(sql), params +# Use the backend's QuerySet class if it defines one, otherwise use _QuerySet. +if hasattr(backend, 'get_query_set_class'): + QuerySet = backend.get_query_set_class(_QuerySet) +else: + QuerySet = _QuerySet + class ValuesQuerySet(QuerySet): def __init__(self, *args, **kwargs): super(ValuesQuerySet, self).__init__(*args, **kwargs) @@ -566,35 +578,38 @@ class ValuesQuerySet(QuerySet): # self._fields is a list of field names to fetch. if self._fields: - #columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] if not self._select: - columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] + fields = [self.model._meta.get_field(f, many_to_many=False) for f in self._fields] else: - columns = [] + fields = [] for f in self._fields: if f in [field.name for field in self.model._meta.fields]: - columns.append( self.model._meta.get_field(f, many_to_many=False).column ) + fields.append(self.model._meta.get_field(f, many_to_many=False)) elif not self._select.has_key( f ): raise FieldDoesNotExist, '%s has no field named %r' % ( self.model._meta.object_name, f ) field_names = self._fields else: # Default to all fields. - columns = [f.column for f in self.model._meta.fields] - field_names = [f.attname for f in self.model._meta.fields] + fields = self.model._meta.fields + field_names = [f.attname for f in fields] + columns = [f.column for f in fields] select = ['%s.%s' % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns] - # Add any additional SELECTs. 
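# Illustrative sketch, not part of the patch: the shape of the backend hook
# used above. A backend module may define get_query_set_class(); the class it
# returns can add a resolve_columns() method to post-process each raw row (the
# Oracle backend supplies one so its rows come back as the expected Python
# types). The subclass below is a do-nothing stand-in, not the real Oracle
# implementation.
def get_query_set_class(DefaultQuerySet):
    "Create a custom QuerySet class for this backend."
    class CustomQuerySet(DefaultQuerySet):
        def resolve_columns(self, row, fields=()):
            # Convert backend-specific values field by field; identity here.
            return list(row)
    return CustomQuerySet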
if self._select: select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()]) cursor = connection.cursor() cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) + + has_resolve_columns = hasattr(self, 'resolve_columns') while 1: rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) if not rows: raise StopIteration for row in rows: + if has_resolve_columns: + row = self.resolve_columns(row, fields) yield dict(zip(field_names, row)) def _clone(self, klass=None, **kwargs): @@ -605,25 +620,49 @@ class ValuesQuerySet(QuerySet): class DateQuerySet(QuerySet): def iterator(self): from django.db.backends.util import typecast_timestamp + from django.db.models.fields import DateTimeField self._order_by = () # Clear this because it'll mess things up otherwise. if self._field.null: self._where.append('%s.%s IS NOT NULL' % \ (backend.quote_name(self.model._meta.db_table), backend.quote_name(self._field.column))) - try: select, sql, params = self._get_sql_clause() except EmptyResultSet: raise StopIteration - sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1 %s' % \ + table_name = backend.quote_name(self.model._meta.db_table) + field_name = backend.quote_name(self._field.column) + + if backend.allows_group_by_ordinal: + group_by = '1' + else: + group_by = backend.get_date_trunc_sql(self._kind, + '%s.%s' % (table_name, field_name)) + + sql = 'SELECT %s %s GROUP BY %s ORDER BY 1 %s' % \ (backend.get_date_trunc_sql(self._kind, '%s.%s' % (backend.quote_name(self.model._meta.db_table), - backend.quote_name(self._field.column))), sql, self._order) + backend.quote_name(self._field.column))), sql, group_by, self._order) cursor = connection.cursor() cursor.execute(sql, params) - # We have to manually run typecast_timestamp(str()) on the results, because - # MySQL doesn't automatically cast the result of date functions as datetime - # objects -- MySQL returns the values as strings, instead. - return [typecast_timestamp(str(row[0])) for row in cursor.fetchall()] + + has_resolve_columns = hasattr(self, 'resolve_columns') + needs_datetime_string_cast = backend.needs_datetime_string_cast + dates = [] + # It would be better to use self._field here instead of DateTimeField(), + # but in Oracle that will result in a list of datetime.date instead of + # datetime.datetime. + fields = [DateTimeField()] + while 1: + rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) + if not rows: + return dates + for row in rows: + date = row[0] + if has_resolve_columns: + date = self.resolve_columns([date], fields)[0] + elif needs_datetime_string_cast: + date = typecast_timestamp(str(date)) + dates.append(date) def _clone(self, klass=None, **kwargs): c = super(DateQuerySet, self)._clone(klass, **kwargs) @@ -731,8 +770,17 @@ def get_where_clause(lookup_type, table_prefix, field_name, value): if table_prefix.endswith('.'): table_prefix = backend.quote_name(table_prefix[:-1])+'.' 
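# Illustrative sketch, not part of the patch: why the date loop above keeps the
# typecast_timestamp() round-trip behind needs_datetime_string_cast. Some
# backends hand date-function results back as strings rather than datetime
# objects; the sample value imitates such a string row.
from django.db.backends.util import typecast_timestamp

raw = '2007-04-20 16:19:59'
print typecast_timestamp(str(raw))   # datetime.datetime(2007, 4, 20, 16, 19, 59)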
field_name = backend.quote_name(field_name) + if type(value) == datetime.datetime and backend.get_datetime_cast_sql(): + cast_sql = backend.get_datetime_cast_sql() + else: + cast_sql = '%s' + if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith') and backend.needs_upper_for_iops: + format = 'UPPER(%s%s) %s' + else: + format = '%s%s %s' try: - return '%s%s %s' % (table_prefix, field_name, (backend.OPERATOR_MAPPING[lookup_type] % '%s')) + return format % (table_prefix, field_name, + backend.OPERATOR_MAPPING[lookup_type] % cast_sql) except KeyError: pass if lookup_type == 'in': diff --git a/django/newforms/widgets.py b/django/newforms/widgets.py index b0c9a70f57..d4b5f596d1 100644 --- a/django/newforms/widgets.py +++ b/django/newforms/widgets.py @@ -8,12 +8,10 @@ except NameError: from sets import Set as set # Python 2.3 fallback from itertools import chain - from django.utils.datastructures import MultiValueDict from django.utils.html import escape from django.utils.translation import gettext from django.utils.encoding import StrAndUnicode, smart_unicode - from util import flatatt __all__ = ( diff --git a/django/test/utils.py b/django/test/utils.py index f5122fa96d..4a858a6ec7 100644 --- a/django/test/utils.py +++ b/django/test/utils.py @@ -1,6 +1,7 @@ import sys, time from django.conf import settings -from django.db import connection, transaction, backend +from django.db import connection, backend, get_creation_module +from django.core import management, mail from django.core import management, mail from django.dispatch import dispatcher from django.test import signals @@ -88,6 +89,12 @@ def get_postgresql_create_suffix(): return '' def create_test_db(verbosity=1, autoclobber=False): + # If the database backend wants to create the test DB itself, let it + creation_module = get_creation_module() + if hasattr(creation_module, "create_test_db"): + creation_module.create_test_db(settings, connection, backend, verbosity, autoclobber) + return + if verbosity >= 1: print "Creating test database..." # If we're using SQLite, it's more convenient to test against an @@ -142,6 +149,12 @@ def create_test_db(verbosity=1, autoclobber=False): cursor = connection.cursor() def destroy_test_db(old_database_name, verbosity=1): + # If the database wants to drop the test DB itself, let it + creation_module = get_creation_module() + if hasattr(creation_module, "destroy_test_db"): + creation_module.destroy_test_db(settings, connection, backend, old_database_name, verbosity) + return + # Unless we're using SQLite, remove the test database to clean up after # ourselves. Connect to the previous database (not the test database) # to do so, because it's not allowed to delete a database while being diff --git a/docs/faq.txt b/docs/faq.txt index bdd8c5360e..d7d8f41146 100644 --- a/docs/faq.txt +++ b/docs/faq.txt @@ -301,7 +301,7 @@ means it can run on a variety of server platforms. If you want to use Django with a database, which is probably the case, you'll also need a database engine. PostgreSQL_ is recommended, because we're -PostgreSQL fans, and MySQL_ and `SQLite 3`_ are also supported. +PostgreSQL fans, and MySQL_, `SQLite 3`_, and Oracle_ are also supported. .. _Python: http://www.python.org/ .. _Apache 2: http://httpd.apache.org/ @@ -310,6 +310,7 @@ PostgreSQL fans, and MySQL_ and `SQLite 3`_ are also supported. .. _PostgreSQL: http://www.postgresql.org/ .. _MySQL: http://www.mysql.com/ .. _`SQLite 3`: http://www.sqlite.org/ +.. 
_Oracle: http://www.oracle.com/ Do I lose anything by using Python 2.3 versus newer Python versions, such as Python 2.5? ---------------------------------------------------------------------------------------- diff --git a/docs/install.txt b/docs/install.txt index 698e44f0ef..99aad4e52d 100644 --- a/docs/install.txt +++ b/docs/install.txt @@ -64,6 +64,8 @@ installed. * If you're using SQLite, you'll need pysqlite_. Use version 2.0.3 or higher. +* If you're using Oracle, you'll need cx_Oracle_, version 4.3.1 or higher. + .. _PostgreSQL: http://www.postgresql.org/ .. _MySQL: http://www.mysql.com/ .. _Django's ticket system: http://code.djangoproject.com/report/1 @@ -73,6 +75,7 @@ installed. .. _SQLite: http://www.sqlite.org/ .. _pysqlite: http://initd.org/tracker/pysqlite .. _MySQL backend: ../databases/ +.. _cx_Oracle: http://www.python.net/crew/atuining/cx_Oracle/ Remove any old versions of Django ================================= diff --git a/docs/model-api.txt b/docs/model-api.txt index fa143bc502..f7b56110d6 100644 --- a/docs/model-api.txt +++ b/docs/model-api.txt @@ -492,6 +492,11 @@ has ``null=True``, that means it has two possible values for "no data": possible values for "no data;" Django convention is to use the empty string, not ``NULL``. +.. note:: + Due to database limitations, when using the Oracle backend the + ``null=True`` option will be coerced for string-based fields that can + blank, and the value ``NULL`` will be stored to denote the empty string. + ``blank`` ~~~~~~~~~ @@ -586,6 +591,13 @@ scenes. If ``True``, ``django-admin.py sqlindexes`` will output a ``CREATE INDEX`` statement for this field. +``db_tablespace`` +~~~~~~~~~~~~~~~~~ + +If this field is indexed, the name of the database tablespace to use for the +index. The default is the ``db_tablespace`` of the model, if any. If the +backend doesn't support tablespaces, this option is ignored. + ``default`` ~~~~~~~~~~~ @@ -996,6 +1008,12 @@ If your database table name is an SQL reserved word, or contains characters that aren't allowed in Python variable names -- notably, the hyphen -- that's OK. Django quotes column and table names behind the scenes. +``db_tablespace`` +----------------- + +The name of the database tablespace to use for the model. If the backend +doesn't support tablespaces, this option is ignored. + ``get_latest_by`` ----------------- diff --git a/docs/settings.txt b/docs/settings.txt index 12e6dab4bc..897cdc8099 100644 --- a/docs/settings.txt +++ b/docs/settings.txt @@ -244,9 +244,9 @@ DATABASE_ENGINE Default: ``''`` (Empty string) -Which database backend to use. Either ``'postgresql_psycopg2'``, -``'postgresql'``, ``'mysql'``, ``'mysql_old'``, ``'sqlite3'`` or -``'ado_mssql'``. +The database backend to use. Either ``'postgresql_psycopg2'``, +``'postgresql'``, ``'mysql'``, ``'mysql_old'``, ``'sqlite3'``, +``'oracle'``, or ``'ado_mssql'``. DATABASE_HOST ------------- diff --git a/tests/regressiontests/datatypes/__init__.py b/tests/regressiontests/datatypes/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regressiontests/datatypes/models.py b/tests/regressiontests/datatypes/models.py new file mode 100644 index 0000000000..8c5c8c285d --- /dev/null +++ b/tests/regressiontests/datatypes/models.py @@ -0,0 +1,59 @@ +""" +This is a basic model to test saving and loading boolean and date-related +types, which in the past were problematic for some database backends. 
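# Illustrative sketch, not part of the patch: the two new "db_tablespace"
# options documented above, as they would appear in a model definition. The
# names are invented; backends reporting supports_tablespaces = False simply
# ignore both settings.
from django.db import models

class Customer(models.Model):
    # Tablespace for this field's index (the field is indexed via db_index).
    name = models.CharField(maxlength=100, db_index=True, db_tablespace='myapp_indexes')

    class Meta:
        # Tablespace for the model's own table.
        db_tablespace = 'myapp_tables'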
+""" + +from django.db import models +from django.conf import settings + +class Donut(models.Model): + name = models.CharField(maxlength=100) + is_frosted = models.BooleanField(default=False) + has_sprinkles = models.NullBooleanField() + baked_date = models.DateField(null=True) + baked_time = models.TimeField(null=True) + consumed_at = models.DateTimeField(null=True) + + class Meta: + ordering = ('consumed_at',) + + def __str__(self): + return self.name + +__test__ = {'API_TESTS': """ +# No donuts are in the system yet. +>>> Donut.objects.all() +[] + +>>> d = Donut(name='Apple Fritter') + +# Ensure we're getting True and False, not 0 and 1 +>>> d.is_frosted +False +>>> d.has_sprinkles +>>> d.has_sprinkles = True +>>> d.has_sprinkles == True +True +>>> d.save() +>>> d2 = Donut.objects.all()[0] +>>> d2 + +>>> d2.is_frosted == False +True +>>> d2.has_sprinkles == True +True + +>>> import datetime +>>> d2.baked_date = datetime.date(year=1938, month=6, day=4) +>>> d2.baked_time = datetime.time(hour=5, minute=30) +>>> d2.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59) +>>> d2.save() + +>>> d3 = Donut.objects.all()[0] +>>> d3.baked_date +datetime.date(1938, 6, 4) +>>> d3.baked_time +datetime.time(5, 30) +>>> d3.consumed_at +datetime.datetime(2007, 4, 20, 16, 19, 59) +"""} diff --git a/tests/regressiontests/serializers_regress/tests.py b/tests/regressiontests/serializers_regress/tests.py index 1a144c8356..febcfa822e 100644 --- a/tests/regressiontests/serializers_regress/tests.py +++ b/tests/regressiontests/serializers_regress/tests.py @@ -15,6 +15,7 @@ from django.utils.functional import curry from django.core import serializers from django.db import transaction from django.core import management +from django.conf import settings from models import * try: @@ -116,10 +117,13 @@ test_data = [ (data_obj, 31, DateTimeData, None), (data_obj, 40, EmailData, "hovercraft@example.com"), (data_obj, 41, EmailData, None), + (data_obj, 42, EmailData, ""), (data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'), (data_obj, 51, FileData, None), + (data_obj, 52, FileData, ""), (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"), (data_obj, 61, FilePathData, None), + (data_obj, 62, FilePathData, ""), (data_obj, 70, DecimalData, decimal.Decimal('12.345')), (data_obj, 71, DecimalData, decimal.Decimal('-12.345')), (data_obj, 72, DecimalData, decimal.Decimal('0.0')), @@ -146,6 +150,7 @@ test_data = [ (data_obj, 131, PositiveSmallIntegerData, None), (data_obj, 140, SlugData, "this-is-a-slug"), (data_obj, 141, SlugData, None), + (data_obj, 142, SlugData, ""), (data_obj, 150, SmallData, 12), (data_obj, 151, SmallData, -12), (data_obj, 152, SmallData, 0), @@ -160,8 +165,10 @@ The end."""), (data_obj, 171, TimeData, None), (data_obj, 180, USStateData, "MA"), (data_obj, 181, USStateData, None), + (data_obj, 182, USStateData, ""), (data_obj, 190, XMLData, ""), (data_obj, 191, XMLData, None), + (data_obj, 192, XMLData, ""), (generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']), (generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']), @@ -241,6 +248,15 @@ The end."""), # (pk_obj, 790, XMLPKData, ""), ] +# Because Oracle treats the empty string as NULL, Oracle is expected to fail +# when field.empty_strings_allowed is True and the value is None; skip these +# tests. 
+if settings.DATABASE_ENGINE == 'oracle': + test_data = [data for data in test_data + if not (data[0] == data_obj and + data[2]._meta.get_field('data').empty_strings_allowed and + data[3] is None)] + # Dynamically create serializer tests to ensure that all # registered serializers are automatically tested. class SerializerTests(unittest.TestCase):
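# Illustrative sketch, not part of the patch: the Oracle filter above applied
# to a toy test_data list, so its effect is visible without a database. The
# stub classes stand in for the real serializer regression-test models.
data_obj = 'data'

class _Field(object):
    def __init__(self, empty_strings_allowed):
        self.empty_strings_allowed = empty_strings_allowed

class _Meta(object):
    def __init__(self, field):
        self._field = field
    def get_field(self, name):
        return self._field

class CharData(object):
    _meta = _Meta(_Field(True))

class IntegerData(object):
    _meta = _Meta(_Field(False))

test_data = [
    (data_obj, 1, CharData, None),      # dropped: '' and NULL collide on Oracle
    (data_obj, 2, CharData, 'x'),       # kept
    (data_obj, 3, IntegerData, None),   # kept: integers have a real NULL
]
test_data = [data for data in test_data
             if not (data[0] == data_obj and
                     data[2]._meta.get_field('data').empty_strings_allowed and
                     data[3] is None)]
print [d[1] for d in test_data]   # [2, 3]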