2015-09-06 01:39:42 +08:00
|
|
|
import collections
|
2015-01-11 06:52:59 +08:00
|
|
|
import logging
|
2013-05-11 11:08:45 +08:00
|
|
|
import os
|
2013-07-01 20:22:27 +08:00
|
|
|
import unittest
|
2015-01-28 20:35:27 +08:00
|
|
|
from importlib import import_module
|
2013-07-01 20:22:27 +08:00
|
|
|
from unittest import TestSuite, defaultTestLoader
|
2013-05-11 11:08:45 +08:00
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
from django.core.exceptions import ImproperlyConfigured
|
2015-09-06 03:52:36 +08:00
|
|
|
from django.db import DEFAULT_DB_ALIAS, connections
|
2014-04-12 17:42:06 +08:00
|
|
|
from django.test import SimpleTestCase, TestCase
|
2013-05-11 11:08:45 +08:00
|
|
|
from django.test.utils import setup_test_environment, teardown_test_environment
|
2014-12-14 20:37:26 +08:00
|
|
|
from django.utils.datastructures import OrderedSet
|
2015-01-11 06:52:59 +08:00
|
|
|
from django.utils.six import StringIO
|
|
|
|
|
|
|
|
|
|
|
|
class DebugSQLTextTestResult(unittest.TextTestResult):
    """
    A TextTestResult that captures SQL logged on the 'django.db.backends'
    logger during each test and appends it to each failure/error entry so
    printErrorList() can show the queries that ran.
    """

    def __init__(self, stream, descriptions, verbosity):
        self.logger = logging.getLogger('django.db.backends')
        self.logger.setLevel(logging.DEBUG)
        # No per-test buffer exists until startTest() runs. Errors recorded
        # earlier (e.g. raised in setUpClass()) must not assume one, so start
        # with None and guard in _debug_sql_output().
        self.debug_sql_stream = None
        super(DebugSQLTextTestResult, self).__init__(stream, descriptions, verbosity)

    def startTest(self, test):
        # Redirect SQL logging into a fresh per-test buffer.
        self.debug_sql_stream = StringIO()
        self.handler = logging.StreamHandler(self.debug_sql_stream)
        self.logger.addHandler(self.handler)
        super(DebugSQLTextTestResult, self).startTest(test)

    def stopTest(self, test):
        super(DebugSQLTextTestResult, self).stopTest(test)
        self.logger.removeHandler(self.handler)
        if self.showAll:
            # In verbose mode, echo the captured SQL immediately.
            self.debug_sql_stream.seek(0)
            self.stream.write(self.debug_sql_stream.read())
            self.stream.writeln(self.separator2)

    def _debug_sql_output(self):
        # Return the SQL captured for the current test, or '' when the
        # error happened before startTest() (e.g. in setUpClass()).
        if self.debug_sql_stream is None:
            return ''
        self.debug_sql_stream.seek(0)
        return self.debug_sql_stream.read()

    def addError(self, test, err):
        super(DebugSQLTextTestResult, self).addError(test, err)
        # Extend the (test, traceback) tuple with the captured SQL.
        self.errors[-1] = self.errors[-1] + (self._debug_sql_output(),)

    def addFailure(self, test, err):
        super(DebugSQLTextTestResult, self).addFailure(test, err)
        # Extend the (test, traceback) tuple with the captured SQL.
        self.failures[-1] = self.failures[-1] + (self._debug_sql_output(),)

    def printErrorList(self, flavour, errors):
        # Same layout as unittest's version, with the SQL block appended.
        for test, err, sql_debug in errors:
            self.stream.writeln(self.separator1)
            self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
            self.stream.writeln(self.separator2)
            self.stream.writeln("%s" % err)
            self.stream.writeln(self.separator2)
            self.stream.writeln("%s" % sql_debug)
|
2013-05-11 11:08:45 +08:00
|
|
|
|
|
|
|
|
|
|
|
class DiscoverRunner(object):
    """
    A Django test runner that uses unittest2 test discovery.
    """

    # Hooks for subclasses to swap in their own suite/runner/loader classes.
    test_suite = TestSuite
    test_runner = unittest.TextTestRunner
    test_loader = defaultTestLoader
    # TestCase (DB-backed) tests run before SimpleTestCase tests.
    reorder_by = (TestCase, SimpleTestCase)

    def __init__(self, pattern=None, top_level=None, verbosity=1,
                 interactive=True, failfast=False, keepdb=False,
                 reverse=False, debug_sql=False, **kwargs):
        self.pattern = pattern
        self.top_level = top_level
        self.verbosity = verbosity
        self.interactive = interactive
        self.failfast = failfast
        self.keepdb = keepdb
        self.reverse = reverse
        self.debug_sql = debug_sql

    @classmethod
    def add_arguments(cls, parser):
        """Register this runner's command-line options on an argparse parser."""
        parser.add_argument('-t', '--top-level-directory',
            action='store', dest='top_level', default=None,
            help='Top level of project for unittest discovery.')
        parser.add_argument('-p', '--pattern', action='store', dest='pattern',
            default="test*.py",
            help='The test matching pattern. Defaults to test*.py.')
        parser.add_argument('-k', '--keepdb', action='store_true', dest='keepdb',
            default=False,
            help='Preserves the test DB between runs.')
        parser.add_argument('-r', '--reverse', action='store_true', dest='reverse',
            default=False,
            help='Reverses test cases order.')
        parser.add_argument('-d', '--debug-sql', action='store_true', dest='debug_sql',
            default=False,
            help='Prints logged SQL queries on failure.')

    def setup_test_environment(self, **kwargs):
        """Prepare the global test environment and force DEBUG off."""
        setup_test_environment()
        settings.DEBUG = False
        # Install the unittest Ctrl-C handler for graceful interruption.
        unittest.installHandler()

    def build_suite(self, test_labels=None, extra_tests=None, **kwargs):
        """
        Build a test suite from `test_labels` (dotted module paths,
        "module.Class[.method]" names, or filesystem paths; default '.')
        plus any `extra_tests`, reordered per `reorder_by`.
        """
        suite = self.test_suite()
        test_labels = test_labels or ['.']
        extra_tests = extra_tests or []

        discover_kwargs = {}
        if self.pattern is not None:
            discover_kwargs['pattern'] = self.pattern
        if self.top_level is not None:
            discover_kwargs['top_level_dir'] = self.top_level

        for label in test_labels:
            kwargs = discover_kwargs.copy()
            tests = None

            label_as_path = os.path.abspath(label)

            # if a module, or "module.ClassName[.method_name]", just run those
            if not os.path.exists(label_as_path):
                tests = self.test_loader.loadTestsFromName(label)
            elif os.path.isdir(label_as_path) and not self.top_level:
                # Try to be a bit smarter than unittest about finding the
                # default top-level for a given directory path, to avoid
                # breaking relative imports. (Unittest's default is to set
                # top-level equal to the path, which means relative imports
                # will result in "Attempted relative import in non-package.").

                # We'd be happy to skip this and require dotted module paths
                # (which don't cause this problem) instead of file paths (which
                # do), but in the case of a directory in the cwd, which would
                # be equally valid if considered as a top-level module or as a
                # directory path, unittest unfortunately prefers the latter.

                # Walk upwards past every package directory (one containing
                # __init__.py) to find the first non-package ancestor.
                top_level = label_as_path
                while True:
                    init_py = os.path.join(top_level, '__init__.py')
                    if os.path.exists(init_py):
                        try_next = os.path.dirname(top_level)
                        if try_next == top_level:
                            # __init__.py all the way down? give up.
                            break
                        top_level = try_next
                        continue
                    break
                kwargs['top_level_dir'] = top_level

            if not (tests and tests.countTestCases()) and is_discoverable(label):
                # Try discovery if path is a package or directory
                tests = self.test_loader.discover(start_dir=label, **kwargs)

                # Make unittest forget the top-level dir it calculated from this
                # run, to support running tests from two different top-levels.
                self.test_loader._top_level_dir = None

            suite.addTests(tests)

        for test in extra_tests:
            suite.addTest(test)

        return reorder_suite(suite, self.reorder_by, self.reverse)

    def setup_databases(self, **kwargs):
        """Create the test databases; returns config for teardown_databases()."""
        return setup_databases(
            self.verbosity, self.interactive, self.keepdb, self.debug_sql,
            **kwargs
        )

    def get_resultclass(self):
        """Return the result class for --debug-sql runs, else None (default)."""
        return DebugSQLTextTestResult if self.debug_sql else None

    def run_suite(self, suite, **kwargs):
        """Run the suite with this runner's test_runner and return the result."""
        resultclass = self.get_resultclass()
        return self.test_runner(
            verbosity=self.verbosity,
            failfast=self.failfast,
            resultclass=resultclass,
        ).run(suite)

    def teardown_databases(self, old_config, **kwargs):
        """
        Destroys all the non-mirror databases.
        """
        for connection, old_name, destroy in old_config:
            if destroy:
                connection.creation.destroy_test_db(old_name, self.verbosity, self.keepdb)

    def teardown_test_environment(self, **kwargs):
        """Undo setup_test_environment(): remove the Ctrl-C handler, restore env."""
        unittest.removeHandler()
        teardown_test_environment()

    def suite_result(self, suite, result, **kwargs):
        """Return the number of failed/errored tests as the run's exit count."""
        return len(result.failures) + len(result.errors)

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """
        Run the unit tests for all the test labels in the provided list.

        Test labels should be dotted Python paths to test modules, test
        classes, or test methods.

        A list of 'extra' tests may also be provided; these tests
        will be added to the test suite.

        Returns the number of tests that failed.
        """
        self.setup_test_environment()
        suite = self.build_suite(test_labels, extra_tests)
        old_config = self.setup_databases()
        result = self.run_suite(suite)
        self.teardown_databases(old_config)
        self.teardown_test_environment()
        return self.suite_result(suite, result)
|
|
|
|
|
|
|
|
|
2013-12-17 00:04:28 +08:00
|
|
|
def is_discoverable(label):
    """
    Check if a test label points to a python package or file directory.

    Relative labels like "." and ".." are seen as directories.
    """
    try:
        module = import_module(label)
    except (ImportError, TypeError):
        # Not importable as a module path; treat the label as a filesystem
        # path instead.
        return os.path.isdir(os.path.abspath(label))
    # Importable: discoverable only if it's a package (has __path__).
    return hasattr(module, '__path__')
|
|
|
|
|
|
|
|
|
2013-05-11 11:08:45 +08:00
|
|
|
def dependency_ordered(test_databases, dependencies):
    """
    Reorder test_databases into an order that honors the dependencies
    described in TEST[DEPENDENCIES].
    """
    ordered = []
    resolved = set()

    # Map each database signature to the union of its aliases' dependencies.
    deps_by_sig = {}

    # Sanity check - no DB can depend on its own alias.
    for sig, (_, aliases) in test_databases:
        combined_deps = set()
        for alias in aliases:
            combined_deps.update(dependencies.get(alias, []))
        if not combined_deps.isdisjoint(aliases):
            raise ImproperlyConfigured(
                "Circular dependency: databases %r depend on each other, "
                "but are aliases." % aliases)
        deps_by_sig[sig] = combined_deps

    remaining = test_databases
    while remaining:
        deferred = []
        progressed = False

        # Emit every DB whose dependencies are already resolved; defer the rest.
        for sig, (db_name, aliases) in remaining:
            if deps_by_sig[sig].issubset(resolved):
                resolved.update(aliases)
                ordered.append((sig, (db_name, aliases)))
                progressed = True
            else:
                deferred.append((sig, (db_name, aliases)))

        # A full pass without progress means an unresolvable cycle.
        if not progressed:
            raise ImproperlyConfigured(
                "Circular dependency in TEST[DEPENDENCIES]")
        remaining = deferred

    return ordered
|
|
|
|
|
|
|
|
|
2014-11-23 00:59:05 +08:00
|
|
|
def reorder_suite(suite, classes, reverse=False):
    """
    Reorders a test suite by test type.

    `classes` is a sequence of types

    All tests of type classes[0] are placed first, then tests of type
    classes[1], etc. Tests with no match in classes are placed last.

    If `reverse` is True, tests within classes are sorted in opposite order,
    but test classes are not reversed.
    """
    suite_class = type(suite)
    # One bin per class, plus a trailing bin for unmatched tests.
    bins = [OrderedSet() for _ in range(len(classes) + 1)]
    partition_suite(suite, classes, bins, reverse=reverse)
    reordered = suite_class()
    for tests in bins:
        reordered.addTests(tests)
    return reordered
|
2013-05-11 11:08:45 +08:00
|
|
|
|
|
|
|
|
2014-11-23 00:59:05 +08:00
|
|
|
def partition_suite(suite, classes, bins, reverse=False):
    """
    Partitions a test suite by test type. Also prevents duplicated tests.

    classes is a sequence of types
    bins is a sequence of TestSuites, one more than classes
    reverse changes the ordering of tests within bins

    Tests of type classes[i] are added to bins[i],
    tests with no match found in classes are place in bins[-1]
    """
    suite_class = type(suite)
    tests = reversed(tuple(suite)) if reverse else suite
    for test in tests:
        # Nested suites are flattened recursively.
        if isinstance(test, suite_class):
            partition_suite(test, classes, bins, reverse=reverse)
            continue
        for index, klass in enumerate(classes):
            if isinstance(test, klass):
                bins[index].add(test)
                break
        else:
            # No class matched: collect in the catch-all bin.
            bins[-1].add(test)
|
2013-05-11 11:08:45 +08:00
|
|
|
|
|
|
|
|
2015-09-06 15:45:36 +08:00
|
|
|
def get_unique_databases():
    """
    Figure out which databases actually need to be created.

    Deduplicate entries in DATABASES that correspond to the same database
    or are configured as test mirrors.

    Returns an ordered mapping of signatures to (name, set of aliases)
    where all aliases share the same underlying database.
    """
    test_databases = {}
    dependencies = {}
    default_sig = connections[DEFAULT_DB_ALIAS].creation.test_db_signature()

    for alias in connections:
        connection = connections[alias]
        test_settings = connection.settings_dict['TEST']

        if test_settings['MIRROR']:
            # A mirror shares the signature of the connection it mirrors.
            mirror_alias = test_settings['MIRROR']
            signature = connections[mirror_alias].creation.test_db_signature()
        else:
            signature = connection.creation.test_db_signature()

        if 'DEPENDENCIES' in test_settings:
            dependencies[alias] = test_settings['DEPENDENCIES']
        elif alias != DEFAULT_DB_ALIAS and signature != default_sig:
            # Non-default databases implicitly depend on the default one.
            dependencies[alias] = test_settings.get('DEPENDENCIES', [DEFAULT_DB_ALIAS])

        # Store a tuple with DB parameters that uniquely identify it.
        # If we have two aliases with the same values for that tuple,
        # we only need to create the test database once.
        _, aliases = test_databases.setdefault(
            signature, (connection.settings_dict['NAME'], set()))
        aliases.add(alias)

    return collections.OrderedDict(
        dependency_ordered(test_databases.items(), dependencies))
|
2015-09-06 01:39:42 +08:00
|
|
|
|
|
|
|
|
|
|
|
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, **kwargs):
    """
    Creates the test databases.
    """
    test_databases = get_unique_databases()

    old_names = []

    for signature, (db_name, aliases) in test_databases.items():
        primary_alias = None
        for alias in aliases:
            conn = connections[alias]
            is_primary = primary_alias is None
            old_names.append((conn, db_name, is_primary))

            if is_primary:
                # Actually create the database for the first connection
                primary_alias = alias
                conn.creation.create_test_db(
                    verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=conn.settings_dict.get("TEST", {}).get("SERIALIZE", True),
                )
            else:
                # Configure all other connections as mirrors of the first one
                conn.creation.set_as_test_mirror(
                    connections[primary_alias].settings_dict)

    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
|